Regres: Add support for generating collated coverage info

`run_testlist` now supports the `--coverage` flag, which collates the coverage information from every dEQP test that is run.
This information is written to a `coverage.json` file, which can be consumed by a web-based viewer.
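
For example, a run with coverage enabled might look like this (the dEQP binary
and test list flags are illustrative and not part of this change; the only new
flag is `--coverage`, and `VK_ICD_FILENAMES` must point at the
`vk_swiftshader_icd.json` of a coverage-instrumented SwiftShader build so the
tool can locate the executable and an LLVM toolchain):

  export VK_ICD_FILENAMES=/path/to/build/Linux/vk_swiftshader_icd.json
  run_testlist --deqp-vk=/path/to/deqp-vk --test-list=vk-master.txt --coverage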

An early version of this viewer can be found here (likely to move):
https://ben-clayton.github.io/swiftshader-coverage/
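
For reference, the `coverage.json` written by `Tree.JSON()` uses short keys to
keep the file compact; roughly:

  "n" - string table of test path segments
  "t" - the test tree, each node encoded as [nameIndex, [children...]]
  "s" - the span table, each span encoded as [startLine, startCol, endLine, endCol]
  "f" - per-file coverage: file path -> [[testIndex, {"s": [spanIDs], "c": [children...]}], ...]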

Bug: b/152192800
Change-Id: I52434f1ce30e6a091d2932fbae309cd81809cb79
Reviewed-on: https://swiftshader-review.googlesource.com/c/SwiftShader/+/42890
Reviewed-by: Antonio Maiorano <amaiorano@google.com>
Tested-by: Ben Clayton <bclayton@google.com>
Kokoro-Presubmit: kokoro <noreply+kokoro@google.com>
diff --git a/tests/regres/cmd/regres/main.go b/tests/regres/cmd/regres/main.go
index 0766fc5..a74f8cd 100644
--- a/tests/regres/cmd/regres/main.go
+++ b/tests/regres/cmd/regres/main.go
@@ -147,8 +147,8 @@
 	}
 
 	for _, path := range allDirs {
-		if _, err := os.Stat(*path); err != nil {
-			return cause.Wrap(err, "Couldn't find path '%v'", *path)
+		if !util.IsDir(*path) {
+			return fmt.Errorf("Couldn't find path '%v'", *path)
 		}
 	}
 
@@ -917,12 +917,12 @@
 	log.Printf("Running tests for '%s'\n", t.commit)
 
 	swiftshaderICDSo := filepath.Join(t.buildDir, "libvk_swiftshader.so")
-	if _, err := os.Stat(swiftshaderICDSo); err != nil {
+	if !util.IsFile(swiftshaderICDSo) {
 		return nil, fmt.Errorf("Couldn't find '%s'", swiftshaderICDSo)
 	}
 
 	swiftshaderICDJSON := filepath.Join(t.buildDir, "Linux", "vk_swiftshader_icd.json")
-	if _, err := os.Stat(swiftshaderICDJSON); err != nil {
+	if !util.IsFile(swiftshaderICDJSON) {
 		return nil, fmt.Errorf("Couldn't find '%s'", swiftshaderICDJSON)
 	}
 
diff --git a/tests/regres/cmd/run_testlist/main.go b/tests/regres/cmd/run_testlist/main.go
index 75a0f8e..65a5c70 100644
--- a/tests/regres/cmd/run_testlist/main.go
+++ b/tests/regres/cmd/run_testlist/main.go
@@ -21,9 +21,12 @@
 package main
 
 import (
+	"bytes"
+	"encoding/json"
 	"errors"
 	"flag"
 	"fmt"
+	"io/ioutil"
 	"log"
 	"math/rand"
 	"os"
@@ -33,9 +36,12 @@
 	"strings"
 	"time"
 
+	"../../cov"
 	"../../deqp"
+	"../../llvm"
 	"../../shell"
 	"../../testlist"
+	"../../util"
 )
 
 var (
@@ -48,6 +54,7 @@
 	limit         = flag.Int("limit", 0, "only run a maximum of this number of tests")
 	shuffle       = flag.Bool("shuffle", false, "shuffle tests")
 	noResults     = flag.Bool("no-results", false, "disable generation of results.json file")
+	genCoverage   = flag.Bool("coverage", false, "generate test coverage")
 )
 
 const testTimeout = time.Minute * 2
@@ -98,6 +105,15 @@
 		TestTimeout:      testTimeout,
 	}
 
+	if *genCoverage {
+		icdPath := findSwiftshaderICD()
+		config.CoverageEnv = &cov.Env{
+			LLVM:    findLLVMToolchain(icdPath),
+			RootDir: projectRootDir(),
+			ExePath: findSwiftshaderSO(icdPath),
+		}
+	}
+
 	res, err := config.Run()
 	if err != nil {
 		return err
@@ -113,6 +129,12 @@
 		}
 	}
 
+	if *genCoverage {
+		if err := ioutil.WriteFile("coverage.json", []byte(res.Coverage.JSON()), 0666); err != nil {
+			return err
+		}
+	}
+
 	if !*noResults {
 		err = res.Save(*output)
 		if err != nil {
@@ -123,6 +145,80 @@
 	return nil
 }
 
+func findSwiftshaderICD() string {
+	icdPaths := strings.Split(os.Getenv("VK_ICD_FILENAMES"), ";")
+	for _, icdPath := range icdPaths {
+		_, file := filepath.Split(icdPath)
+		if file == "vk_swiftshader_icd.json" {
+			return icdPath
+		}
+	}
+	panic("Cannot find vk_swiftshader_icd.json in VK_ICD_FILENAMES")
+}
+
+func findSwiftshaderSO(vkSwiftshaderICD string) string {
+	root := struct {
+		ICD struct {
+			Path string `json:"library_path"`
+		}
+	}{}
+
+	icd, err := ioutil.ReadFile(vkSwiftshaderICD)
+	if err != nil {
+		panic(fmt.Errorf("Could not read '%v'. %v", vkSwiftshaderICD, err))
+	}
+
+	if err := json.NewDecoder(bytes.NewReader(icd)).Decode(&root); err != nil {
+		panic(fmt.Errorf("Could not parse '%v'. %v", vkSwiftshaderICD, err))
+	}
+
+	if util.IsFile(root.ICD.Path) {
+		return root.ICD.Path
+	}
+	dir := filepath.Dir(vkSwiftshaderICD)
+	path, err := filepath.Abs(filepath.Join(dir, root.ICD.Path))
+	if err != nil {
+		panic(fmt.Errorf("Could not locate ICD so at '%v'. %v", root.ICD.Path, err))
+	}
+
+	return path
+}
+
+func findLLVMToolchain(vkSwiftshaderICD string) llvm.Toolchain {
+	minVersion := llvm.Version{Major: 8}
+
+	// Try finding the llvm toolchain via the CMake generated
+	// coverage-toolchain.txt file that sits next to vk_swiftshader_icd.json.
+	dir := filepath.Dir(vkSwiftshaderICD)
+	toolchainInfoPath := filepath.Join(dir, "coverage-toolchain.txt")
+	if util.IsFile(toolchainInfoPath) {
+		if body, err := ioutil.ReadFile(toolchainInfoPath); err == nil {
+			toolchain := llvm.Search(string(body)).FindAtLeast(minVersion)
+			if toolchain != nil {
+				return *toolchain
+			}
+		}
+	}
+
+	// Fall back to searching PATH.
+	toolchain := llvm.Search().FindAtLeast(llvm.Version{Major: 8})
+	if toolchain == nil {
+		log.Fatal("Could not find LLVM toolchain")
+	}
+
+	return *toolchain
+}
+
+func projectRootDir() string {
+	_, thisFile, _, _ := runtime.Caller(1)
+	thisDir := filepath.Dir(thisFile)
+	root, err := filepath.Abs(filepath.Join(thisDir, "../../../.."))
+	if err != nil {
+		panic(err)
+	}
+	return root
+}
+
 func main() {
 	flag.ErrHelp = errors.New("regres is a tool to detect regressions between versions of SwiftShader")
 	flag.Parse()
diff --git a/tests/regres/cov/coverage.go b/tests/regres/cov/coverage.go
new file mode 100644
index 0000000..cd30d1b
--- /dev/null
+++ b/tests/regres/cov/coverage.go
@@ -0,0 +1,629 @@
+// Copyright 2020 The SwiftShader Authors. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//    http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package cov provides functions for consuming and combining llvm coverage
+// information from multiple processes.
+package cov
+
+import (
+	"bytes"
+	"encoding/json"
+	"fmt"
+	"os"
+	"os/exec"
+	"path/filepath"
+	"sort"
+	"strings"
+
+	"../cause"
+	"../llvm"
+)
+
+// Location describes a single line-column position in a source file.
+type Location struct {
+	Line, Column int
+}
+
+func (l Location) String() string {
+	return fmt.Sprintf("%v:%v", l.Line, l.Column)
+}
+
+// Span describes a start and end interval in a source file.
+type Span struct {
+	Start, End Location
+}
+
+func (l Span) String() string {
+	return fmt.Sprintf("%v-%v", l.Start, l.End)
+}
+
+// File describes the coverage spans in a single source file.
+type File struct {
+	Path  string
+	Spans []Span
+}
+
+// Coverage describes the coverage spans for all the source files for a single
+// process invocation.
+type Coverage struct {
+	Files []File
+}
+
+// Env holds the environment settings for performing coverage processing.
+type Env struct {
+	LLVM    llvm.Toolchain
+	RootDir string // path to SwiftShader git root directory
+	ExePath string // path to the executable binary
+}
+
+// AppendRuntimeEnv returns the environment variables env with the
+// LLVM_PROFILE_FILE environment variable appended.
+func AppendRuntimeEnv(env []string, coverageFile string) []string {
+	return append(env, "LLVM_PROFILE_FILE="+coverageFile)
+}
+
+// Import uses the llvm-profdata and llvm-cov tools to import the coverage
+// information from a .profraw file.
+func (e Env) Import(profrawPath string) (*Coverage, error) {
+	profdata := profrawPath + ".profdata"
+
+	if err := exec.Command(e.LLVM.Profdata(), "merge", "-sparse", profrawPath, "-o", profdata).Run(); err != nil {
+		return nil, cause.Wrap(err, "llvm-profdata errored")
+	}
+	defer os.Remove(profdata)
+
+	args := []string{
+		"export",
+		e.ExePath,
+		"-instr-profile=" + profdata,
+		"-format=text",
+	}
+	if e.LLVM.Version.GreaterEqual(llvm.Version{Major: 9}) {
+		// LLVM 9 has new flags that omit stuff we don't care about.
+		args = append(args,
+			"-skip-expansions",
+			"-skip-functions",
+		)
+	}
+	data, err := exec.Command(e.LLVM.Cov(), args...).Output()
+	if err != nil {
+		return nil, cause.Wrap(err, "llvm-cov errored")
+	}
+
+	c, err := e.parse(data)
+	if err != nil {
+		return nil, cause.Wrap(err, "Couldn't parse coverage json data")
+	}
+
+	return c, nil
+}
+
+// https://clang.llvm.org/docs/SourceBasedCodeCoverage.html
+// https://stackoverflow.com/a/56792192
+func (e Env) parse(raw []byte) (*Coverage, error) {
+	// line int, col int, count int64, hasCount bool, isRegionEntry bool
+	type segment []interface{}
+
+	type file struct {
+		// expansions ignored
+		Name     string    `json:"filename"`
+		Segments []segment `json:"segments"`
+		// summary ignored
+	}
+
+	type data struct {
+		Files []file `json:"files"`
+	}
+
+	root := struct {
+		Data []data `json:"data"`
+	}{}
+	err := json.NewDecoder(bytes.NewReader(raw)).Decode(&root)
+	if err != nil {
+		return nil, err
+	}
+
+	c := &Coverage{Files: make([]File, 0, len(root.Data[0].Files))}
+	for _, f := range root.Data[0].Files {
+		relpath, err := filepath.Rel(e.RootDir, f.Name)
+		if err != nil {
+			return nil, err
+		}
+		if strings.HasPrefix(relpath, "..") {
+			continue
+		}
+		file := File{Path: relpath}
+		for sIdx := 0; sIdx+1 < len(f.Segments); sIdx++ {
+			start := Location{(int)(f.Segments[sIdx][0].(float64)), (int)(f.Segments[sIdx][1].(float64))}
+			end := Location{(int)(f.Segments[sIdx+1][0].(float64)), (int)(f.Segments[sIdx+1][1].(float64))}
+			covered := f.Segments[sIdx][2].(float64) != 0
+			if covered {
+				if c := len(file.Spans); c > 0 && file.Spans[c-1].End == start {
+					file.Spans[c-1].End = end
+				} else {
+					file.Spans = append(file.Spans, Span{start, end})
+				}
+			}
+		}
+		if len(file.Spans) > 0 {
+			c.Files = append(c.Files, file)
+		}
+	}
+	return c, nil
+}
+
+// Path is a tree node path formed from a list of strings
+type Path []string
+
+// Tree represents source code coverage across a tree of different processes.
+// Each tree node is addressed by a Path.
+type Tree struct {
+	initialized bool
+	strings     Strings
+	spans       map[Span]SpanID
+	testRoot    Test
+	files       map[string]TestCoverageMap
+}
+
+func (t *Tree) init() {
+	if !t.initialized {
+		t.strings.m = map[string]StringID{}
+		t.spans = map[Span]SpanID{}
+		t.testRoot = newTest()
+		t.files = map[string]TestCoverageMap{}
+		t.initialized = true
+	}
+}
+
+// Spans returns all the spans used by the tree
+func (t *Tree) Spans() []Span {
+	out := make([]Span, 0, len(t.spans))
+	for span := range t.spans {
+		out = append(out, span)
+	}
+	sort.Slice(out, func(i, j int) bool {
+		if out[i].Start.Line < out[j].Start.Line {
+			return true
+		}
+		if out[i].Start.Line > out[j].Start.Line {
+			return false
+		}
+		return out[i].Start.Column < out[j].Start.Column
+	})
+	return out
+}
+
+// File returns the TestCoverageMap for the given file
+func (t *Tree) File(path string) TestCoverageMap {
+	return t.files[path]
+}
+
+// Tests returns the root test
+func (t *Tree) Tests() *Test { return &t.testRoot }
+
+// Strings returns the string table
+func (t *Tree) Strings() Strings { return t.strings }
+
+type indexedTest struct {
+	index   TestIndex
+	created bool
+}
+
+func (t *Tree) index(path Path) []indexedTest {
+	out := make([]indexedTest, len(path))
+	test := &t.testRoot
+	for i, p := range path {
+		name := t.strings.index(p)
+		idx, ok := test.indices[name]
+		if !ok {
+			idx = TestIndex(len(test.children))
+			test.children = append(test.children, newTest())
+			test.indices[name] = idx
+		}
+		out[i] = indexedTest{idx, !ok}
+		test = &test.children[idx]
+	}
+	return out
+}
+
+func (t *Tree) addSpans(spans []Span) SpanSet {
+	out := make(SpanSet, len(spans))
+	for _, s := range spans {
+		id, ok := t.spans[s]
+		if !ok {
+			id = SpanID(len(t.spans))
+			t.spans[s] = id
+		}
+		out[id] = struct{}{}
+	}
+	return out
+}
+
+// Add adds the coverage information cov to the tree node addressed by path.
+func (t *Tree) Add(path Path, cov *Coverage) {
+	t.init()
+
+	tests := t.index(path)
+
+nextFile:
+	// For each file with coverage...
+	for _, file := range cov.Files {
+		// Lookup or create the file's test coverage map
+		tcm, ok := t.files[file.Path]
+		if !ok {
+			tcm = TestCoverageMap{}
+			t.files[file.Path] = tcm
+		}
+
+		// Add all the spans to the map, get the span ids
+		spans := t.addSpans(file.Spans)
+
+		// Starting from the test root, walk down the test tree.
+		test := t.testRoot
+		parent := (*TestCoverage)(nil)
+		for _, indexedTest := range tests {
+			if indexedTest.created {
+				if parent != nil && len(test.children) == 1 {
+					parent.Spans = parent.Spans.add(spans)
+					delete(parent.Children, indexedTest.index)
+				} else {
+					tc := tcm.index(indexedTest.index)
+					tc.Spans = spans
+				}
+				continue nextFile
+			}
+
+			test = test.children[indexedTest.index]
+			tc := tcm.index(indexedTest.index)
+
+			// If the tree node contains spans that are not in this new test,
+			// we need to push those spans down to all the other children.
+			if lower := tc.Spans.sub(spans); len(lower) > 0 {
+				// push into each child node
+				for i := range test.children {
+					child := tc.Children.index(TestIndex(i))
+					child.Spans = child.Spans.add(lower)
+				}
+				// remove from node
+				tc.Spans = tc.Spans.sub(lower)
+			}
+
+			// The spans that are in the new test, but are not already in the
+			// tree node, continue propagating down to the child nodes.
+			spans = spans.sub(tc.Spans)
+			if len(spans) == 0 {
+				continue nextFile
+			}
+
+			tcm = tc.Children
+			parent = tc
+		}
+	}
+}
+
+// StringID is an identifier of a string
+type StringID int
+
+// Strings holds a map of string to identifier
+type Strings struct {
+	m map[string]StringID
+	s []string
+}
+
+func (s *Strings) index(str string) StringID {
+	i, ok := s.m[str]
+	if !ok {
+		i = StringID(len(s.s))
+		s.s = append(s.s, str)
+		s.m[str] = i
+	}
+	return i
+}
+
+// TestIndex is a child test index
+type TestIndex int
+
+// Test is a collection of named sub-tests
+type Test struct {
+	indices  map[StringID]TestIndex
+	children []Test
+}
+
+func newTest() Test {
+	return Test{
+		indices: map[StringID]TestIndex{},
+	}
+}
+
+type namedIndex struct {
+	name string
+	idx  TestIndex
+}
+
+func (t Test) byName(s Strings) []namedIndex {
+	out := make([]namedIndex, len(t.children))
+	for id, idx := range t.indices {
+		out[idx] = namedIndex{s.s[id], idx}
+	}
+	sort.Slice(out, func(i, j int) bool { return out[i].name < out[j].name })
+	return out
+}
+
+func (t Test) String(s Strings) string {
+	sb := strings.Builder{}
+	for i, n := range t.byName(s) {
+		child := t.children[n.idx]
+		if i > 0 {
+			sb.WriteString(" ")
+		}
+		sb.WriteString(n.name)
+		if len(child.children) > 0 {
+			sb.WriteString(fmt.Sprintf(":%v", child.String(s)))
+		}
+	}
+	return "{" + sb.String() + "}"
+}
+
+// TestCoverage holds the coverage information for a deqp test group or leaf
+// node.
+// For example, a deqp test group may hold spans that are common to all of its
+// children, and may also optionally hold child nodes that describe coverage
+// that differs per child test.
+type TestCoverage struct {
+	Spans    SpanSet
+	Children TestCoverageMap
+}
+
+func (tc TestCoverage) String(t *Test, s Strings) string {
+	sb := strings.Builder{}
+	sb.WriteString(fmt.Sprintf("{%v", tc.Spans))
+	if len(tc.Children) > 0 {
+		sb.WriteString(" ")
+		sb.WriteString(tc.Children.String(t, s))
+	}
+	sb.WriteString("}")
+	return sb.String()
+}
+
+// TestCoverageMap is a map of TestIndex to *TestCoverage.
+type TestCoverageMap map[TestIndex]*TestCoverage
+
+func (tcm TestCoverageMap) String(t *Test, s Strings) string {
+	sb := strings.Builder{}
+	for _, n := range t.byName(s) {
+		if child, ok := tcm[n.idx]; ok {
+			sb.WriteString(fmt.Sprintf("\n%v: %v", n.name, child.String(&t.children[n.idx], s)))
+		}
+	}
+	if sb.Len() > 0 {
+		sb.WriteString("\n")
+	}
+	return indent(sb.String())
+}
+
+func newTestCoverage() *TestCoverage {
+	return &TestCoverage{
+		Children: TestCoverageMap{},
+		Spans:    SpanSet{},
+	}
+}
+
+func (tcm TestCoverageMap) index(idx TestIndex) *TestCoverage {
+	tc, ok := tcm[idx]
+	if !ok {
+		tc = newTestCoverage()
+		tcm[idx] = tc
+	}
+	return tc
+}
+
+// SpanID is an identifier of a span in a Tree.
+type SpanID int
+
+// SpanSet is a set of SpanIDs.
+type SpanSet map[SpanID]struct{}
+
+// List returns the full list of sorted span ids.
+func (s SpanSet) List() []SpanID {
+	out := make([]SpanID, 0, len(s))
+	for span := range s {
+		out = append(out, span)
+	}
+	sort.Slice(out, func(i, j int) bool { return out[i] < out[j] })
+	return out
+}
+
+func (s SpanSet) String() string {
+	sb := strings.Builder{}
+	sb.WriteString(`[`)
+	l := s.List()
+	for i, span := range l {
+		if i > 0 {
+			sb.WriteString(`, `)
+		}
+		sb.WriteString(fmt.Sprintf("%v", span))
+	}
+	sb.WriteString(`]`)
+	return sb.String()
+}
+
+func (s SpanSet) sub(rhs SpanSet) SpanSet {
+	out := make(SpanSet, len(s))
+	for span := range s {
+		if _, found := rhs[span]; !found {
+			out[span] = struct{}{}
+		}
+	}
+	return out
+}
+
+func (s SpanSet) add(rhs SpanSet) SpanSet {
+	out := make(SpanSet, len(s)+len(rhs))
+	for span := range s {
+		out[span] = struct{}{}
+	}
+	for span := range rhs {
+		out[span] = struct{}{}
+	}
+	return out
+}
+
+func indent(s string) string {
+	return strings.TrimSuffix(strings.ReplaceAll(s, "\n", "\n  "), "  ")
+}
+
+// JSON returns the full test tree serialized to JSON.
+func (t *Tree) JSON() string {
+	sb := &strings.Builder{}
+	sb.WriteString(`{`)
+
+	// write the strings
+	sb.WriteString(`"n":[`)
+	for i, s := range t.strings.s {
+		if i > 0 {
+			sb.WriteString(`,`)
+		}
+		sb.WriteString(`"`)
+		sb.WriteString(s)
+		sb.WriteString(`"`)
+	}
+	sb.WriteString(`]`)
+
+	// write the tests
+	sb.WriteString(`,"t":`)
+	t.writeTestJSON(&t.testRoot, sb)
+
+	// write the spans
+	sb.WriteString(`,"s":`)
+	t.writeSpansJSON(sb)
+
+	// write the files
+	sb.WriteString(`,"f":`)
+	t.writeFilesJSON(sb)
+
+	sb.WriteString(`}`)
+	return sb.String()
+}
+
+func (t *Tree) writeTestJSON(test *Test, sb *strings.Builder) {
+	names := map[int]StringID{}
+	for name, idx := range test.indices {
+		names[int(idx)] = name
+	}
+
+	sb.WriteString(`[`)
+	for i, child := range test.children {
+		if i > 0 {
+			sb.WriteString(`,`)
+		}
+		sb.WriteString(`[`)
+		sb.WriteString(fmt.Sprintf("%v,", names[i]))
+		t.writeTestJSON(&child, sb)
+		sb.WriteString(`]`)
+	}
+
+	sb.WriteString(`]`)
+}
+
+func (t *Tree) writeSpansJSON(sb *strings.Builder) {
+	type spanAndID struct {
+		span Span
+		id   SpanID
+	}
+	spans := make([]spanAndID, 0, len(t.spans))
+	for span, id := range t.spans {
+		spans = append(spans, spanAndID{span, id})
+	}
+	sort.Slice(spans, func(i, j int) bool { return spans[i].id < spans[j].id })
+
+	sb.WriteString(`[`)
+	for i, s := range spans {
+		if i > 0 {
+			sb.WriteString(`,`)
+		}
+		span := s.span
+		sb.WriteString(fmt.Sprintf("[%v,%v,%v,%v]",
+			span.Start.Line, span.Start.Column,
+			span.End.Line, span.End.Column))
+	}
+
+	sb.WriteString(`]`)
+}
+
+func (t *Tree) writeFilesJSON(sb *strings.Builder) {
+	paths := make([]string, 0, len(t.files))
+	for path := range t.files {
+		paths = append(paths, path)
+	}
+	sort.Strings(paths)
+
+	sb.WriteString(`{`)
+	for i, path := range paths {
+		if i > 0 {
+			sb.WriteString(`,`)
+		}
+		sb.WriteString(`"`)
+		sb.WriteString(path)
+		sb.WriteString(`":`)
+		t.writeCoverageMapJSON(t.files[path], sb)
+	}
+
+	sb.WriteString(`}`)
+}
+
+func (t *Tree) writeCoverageMapJSON(c TestCoverageMap, sb *strings.Builder) {
+	ids := make([]TestIndex, 0, len(c))
+	for id := range c {
+		ids = append(ids, id)
+	}
+	sort.Slice(ids, func(i, j int) bool { return ids[i] < ids[j] })
+
+	sb.WriteString(`[`)
+	for i, id := range ids {
+		if i > 0 {
+			sb.WriteString(`,`)
+		}
+
+		sb.WriteString(`[`)
+		sb.WriteString(fmt.Sprintf("%v", id))
+		sb.WriteString(`,`)
+		t.writeCoverageJSON(c[id], sb)
+		sb.WriteString(`]`)
+	}
+	sb.WriteString(`]`)
+}
+
+func (t *Tree) writeCoverageJSON(c *TestCoverage, sb *strings.Builder) {
+	sb.WriteString(`{`)
+	comma := false
+	if len(c.Spans) > 0 {
+		sb.WriteString(`"s":[`)
+		for i, spanID := range c.Spans.List() {
+			if i > 0 {
+				sb.WriteString(`,`)
+			}
+			sb.WriteString(fmt.Sprintf("%v", spanID))
+		}
+		sb.WriteString(`]`)
+		comma = true
+	}
+	if len(c.Children) > 0 {
+		if comma {
+			sb.WriteString(`,`)
+		}
+		sb.WriteString(`"c":`)
+		t.writeCoverageMapJSON(c.Children, sb)
+	}
+	sb.WriteString(`}`)
+}
diff --git a/tests/regres/cov/coverage_test.go b/tests/regres/cov/coverage_test.go
new file mode 100644
index 0000000..8695d7d
--- /dev/null
+++ b/tests/regres/cov/coverage_test.go
@@ -0,0 +1,257 @@
+// Copyright 2020 The SwiftShader Authors. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//    http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package cov_test
+
+import (
+	"reflect"
+	"strings"
+	"testing"
+
+	cov "."
+)
+
+var (
+	fileA = "coverage/file/a"
+	fileB = "coverage/file/b"
+	fileC = "coverage/file/c"
+	fileD = "coverage/file/d"
+
+	span0 = cov.Span{cov.Location{3, 2}, cov.Location{3, 9}}
+	span1 = cov.Span{cov.Location{4, 1}, cov.Location{5, 1}}
+	span2 = cov.Span{cov.Location{5, 5}, cov.Location{5, 7}}
+	span3 = cov.Span{cov.Location{7, 2}, cov.Location{7, 7}}
+)
+
+//                a
+//        ╭───────┴───────╮
+//        b               c
+//    ╭───┴───╮       ╭───┴───╮
+//    d       e       f       g
+//  ╭─┴─╮   ╭─┴─╮   ╭─┴─╮   ╭─┴─╮
+//  h   i   j   k   l   m   n   o
+//     ╭┴╮ ╭┴╮ ╭┴╮ ╭┴╮ ╭┴╮ ╭╯
+//     p q r s t u v w x y z
+//
+
+func TestTree(t *testing.T) {
+	tree := &cov.Tree{}
+
+	t.Log("Add 'b' with the coverage [0,1]")
+	tree.Add(cov.Path{"a", "b"}, coverage(fileA, span0, span1))
+
+	//           [0,1]
+	//            (a)
+	//       ╭─────╯
+	//       b
+
+	checkSpans(t, tree.Spans(), span0, span1)
+	checkTests(t, tree, `{a:{b}}`)
+	checkCoverage(t, tree, fileA, `a:{[0,1]}`)
+
+	t.Log("Add 'i' with the coverage [0,1]")
+	tree.Add(cov.Path{"a", "b", "d", "i"}, coverage(fileA, span0, span1))
+
+	//           [0,1]
+	//            (a)
+	//       ╭─────╯
+	//       b
+	//    ╭──╯
+	//    d
+	//    ╰─╮
+	//      i
+	checkSpans(t, tree.Spans(), span0, span1)
+	checkTests(t, tree, `{a:{b:{d:{i}}}}`)
+	checkCoverage(t, tree, fileA, `a:{[0,1]}`)
+
+	t.Log("Add 'e' with the coverage [0,1,2]")
+	tree.Add(cov.Path{"a", "b", "e"}, coverage(fileA, span0, span1, span2))
+
+	//           [0,1]
+	//            (a)
+	//       ┏━━━━━┛
+	//      (b)
+	//    ╭──┺━━┓
+	//    d    (e)[2]
+	//    ╰─╮
+	//      i
+	checkSpans(t, tree.Spans(), span0, span1, span2)
+	checkTests(t, tree, `{a:{b:{d:{i} e}}}`)
+	checkCoverage(t, tree, fileA, `a:{[0,1] b:{[] e:{[2]}}}`)
+
+	t.Log("Add 'n' with the coverage [0,3]")
+	tree.Add(cov.Path{"a", "c", "g", "n"}, coverage(fileA, span0, span3))
+
+	//            [0]
+	//            (a)
+	//       ┏━━━━━┻━━━━━┓
+	//   [1](b)         (c)[3]
+	//    ╭──┺━━┓        ╰──╮
+	//    d    (e)[2]       g
+	//    ╰─╮             ╭─╯
+	//      i             n
+	checkSpans(t, tree.Spans(), span0, span1, span2, span3)
+	checkTests(t, tree, `{a:{b:{d:{i}e}c:{g:{n}}}}`)
+	checkCoverage(t, tree, fileA, `a:{[0] b:{[1] e:{[2]}} c:{[3]}}`)
+
+	t.Log("Add 'o' with the coverage [0, 3]")
+	tree.Add(cov.Path{"a", "c", "g", "o"}, coverage(fileA, span0, span3))
+
+	//              [0]
+	//              (a)
+	//       ┏━━━━━━━┻━━━━━━━┓
+	//   [1](b)             (c)[3]
+	//    ╭──┺━━┓            ╰──╮
+	//    d    (e)[2]           g
+	//    ╰─╮                 ╭─┴─╮
+	//      i                 n   o
+	checkSpans(t, tree.Spans(), span0, span1, span2, span3)
+	checkTests(t, tree, `{a:{b:{d:{i}e}c:{g:{n o}}}}`)
+	checkCoverage(t, tree, fileA, `a:{[0] b:{[1] e:{[2]}} c:{[3]}}`)
+
+	t.Log("Add 'f' with the coverage [1]")
+	tree.Add(cov.Path{"a", "c", "f"}, coverage(fileA, span1))
+
+	//               (a)
+	//       ┏━━━━━━━━┻━━━━━━━━┓
+	// [0,1](b)               (c)
+	//    ╭──┺━━┓           ┏━━┻━━┓
+	//    d    (e)[2]   [1](f)   (g)[0,3]
+	//    ╰─╮                   ╭─┴─╮
+	//      i                   n   o
+	checkSpans(t, tree.Spans(), span0, span1, span2, span3)
+	checkTests(t, tree, `{a:{b:{d:{i} e} c:{f g:{n o}}}}`)
+	checkCoverage(t, tree, fileA, `a:{[] b:{[0,1] e:{[2]}} c:{[] f:{[1]} g:{[0,3]}}}`)
+
+	t.Log("Add 'j' with the coverage [3]")
+	tree.Add(cov.Path{"a", "b", "e", "j"}, coverage(fileA, span3))
+
+	//                   (a)
+	//           ┏━━━━━━━━┻━━━━━━━━┓
+	//          (b)               (c)
+	//       ┏━━━┻━━━┓          ┏━━┻━━┓
+	// [0,1](d)     (e)[3]  [1](f)   (g)[0,3]
+	//       ╰─╮   ╭─╯              ╭─┴─╮
+	//         i   j                n   o
+	checkSpans(t, tree.Spans(), span0, span1, span2, span3)
+	checkTests(t, tree, `{a:{b:{d:{i} e:{j}} c:{f g:{n o}}}}`)
+	checkCoverage(t, tree, fileA, `a:{[] b:{[] d:{[0,1]} e:{[3]}} c:{[] f:{[1]} g:{[0,3]}}}`)
+
+	t.Log("Add 'k' with the coverage [3]")
+	tree.Add(cov.Path{"a", "b", "e", "k"}, coverage(fileA, span3))
+
+	//                   (a)
+	//           ┏━━━━━━━━┻━━━━━━━━┓
+	//          (b)               (c)
+	//       ┏━━━┻━━━┓          ┏━━┻━━┓
+	// [0,1](d)     (e)[3]  [1](f)   (g)[0,3]
+	//       ╰─╮   ╭─┴─╮            ╭─┴─╮
+	//         i   j   k            n   o
+	checkSpans(t, tree.Spans(), span0, span1, span2, span3)
+	checkTests(t, tree, `{a:{b:{d:{i} e:{j k}} c:{f g:{n o}}}}`)
+	checkCoverage(t, tree, fileA, `a:{[] b:{[] d:{[0,1]} e:{[3]}} c:{[] f:{[1]} g:{[0,3]}}}`)
+
+	t.Log("Add 'v' with the coverage [1,2]")
+	tree.Add(cov.Path{"a", "c", "f", "l", "v"}, coverage(fileA, span1, span2))
+
+	//                   (a)
+	//           ┏━━━━━━━━┻━━━━━━━━━━┓
+	//          (b)                 (c)
+	//       ┏━━━┻━━━┓            ┏━━┻━━┓
+	// [0,1](d)     (e)[3]  [1,2](f)   (g)[0,3]
+	//       ╰─╮   ╭─┴─╮        ╭─╯   ╭─┴─╮
+	//         i   j   k        l     n   o
+	//                         ╭╯
+	//                         v
+	checkSpans(t, tree.Spans(), span0, span1, span2, span3)
+	checkTests(t, tree, `{a:{b:{d:{i} e:{j k}} c:{f:{l:{v}} g:{n o}}}}`)
+	checkCoverage(t, tree, fileA, `a:{[] b:{[] d:{[0,1]} e:{[3]}} c:{[] f:{[1,2]} g:{[0,3]}}}`)
+
+	t.Log("Add 'x' with the coverage [1,2]")
+	tree.Add(cov.Path{"a", "c", "f", "l", "x"}, coverage(fileA, span1, span2))
+
+	//                   (a)
+	//           ┏━━━━━━━━┻━━━━━━━━━━┓
+	//          (b)                 (c)
+	//       ┏━━━┻━━━┓            ┏━━┻━━┓
+	// [0,1](d)     (e)[3]  [1,2](f)   (g)[0,3]
+	//       ╰─╮   ╭─┴─╮        ╭─╯   ╭─┴─╮
+	//         i   j   k        l     n   o
+	//                         ╭┴╮
+	//                         v x
+	checkSpans(t, tree.Spans(), span0, span1, span2, span3)
+	checkTests(t, tree, `{a:{b:{d:{i} e:{j k}} c:{f:{l:{v x}} g:{n o}}}}`)
+	checkCoverage(t, tree, fileA, `a:{[] b:{[] d:{[0,1]} e:{[3]}} c:{[] f:{[1,2]} g:{[0,3]}}}`)
+
+	t.Log("Add 'z' with the coverage [2]")
+	tree.Add(cov.Path{"a", "c", "g", "n", "z"}, coverage(fileA, span2))
+
+	//                   (a)
+	//           ┏━━━━━━━━┻━━━━━━━━━━━━┓
+	//          (b)                   (c)
+	//       ┏━━━┻━━━┓            ┏━━━━┻━━━━┓
+	// [0,1](d)     (e)[3]  [1,2](f)       (g)
+	//       ╰─╮   ╭─┴─╮        ╭─╯       ┏━┻━┓
+	//         i   j   k        l    [2](n) (o)[0,3]
+	//                         ╭┴╮      ╭╯
+	//                         v x      z
+	checkSpans(t, tree.Spans(), span0, span1, span2, span3)
+	checkTests(t, tree, `{a:{b:{d:{i} e:{j k}} c:{f:{l:{v x}} g:{n: {z} o}}}}`)
+	checkCoverage(t, tree, fileA, `a:{[] b:{[] d:{[0,1]} e:{[3]}} c:{[] f:{[1,2]} g:{[] n:{[2]} o:{[0,3]}}}}`)
+}
+
+func checkSpans(t *testing.T, got []cov.Span, expect ...cov.Span) {
+	if !reflect.DeepEqual(got, expect) {
+		t.Errorf("Spans not as expected.\nGot:    %+v\nExpect: %+v", got, expect)
+	}
+}
+
+func checkTests(t *testing.T, tree *cov.Tree, expect string) {
+	g, e := tree.Tests().String(tree.Strings()), expect
+	if tg, te := trimWS(g), trimWS(e); tg != te {
+		t.Errorf("Tests not as expected.\nGot:\n%v\nExpect:\n%v\n------\nGot:    %v\nExpect: %v", g, e, tg, te)
+	}
+}
+
+func checkCoverage(t *testing.T, tree *cov.Tree, file string, expect string) {
+	g, e := tree.File(file).String(tree.Tests(), tree.Strings()), expect
+	if tg, te := trimWS(g), trimWS(e); tg != te {
+		t.Errorf("Coverage not as expected.\nGot:\n%v\nExpect:\n%v\n------\nGot:    %v\nExpect: %v", g, e, tg, te)
+	}
+}
+
+func trimWS(s string) string {
+	s = strings.ReplaceAll(s, " ", "")
+	s = strings.ReplaceAll(s, "\n", "")
+	return s
+}
+
+func coverage(file string, spans ...cov.Span) *cov.Coverage {
+	return &cov.Coverage{
+		[]cov.File{
+			cov.File{
+				Path:  file,
+				Spans: spans,
+			},
+		},
+	}
+}
+
+func spans(ids ...cov.SpanID) cov.SpanSet {
+	out := make(cov.SpanSet, len(ids))
+	for _, id := range ids {
+		out[id] = struct{}{}
+	}
+	return out
+}
diff --git a/tests/regres/deqp/deqp.go b/tests/regres/deqp/deqp.go
index 27beccf..fa5dc8f 100644
--- a/tests/regres/deqp/deqp.go
+++ b/tests/regres/deqp/deqp.go
@@ -19,6 +19,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"io/ioutil"
 	"log"
 	"math/rand"
 	"os"
@@ -31,6 +32,7 @@
 	"time"
 
 	"../cause"
+	"../cov"
 	"../shell"
 	"../testlist"
 	"../util"
@@ -59,10 +61,12 @@
 	ExeGles2         string
 	ExeGles3         string
 	ExeVulkan        string
+	TempDir          string // Directory for temporary log files and coverage output.
 	TestLists        testlist.Lists
 	Env              []string
 	LogReplacements  map[string]string
 	NumParallelTests int
+	CoverageEnv      *cov.Env
 	TestTimeout      time.Duration
 }
 
@@ -72,6 +76,7 @@
 	Version  int
 	Error    string
 	Tests    map[string]TestResult
+	Coverage *cov.Tree
 	Duration time.Duration
 }
 
@@ -81,6 +86,7 @@
 	Status    testlist.Status
 	TimeTaken time.Duration
 	Err       string `json:",omitempty"`
+	Coverage  *cov.Coverage
 }
 
 func (r TestResult) String() string {
@@ -131,9 +137,16 @@
 
 // Run runs all the tests.
 func (c *Config) Run() (*Results, error) {
-
 	start := time.Now()
 
+	if c.TempDir == "" {
+		dir, err := ioutil.TempDir("", "deqp")
+		if err != nil {
+			return nil, cause.Wrap(err, "Could not generate temporary directory")
+		}
+		c.TempDir = dir
+	}
+
 	// Wait group that completes once all the tests have finished.
 	wg := sync.WaitGroup{}
 	results := make(chan TestResult, 256)
@@ -200,6 +213,10 @@
 		Tests:   map[string]TestResult{},
 	}
 
+	if c.CoverageEnv != nil {
+		out.Coverage = &cov.Tree{}
+	}
+
 	// Collect the results.
 	finished := make(chan struct{})
 	lastUpdate := time.Now()
@@ -207,7 +224,6 @@
 		start, i := time.Now(), 0
 		for r := range results {
 			i++
-			out.Tests[r.Test] = r
 			if time.Since(lastUpdate) > time.Minute {
 				lastUpdate = time.Now()
 				remaining := numTests - i
@@ -215,6 +231,12 @@
 					i, numTests, util.Percent(i, numTests),
 					(time.Since(start)/time.Duration(i))*time.Duration(remaining))
 			}
+			out.Tests[r.Test] = r
+			if r.Coverage != nil {
+				path := strings.Split(r.Test, ".")
+				out.Coverage.Add(cov.Path(path), r.Coverage)
+				r.Coverage = nil // Free memory
+			}
 		}
 		close(finished)
 	}()
@@ -263,6 +285,12 @@
 		env = append(env, v)
 	}
 
+	coverageFile := filepath.Join(c.TempDir, fmt.Sprintf("%v.profraw", goroutineIndex))
+	if c.CoverageEnv != nil {
+		env = cov.AppendRuntimeEnv(env, coverageFile)
+	}
+	logPath := filepath.Join(c.TempDir, fmt.Sprintf("%v.log", goroutineIndex))
+
 nextTest:
 	for name := range tests {
 		// log.Printf("Running test '%s'\n", name)
@@ -274,6 +302,7 @@
 			"--deqp-log-images=disable",
 			"--deqp-log-shader-sources=disable",
 			"--deqp-log-flush=disable",
+			"--deqp-log-filename="+logPath,
 			"-n="+name)
 		duration := time.Since(start)
 		out := string(outRaw)
@@ -282,6 +311,15 @@
 			out = strings.ReplaceAll(out, k, v)
 		}
 
+		var coverage *cov.Coverage
+		if c.CoverageEnv != nil {
+			coverage, err = c.CoverageEnv.Import(coverageFile)
+			if err != nil {
+				log.Printf("Warning: Failed to get test coverage for test '%v'. %v", name, err)
+			}
+			os.Remove(coverageFile)
+		}
+
 		for _, test := range []struct {
 			re *regexp.Regexp
 			s  testlist.Status
@@ -298,6 +336,7 @@
 					Status:    test.s,
 					TimeTaken: duration,
 					Err:       s,
+					Coverage:  coverage,
 				}
 				continue nextTest
 			}
@@ -319,6 +358,7 @@
 				Status:    testlist.Crash,
 				TimeTaken: duration,
 				Err:       out,
+				Coverage:  coverage,
 			}
 		case shell.ErrTimeout:
 			log.Printf("Timeout for test '%v'\n", name)
@@ -326,34 +366,35 @@
 				Test:      name,
 				Status:    testlist.Timeout,
 				TimeTaken: duration,
+				Coverage:  coverage,
 			}
 		case nil:
 			toks := deqpRE.FindStringSubmatch(out)
 			if len(toks) < 3 {
 				err := fmt.Sprintf("Couldn't parse test '%v' output:\n%s", name, out)
 				log.Println("Warning: ", err)
-				results <- TestResult{Test: name, Status: testlist.Fail, Err: err}
+				results <- TestResult{Test: name, Status: testlist.Fail, Err: err, Coverage: coverage}
 				continue
 			}
 			switch toks[1] {
 			case "Pass":
-				results <- TestResult{Test: name, Status: testlist.Pass, TimeTaken: duration}
+				results <- TestResult{Test: name, Status: testlist.Pass, TimeTaken: duration, Coverage: coverage}
 			case "NotSupported":
-				results <- TestResult{Test: name, Status: testlist.NotSupported, TimeTaken: duration}
+				results <- TestResult{Test: name, Status: testlist.NotSupported, TimeTaken: duration, Coverage: coverage}
 			case "CompatibilityWarning":
-				results <- TestResult{Test: name, Status: testlist.CompatibilityWarning, TimeTaken: duration}
+				results <- TestResult{Test: name, Status: testlist.CompatibilityWarning, TimeTaken: duration, Coverage: coverage}
 			case "QualityWarning":
-				results <- TestResult{Test: name, Status: testlist.QualityWarning, TimeTaken: duration}
+				results <- TestResult{Test: name, Status: testlist.QualityWarning, TimeTaken: duration, Coverage: coverage}
 			case "Fail":
 				var err string
 				if toks[2] != "Fail" {
 					err = toks[2]
 				}
-				results <- TestResult{Test: name, Status: testlist.Fail, Err: err, TimeTaken: duration}
+				results <- TestResult{Test: name, Status: testlist.Fail, Err: err, TimeTaken: duration, Coverage: coverage}
 			default:
 				err := fmt.Sprintf("Couldn't parse test output:\n%s", out)
 				log.Println("Warning: ", err)
-				results <- TestResult{Test: name, Status: testlist.Fail, Err: err, TimeTaken: duration}
+				results <- TestResult{Test: name, Status: testlist.Fail, Err: err, TimeTaken: duration, Coverage: coverage}
 			}
 		}
 	}
diff --git a/tests/regres/llvm/llvm.go b/tests/regres/llvm/llvm.go
new file mode 100644
index 0000000..f534295
--- /dev/null
+++ b/tests/regres/llvm/llvm.go
@@ -0,0 +1,173 @@
+// Copyright 2020 The SwiftShader Authors. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//    http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package llvm provides functions and types for locating and using the llvm
+// toolchains.
+package llvm
+
+import (
+	"fmt"
+	"os/exec"
+	"path/filepath"
+	"regexp"
+	"runtime"
+	"sort"
+	"strconv"
+
+	"../util"
+)
+
+const maxLLVMVersion = 10
+
+// Version holds the build version information of an LLVM toolchain.
+type Version struct {
+	Major, Minor, Point int
+}
+
+// GreaterEqual returns true if v >= rhs.
+func (v Version) GreaterEqual(rhs Version) bool {
+	if v.Major > rhs.Major {
+		return true
+	}
+	if v.Major < rhs.Major {
+		return false
+	}
+	if v.Minor > rhs.Minor {
+		return true
+	}
+	if v.Minor < rhs.Minor {
+		return false
+	}
+	return v.Point >= rhs.Point
+}
+
+// Toolchain holds the paths and version information about an LLVM toolchain.
+type Toolchain struct {
+	Version Version
+	BinDir  string
+}
+
+// Toolchains is a list of Toolchain
+type Toolchains []Toolchain
+
+// FindAtLeast returns the toolchain with the lowest version that is at least v, or nil if none is found.
+func (l Toolchains) FindAtLeast(v Version) *Toolchain {
+	out := (*Toolchain)(nil)
+	for _, t := range l {
+		if t.Version.GreaterEqual(v) && (out == nil || out.Version.GreaterEqual(t.Version)) {
+			t := t
+			out = &t
+		}
+	}
+	return out
+}
+
+// Search looks for llvm toolchains in paths.
+// If paths is empty, then PATH is searched.
+func Search(paths ...string) Toolchains {
+	toolchains := map[Version]Toolchain{}
+	search := func(name string) {
+		if len(paths) > 0 {
+			for _, path := range paths {
+				if util.IsFile(path) {
+					path = filepath.Dir(path)
+				}
+				if t := toolchain(path); t != nil {
+					toolchains[t.Version] = *t
+					continue
+				}
+				if t := toolchain(filepath.Join(path, "bin")); t != nil {
+					toolchains[t.Version] = *t
+					continue
+				}
+			}
+		} else {
+			path, err := exec.LookPath(name)
+			if err == nil {
+				if t := toolchain(filepath.Dir(path)); t != nil {
+					toolchains[t.Version] = *t
+				}
+			}
+		}
+	}
+
+	search("clang")
+	for i := 8; i < maxLLVMVersion; i++ {
+		search(fmt.Sprintf("clang-%d", i))
+	}
+
+	out := make([]Toolchain, 0, len(toolchains))
+	for _, t := range toolchains {
+		out = append(out, t)
+	}
+	sort.Slice(out, func(i, j int) bool { return out[i].Version.GreaterEqual(out[j].Version) })
+
+	return out
+}
+
+// Cov returns the path to the llvm-cov executable.
+func (t Toolchain) Cov() string {
+	return filepath.Join(t.BinDir, "llvm-cov"+exeExt())
+}
+
+// Profdata returns the path to the llvm-profdata executable.
+func (t Toolchain) Profdata() string {
+	return filepath.Join(t.BinDir, "llvm-profdata"+exeExt())
+}
+
+func toolchain(dir string) *Toolchain {
+	t := Toolchain{BinDir: dir}
+	if t.resolve() {
+		return &t
+	}
+	return nil
+}
+
+func (t *Toolchain) resolve() bool {
+	if !util.IsFile(t.Profdata()) { // llvm-profdata doesn't have --version flag
+		return false
+	}
+	version, ok := parseVersion(t.Cov())
+	t.Version = version
+	return ok
+}
+
+func exeExt() string {
+	switch runtime.GOOS {
+	case "windows":
+		return ".exe"
+	default:
+		return ""
+	}
+}
+
+var versionRE = regexp.MustCompile(`(?:clang|LLVM) version ([0-9]+)\.([0-9]+)\.([0-9]+)`)
+
+func parseVersion(tool string) (Version, bool) {
+	out, err := exec.Command(tool, "--version").Output()
+	if err != nil {
+		return Version{}, false
+	}
+	matches := versionRE.FindStringSubmatch(string(out))
+	if len(matches) < 4 {
+		return Version{}, false
+	}
+	major, majorErr := strconv.Atoi(matches[1])
+	minor, minorErr := strconv.Atoi(matches[2])
+	point, pointErr := strconv.Atoi(matches[3])
+	if majorErr != nil || minorErr != nil || pointErr != nil {
+		return Version{}, false
+	}
+	return Version{major, minor, point}, true
+}