Regres: Generate coverage data on nightly runs

Push this to https://github.com/swiftshader-regres/swiftshader-coverage so that it can be viewed at:
https://swiftshader-regres.github.io/swiftshader-coverage/

There's quite a lot of new code and fixes in this change. The most notable:
* The regres daily run for the subzero backend now produces combined coverage information for all the test runs. The LLVM backend does not produce coverage information.
* Regres now takes two additional command line arguments: `gh-user` and `gh-pass` for the swiftshader-regres account. If you omit these, then coverage will not be produced.
* test.srcDir has been renamed to checkoutDir, as this was confusing with the `src` directory in the repo.
* The coverage JSON now contains a root field to describe the git revision to which it relates. This prevents the coverage going out of sync with the source.
* `git.CheckoutRemoteBranch()` drops back to a depth of 1 again. This was only increased to 99 to deal with issues checking out from gitlab, which we don't do any more.
* Regres now builds using `third_party/llvm-10.0`
* Fixed the `--limit` regres command line flag which wasn't actually limiting, as it was using the len() on the number of groups, not the number of tests.

Bug: b/152192800
Bug: b/152339534
Change-Id: I2d25735f485097d4efb080546d989056a3a8aab3
Reviewed-on: https://swiftshader-review.googlesource.com/c/SwiftShader/+/43168
Kokoro-Presubmit: kokoro <noreply+kokoro@google.com>
Reviewed-by: Nicolas Capens <nicolascapens@google.com>
Tested-by: Ben Clayton <bclayton@google.com>
diff --git a/tests/regres/cmd/regres/main.go b/tests/regres/cmd/regres/main.go
index d8ad0a0..a971dae 100644
--- a/tests/regres/cmd/regres/main.go
+++ b/tests/regres/cmd/regres/main.go
@@ -26,12 +26,14 @@
 package main
 
 import (
+	"archive/zip"
 	"crypto/sha1"
 	"encoding/hex"
 	"encoding/json"
 	"errors"
 	"flag"
 	"fmt"
+	"io"
 	"io/ioutil"
 	"log"
 	"math"
@@ -47,6 +49,7 @@
 
 	"../../cause"
 	"../../consts"
+	"../../cov"
 	"../../deqp"
 	"../../git"
 	"../../llvm"
@@ -58,17 +61,19 @@
 )
 
 const (
-	gitURL                  = "https://swiftshader.googlesource.com/SwiftShader"
-	gerritURL               = "https://swiftshader-review.googlesource.com/"
-	reportHeader            = "Regres report:"
-	changeUpdateFrequency   = time.Minute * 5
-	changeQueryFrequency    = time.Minute * 5
-	testTimeout             = time.Minute * 2  // timeout for a single test
-	buildTimeout            = time.Minute * 10 // timeout for a build
-	dailyUpdateTestListHour = 5                // 5am
-	fullTestListRelPath     = "tests/regres/full-tests.json"
-	ciTestListRelPath       = "tests/regres/ci-tests.json"
-	deqpConfigRelPath       = "tests/regres/deqp.json"
+	gitURL                = "https://swiftshader.googlesource.com/SwiftShader"
+	gerritURL             = "https://swiftshader-review.googlesource.com/"
+	coverageURL           = "https://$USERNAME:$PASSWORD@github.com/swiftshader-regres/swiftshader-coverage.git"
+	coverageBranch        = "gh-pages"
+	coveragePath          = "coverage/coverage.zip"
+	reportHeader          = "Regres report:"
+	changeUpdateFrequency = time.Minute * 5
+	changeQueryFrequency  = time.Minute * 5
+	testTimeout           = time.Minute * 2  // timeout for a single test
+	buildTimeout          = time.Minute * 10 // timeout for a build
+	fullTestListRelPath   = "tests/regres/full-tests.json"
+	ciTestListRelPath     = "tests/regres/ci-tests.json"
+	deqpConfigRelPath     = "tests/regres/deqp.json"
 )
 
 var (
@@ -79,6 +84,8 @@
 	gerritEmail   = flag.String("email", "$SS_REGRES_EMAIL", "gerrit email address for posting regres results")
 	gerritUser    = flag.String("user", "$SS_REGRES_USER", "gerrit username for posting regres results")
 	gerritPass    = flag.String("pass", "$SS_REGRES_PASS", "gerrit password for posting regres results")
+	githubUser    = flag.String("gh-user", "$SS_GITHUB_USER", "github user for posting coverage results")
+	githubPass    = flag.String("gh-pass", "$SS_GITHUB_PASS", "github password for posting coverage results")
 	keepCheckouts = flag.Bool("keep", false, "don't delete checkout directories after use")
 	dryRun        = flag.Bool("dry", false, "don't post regres reports to gerrit")
 	maxProcMemory = flag.Uint64("max-proc-mem", shell.MaxProcMemory, "maximum virtual memory per child process")
@@ -100,6 +107,8 @@
 		gerritEmail:   os.ExpandEnv(*gerritEmail),
 		gerritUser:    os.ExpandEnv(*gerritUser),
 		gerritPass:    os.ExpandEnv(*gerritPass),
+		githubUser:    os.ExpandEnv(*githubUser),
+		githubPass:    os.ExpandEnv(*githubPass),
 		keepCheckouts: *keepCheckouts,
 		dryRun:        *dryRun,
 		dailyNow:      *dailyNow,
@@ -124,6 +133,8 @@
 	gerritEmail   string          // gerrit email address used for posting results
 	gerritUser    string          // gerrit username used for posting results
 	gerritPass    string          // gerrit password used for posting results
+	githubUser    string          // github username used for posting results
+	githubPass    string          // github password used for posting results
 	keepCheckouts bool            // don't delete source & build checkouts after testing
 	dryRun        bool            // don't post any reviews
 	maxProcMemory uint64          // max virtual memory for child processes
@@ -284,12 +295,12 @@
 	}
 
 	for {
-		if now := time.Now(); toDate(now) != lastUpdatedTestLists && now.Hour() >= dailyUpdateTestListHour {
+		if now := time.Now(); toDate(now) != lastUpdatedTestLists {
 			lastUpdatedTestLists = toDate(now)
-			if err := r.updateTestLists(client, backendSubzero); err != nil {
+			if err := r.runDaily(client, backendSubzero, true); err != nil {
 				log.Println(err.Error())
 			}
-			if err := r.updateTestLists(client, backendLLVM); err != nil {
+			if err := r.runDaily(client, backendLLVM, false); err != nil {
 				log.Println(err.Error())
 			}
 		}
@@ -420,14 +431,14 @@
 }
 
 func (r *regres) getOrBuildDEQP(test *test) (deqpBuild, error) {
-	srcDir := test.srcDir
-	if p := path.Join(srcDir, deqpConfigRelPath); !util.IsFile(p) {
-		srcDir, _ = os.Getwd()
+	checkoutDir := test.checkoutDir
+	if p := path.Join(checkoutDir, deqpConfigRelPath); !util.IsFile(p) {
+		checkoutDir, _ = os.Getwd()
 		log.Printf("Couldn't open dEQP config file from change (%v), falling back to internal version\n", p)
 	} else {
 		log.Println("Using dEQP config file from change")
 	}
-	file, err := os.Open(path.Join(srcDir, deqpConfigRelPath))
+	file, err := os.Open(path.Join(checkoutDir, deqpConfigRelPath))
 	if err != nil {
 		return deqpBuild{}, cause.Wrap(err, "Couldn't open dEQP config file")
 	}
@@ -488,7 +499,7 @@
 
 		log.Println("Applying deqp patches")
 		for _, patch := range cfg.Patches {
-			fullPath := path.Join(srcDir, patch)
+			fullPath := path.Join(checkoutDir, patch)
 			if err := git.Apply(cacheDir, fullPath); err != nil {
 				return deqpBuild{}, cause.Wrap(err, "Couldn't apply deqp patch %v for %v @ %v", patch, cfg.Remote, cfg.SHA)
 			}
@@ -595,8 +606,22 @@
 	return results, nil
 }
 
-func (r *regres) updateTestLists(client *gerrit.Client, reactorBackend reactorBackend) error {
-	log.Printf("Updating test lists (reactorBackend: %v)\n", reactorBackend)
+// runDaily runs a full deqp run on the HEAD change, posting the results to a
+// new or existing gerrit change. If genCov is true, then coverage
+// information will be generated for the run, and committed to the
+// coverageBranch.
+func (r *regres) runDaily(client *gerrit.Client, reactorBackend reactorBackend, genCov bool) error {
+	log.Printf("Updating test lists (Backend: %v)\n", reactorBackend)
+
+	if genCov {
+		if r.githubUser == "" {
+			log.Println("--gh-user not specified and SS_GITHUB_USER not set. Disabling code coverage generation")
+			genCov = false
+		} else if r.githubPass == "" {
+			log.Println("--gh-pass not specified and SS_GITHUB_PASS not set. Disabling code coverage generation")
+			genCov = false
+		}
+	}
 
 	dailyHash := git.Hash{}
 	if r.dailyChange == "" {
@@ -629,6 +654,15 @@
 		return cause.Wrap(err, "Failed to load full test lists for '%s'", dailyHash)
 	}
 
+	if genCov {
+		test.coverageEnv = &cov.Env{
+			LLVM:     *r.toolchain,
+			RootDir:  test.checkoutDir,
+			ExePath:  filepath.Join(test.buildDir, "libvk_swiftshader.so"),
+			TurboCov: filepath.Join(test.buildDir, "turbo-cov"),
+		}
+	}
+
 	// Build the change.
 	if err := test.build(); err != nil {
 		return cause.Wrap(err, "Failed to build '%s'", dailyHash)
@@ -649,7 +683,7 @@
 	// Stage all the updated test files.
 	for _, path := range filePaths {
 		log.Println("Staging", path)
-		if err := git.Add(test.srcDir, path); err != nil {
+		if err := git.Add(test.checkoutDir, path); err != nil {
 			return err
 		}
 	}
@@ -669,7 +703,7 @@
 		commitMsg.WriteString("Change-Id: " + existingChange.ChangeID + "\n")
 	}
 
-	if err := git.Commit(test.srcDir, commitMsg.String(), git.CommitFlags{
+	if err := git.Commit(test.checkoutDir, commitMsg.String(), git.CommitFlags{
 		Name:  "SwiftShader Regression Bot",
 		Email: r.gerritEmail,
 	}); err != nil {
@@ -680,7 +714,7 @@
 		log.Printf("DRY RUN: post results for review")
 	} else {
 		log.Println("Pushing test results for review")
-		if err := git.Push(test.srcDir, gitURL, "HEAD", "refs/for/master", git.PushFlags{
+		if err := git.Push(test.checkoutDir, gitURL, "HEAD", "refs/for/master", git.PushFlags{
 			Username: r.gerritUser,
 			Password: r.gerritPass,
 		}); err != nil {
@@ -690,9 +724,9 @@
 	}
 
 	// We've just pushed a new commit. Let's reset back to the parent commit
-	// (dailyHash), so that we can run updateTestLists again for another backend,
+	// (dailyHash), so that we can run runDaily again for another backend,
 	// and have it update the commit with the same change-id.
-	if err := git.CheckoutCommit(test.srcDir, dailyHash); err != nil {
+	if err := git.CheckoutCommit(test.checkoutDir, dailyHash); err != nil {
 		return cause.Wrap(err, "Failed to checkout parent commit")
 	}
 	log.Println("Checked out parent commit")
@@ -706,6 +740,73 @@
 		return err
 	}
 
+	if genCov {
+		if err := r.commitCoverage(results.Coverage, dailyHash); err != nil {
+			return err
+		}
+	}
+
+	return nil
+}
+
+func (r *regres) commitCoverage(cov *cov.Tree, revision git.Hash) error {
+	log.Printf("Committing coverage for %v\n", revision.String())
+
+	url := coverageURL
+	url = strings.ReplaceAll(url, "$USERNAME", r.githubUser)
+	url = strings.ReplaceAll(url, "$PASSWORD", r.githubPass)
+
+	dir := filepath.Join(r.cacheRoot, "coverage")
+	defer os.RemoveAll(dir)
+	if err := git.CheckoutRemoteBranch(dir, url, coverageBranch); err != nil {
+		return fmt.Errorf("Failed to checkout gh-pages branch: %v", err)
+	}
+
+	filePath := filepath.Join(dir, "coverage.zip")
+	file, err := os.Create(filePath)
+	if err != nil {
+		return fmt.Errorf("Failed to create file '%s': %v", filePath, err)
+	}
+	defer file.Close()
+
+	coverage := cov.JSON(revision.String())
+
+	zw := zip.NewWriter(file)
+	zfw, err := zw.Create("coverage.json")
+	if err != nil {
+		return fmt.Errorf("Failed to create 'coverage.json' file in zip: %v", err)
+	}
+	if _, err := io.Copy(zfw, strings.NewReader(coverage)); err != nil {
+		return fmt.Errorf("Failed to compress coverage datas: %v", err)
+	}
+	if err := zw.Close(); err != nil {
+		return fmt.Errorf("Failed to close zip file: %v", err)
+	}
+	file.Close()
+
+	if err := git.Add(dir, filePath); err != nil {
+		return fmt.Errorf("Failed to git add '%s': %v", filePath, err)
+	}
+
+	shortHash := revision.String()[:8]
+
+	err = git.Commit(dir, "Update coverage data @ "+shortHash, git.CommitFlags{
+		Name:  "SwiftShader Regression Bot",
+		Email: r.gerritEmail,
+	})
+	if err != nil {
+		return fmt.Errorf("Failed to 'git commit': %v", err)
+	}
+
+	if !r.dryRun {
+		err = git.Push(dir, url, coverageBranch, coverageBranch, git.PushFlags{})
+		if err != nil {
+			return fmt.Errorf("Failed to 'git push': %v", err)
+		}
+	}
+
+	log.Printf("Coverage for %v pushed to Github\n", shortHash)
+
 	return nil
 }
 
@@ -887,14 +988,14 @@
 }
 
 func (r *regres) newTest(commit git.Hash) *test {
-	srcDir := filepath.Join(r.cacheRoot, "src", commit.String())
+	checkoutDir := filepath.Join(r.cacheRoot, "checkout", commit.String())
 	resDir := filepath.Join(r.cacheRoot, "res", commit.String())
 	return &test{
 		r:              r,
 		commit:         commit,
-		srcDir:         srcDir,
+		checkoutDir:    checkoutDir,
 		resDir:         resDir,
-		buildDir:       filepath.Join(srcDir, "build"),
+		buildDir:       filepath.Join(checkoutDir, "build"),
 		reactorBackend: backendLLVM,
 	}
 }
@@ -914,29 +1015,30 @@
 type test struct {
 	r              *regres
 	commit         git.Hash       // hash of the commit to test
-	srcDir         string         // directory for the SwiftShader checkout
+	checkoutDir    string         // directory for the SwiftShader checkout
 	resDir         string         // directory for the test results
 	buildDir       string         // directory for SwiftShader build
 	toolchain      llvm.Toolchain // the toolchain used for building
 	reactorBackend reactorBackend // backend for SwiftShader build
+	coverageEnv    *cov.Env       // coverage generation environment (optional).
 }
 
 // cleanup removes any temporary files used by the test.
 func (t *test) cleanup() {
-	if t.srcDir != "" && !t.r.keepCheckouts {
-		os.RemoveAll(t.srcDir)
+	if t.checkoutDir != "" && !t.r.keepCheckouts {
+		os.RemoveAll(t.checkoutDir)
 	}
 }
 
 // checkout clones the test's source commit into t.src.
 func (t *test) checkout() error {
-	if util.IsDir(t.srcDir) && t.r.keepCheckouts {
+	if util.IsDir(t.checkoutDir) && t.r.keepCheckouts {
 		log.Printf("Reusing source cache for commit '%s'\n", t.commit)
 		return nil
 	}
 	log.Printf("Checking out '%s'\n", t.commit)
-	os.RemoveAll(t.srcDir)
-	if err := git.CheckoutRemoteCommit(t.srcDir, gitURL, t.commit); err != nil {
+	os.RemoveAll(t.checkoutDir)
+	if err := git.CheckoutRemoteCommit(t.checkoutDir, gitURL, t.commit); err != nil {
 		return cause.Wrap(err, "Checking out commit '%s'", t.commit)
 	}
 	log.Printf("Checked out commit '%s'\n", t.commit)
@@ -972,13 +1074,21 @@
 		return cause.Wrap(err, "Failed to create build directory")
 	}
 
-	if err := shell.Env(buildTimeout, t.r.cmake, t.buildDir, t.r.toolchainEnv(),
-		"-DCMAKE_BUILD_TYPE=Release",
-		"-DSWIFTSHADER_DCHECK_ALWAYS_ON=1",
-		"-DREACTOR_VERIFY_LLVM_IR=1",
-		"-DREACTOR_BACKEND="+string(t.reactorBackend),
-		"-DSWIFTSHADER_WARNINGS_AS_ERRORS=0",
-		".."); err != nil {
+	args := []string{
+		`..`,
+		`-DCMAKE_BUILD_TYPE=Release`,
+		`-DSWIFTSHADER_DCHECK_ALWAYS_ON=1`,
+		`-DREACTOR_VERIFY_LLVM_IR=1`,
+		`-DREACTOR_BACKEND=` + string(t.reactorBackend),
+		`-DSWIFTSHADER_LLVM_VERSION=10.0`,
+		`-DSWIFTSHADER_WARNINGS_AS_ERRORS=0`,
+	}
+
+	if t.coverageEnv != nil {
+		args = append(args, "-DSWIFTSHADER_EMIT_COVERAGE=1")
+	}
+
+	if err := shell.Env(buildTimeout, t.r.cmake, t.buildDir, t.r.toolchainEnv(), args...); err != nil {
 		return err
 	}
 
@@ -1002,15 +1112,26 @@
 		return nil, fmt.Errorf("Couldn't find '%s'", swiftshaderICDJSON)
 	}
 
-	if *limit != 0 && len(testLists) > *limit {
-		testLists = testLists[:*limit]
+	if *limit != 0 {
+		log.Printf("Limiting tests to %d\n", *limit)
+		testLists = append(testlist.Lists{}, testLists...)
+		for i := range testLists {
+			testLists[i] = testLists[i].Limit(*limit)
+		}
 	}
 
+	// Directory for per-test small transient files, such as log files,
+	// coverage output, etc.
+	// TODO(bclayton): consider using tmpfs here.
+	tempDir := filepath.Join(t.buildDir, "temp")
+	os.MkdirAll(tempDir, 0777)
+
 	config := deqp.Config{
 		ExeEgl:    filepath.Join(d.path, "build", "modules", "egl", "deqp-egl"),
 		ExeGles2:  filepath.Join(d.path, "build", "modules", "gles2", "deqp-gles2"),
 		ExeGles3:  filepath.Join(d.path, "build", "modules", "gles3", "deqp-gles3"),
 		ExeVulkan: filepath.Join(d.path, "build", "external", "vulkancts", "modules", "vulkan", "deqp-vk"),
+		TempDir:   tempDir,
 		TestLists: testLists,
 		Env: []string{
 			"LD_LIBRARY_PATH=" + t.buildDir + ":" + os.Getenv("LD_LIBRARY_PATH"),
@@ -1019,10 +1140,11 @@
 			"LIBC_FATAL_STDERR_=1", // Put libc explosions into logs.
 		},
 		LogReplacements: map[string]string{
-			t.srcDir: "<SwiftShader>",
+			t.checkoutDir: "<SwiftShader>",
 		},
 		NumParallelTests: numParallelTests,
 		TestTimeout:      testTimeout,
+		CoverageEnv:      t.coverageEnv,
 	}
 
 	return config.Run()
@@ -1034,7 +1156,7 @@
 	for _, list := range testLists {
 		files := map[testlist.Status]*os.File{}
 		for _, status := range testlist.Statuses {
-			path := testlist.FilePathWithStatus(filepath.Join(t.srcDir, list.File), status)
+			path := testlist.FilePathWithStatus(filepath.Join(t.checkoutDir, list.File), status)
 			dir := filepath.Dir(path)
 			os.MkdirAll(dir, 0777)
 			f, err := os.Create(path)
@@ -1296,9 +1418,9 @@
 // a default set.
 func (t *test) loadTestLists(relPath string) (testlist.Lists, error) {
 	// Seach for the test.json file in the checked out source directory.
-	if path := filepath.Join(t.srcDir, relPath); util.IsFile(path) {
+	if path := filepath.Join(t.checkoutDir, relPath); util.IsFile(path) {
 		log.Printf("Loading test list '%v' from commit\n", relPath)
-		return testlist.Load(t.srcDir, path)
+		return testlist.Load(t.checkoutDir, path)
 	}
 
 	// Not found there. Search locally.
diff --git a/tests/regres/cmd/run_testlist/main.go b/tests/regres/cmd/run_testlist/main.go
index 51b17d0..2414447 100644
--- a/tests/regres/cmd/run_testlist/main.go
+++ b/tests/regres/cmd/run_testlist/main.go
@@ -132,7 +132,7 @@
 	}
 
 	if *genCoverage {
-		if err := ioutil.WriteFile("coverage.json", []byte(res.Coverage.JSON()), 0666); err != nil {
+		if err := ioutil.WriteFile("coverage.json", []byte(res.Coverage.JSON("master")), 0666); err != nil {
 			return err
 		}
 	}
diff --git a/tests/regres/cov/coverage.go b/tests/regres/cov/coverage.go
index afcafe3..736ee0d 100644
--- a/tests/regres/cov/coverage.go
+++ b/tests/regres/cov/coverage.go
@@ -85,24 +85,24 @@
 	}
 	defer os.Remove(profdata)
 
-	args := []string{
-		"export",
-		e.ExePath,
-		"-instr-profile=" + profdata,
-		"-format=text",
-	}
-	if e.LLVM.Version.GreaterEqual(llvm.Version{Major: 9}) {
-		// LLVM 9 has new flags that omit stuff we don't care about.
-		args = append(args,
-			"-skip-expansions",
-			"-skip-functions",
-		)
-	}
-
 	if e.TurboCov == "" {
+		args := []string{
+			"export",
+			e.ExePath,
+			"-instr-profile=" + profdata,
+			"-format=text",
+		}
+		if e.LLVM.Version.GreaterEqual(llvm.Version{Major: 9}) {
+			// LLVM 9 has new flags that omit stuff we don't care about.
+			args = append(args,
+				"-skip-expansions",
+				"-skip-functions",
+			)
+		}
+
 		data, err := exec.Command(e.LLVM.Cov(), args...).Output()
 		if err != nil {
-			return nil, cause.Wrap(err, "llvm-cov errored: %v", string(data))
+			return nil, cause.Wrap(err, "llvm-cov errored: %v", string(err.(*exec.ExitError).Stderr))
 		}
 		cov, err := e.parseCov(data)
 		if err != nil {
@@ -113,7 +113,7 @@
 
 	data, err := exec.Command(e.TurboCov, e.ExePath, profdata).Output()
 	if err != nil {
-		return nil, cause.Wrap(err, "turbo-cov errored: %v", string(data))
+		return nil, cause.Wrap(err, "turbo-cov errored: %v", string(err.(*exec.ExitError).Stderr))
 	}
 	cov, err := e.parseTurboCov(data)
 	if err != nil {
@@ -566,12 +566,15 @@
 }
 
 // JSON returns the full test tree serialized to JSON.
-func (t *Tree) JSON() string {
+func (t *Tree) JSON(revision string) string {
 	sb := &strings.Builder{}
 	sb.WriteString(`{`)
 
+	// write the revision
+	sb.WriteString(`"r":"` + revision + `"`)
+
 	// write the strings
-	sb.WriteString(`"n":[`)
+	sb.WriteString(`,"n":[`)
 	for i, s := range t.strings.s {
 		if i > 0 {
 			sb.WriteString(`,`)
diff --git a/tests/regres/cov/turbo-cov/main.cpp b/tests/regres/cov/turbo-cov/main.cpp
index 58fd4c4..4aabd5c 100644
--- a/tests/regres/cov/turbo-cov/main.cpp
+++ b/tests/regres/cov/turbo-cov/main.cpp
@@ -47,7 +47,7 @@
 {
 	if(argc < 3)
 	{
-		fprintf(stderr, "llvm-cov-bin <exe> <profdata>\n");
+		fprintf(stderr, "turbo-cov <exe> <profdata>\n");
 		return 1;
 	}
 
diff --git a/tests/regres/deqp/deqp.go b/tests/regres/deqp/deqp.go
index fa5dc8f..e44be99 100644
--- a/tests/regres/deqp/deqp.go
+++ b/tests/regres/deqp/deqp.go
@@ -158,7 +158,8 @@
 	// For each API that we are testing
 	for _, list := range c.TestLists {
 		// Resolve the test runner
-		var exe string
+		exe, supportsCoverage := "", false
+
 		switch list.API {
 		case testlist.EGL:
 			exe = c.ExeEgl
@@ -167,7 +168,7 @@
 		case testlist.GLES3:
 			exe = c.ExeGles3
 		case testlist.Vulkan:
-			exe = c.ExeVulkan
+			exe, supportsCoverage = c.ExeVulkan, true
 		default:
 			return nil, fmt.Errorf("Unknown API '%v'", list.API)
 		}
@@ -182,7 +183,7 @@
 		wg.Add(c.NumParallelTests)
 		for i := 0; i < c.NumParallelTests; i++ {
 			go func(index int) {
-				c.TestRoutine(exe, tests, results, index)
+				c.TestRoutine(exe, tests, results, index, supportsCoverage)
 				wg.Done()
 			}(goroutineIndex)
 			goroutineIndex++
@@ -255,7 +256,7 @@
 // is written to results.
 // TestRoutine only returns once the tests chan has been closed.
 // TestRoutine does not close the results chan.
-func (c *Config) TestRoutine(exe string, tests <-chan string, results chan<- TestResult, goroutineIndex int) {
+func (c *Config) TestRoutine(exe string, tests <-chan string, results chan<- TestResult, goroutineIndex int, supportsCoverage bool) {
 	// Context for the GCOV_PREFIX environment variable:
 	// If you compile SwiftShader with gcc and the --coverage flag, the build will contain coverage instrumentation.
 	// We can use this to get the code coverage of SwiftShader from running dEQP.
@@ -286,8 +287,10 @@
 	}
 
 	coverageFile := filepath.Join(c.TempDir, fmt.Sprintf("%v.profraw", goroutineIndex))
-	if c.CoverageEnv != nil {
-		env = cov.AppendRuntimeEnv(env, coverageFile)
+	if supportsCoverage {
+		if c.CoverageEnv != nil {
+			env = cov.AppendRuntimeEnv(env, coverageFile)
+		}
 	}
 	logPath := filepath.Join(c.TempDir, fmt.Sprintf("%v.log", goroutineIndex))
 
@@ -312,10 +315,10 @@
 		}
 
 		var coverage *cov.Coverage
-		if c.CoverageEnv != nil {
+		if c.CoverageEnv != nil && supportsCoverage { // supportsCoverage check here is for GLES tests that don't emit coverage.
 			coverage, err = c.CoverageEnv.Import(coverageFile)
 			if err != nil {
-				log.Printf("Warning: Failed to get test coverage for test '%v'. %v", name, err)
+				log.Printf("Warning: Failed to process test coverage for test '%v'. %v", name, err)
 			}
 			os.Remove(coverageFile)
 		}
diff --git a/tests/regres/git/git.go b/tests/regres/git/git.go
index 6598bd4..33352b2 100644
--- a/tests/regres/git/git.go
+++ b/tests/regres/git/git.go
@@ -120,10 +120,8 @@
 	for _, cmds := range [][]string{
 		{"init"},
 		{"remote", "add", "origin", url},
-		// Note: this depth is here to prevent massive dEQP checkouts that can
-		// take all day. If the commit cannot be found in the checked out branch
-		// then this limit may need to be increased.
-		{"fetch", "origin", "--depth=99", branch},
+		{"fetch", "origin", "--depth=1", branch},
+		{"checkout", branch},
 	} {
 		if err := shell.Shell(gitTimeout, exe, path, cmds...); err != nil {
 			os.RemoveAll(path)
diff --git a/tests/regres/run.sh b/tests/regres/run.sh
index 7dd18f4..51bb6f3 100755
--- a/tests/regres/run.sh
+++ b/tests/regres/run.sh
@@ -2,4 +2,4 @@
 
 ROOT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}")" >/dev/null 2>&1 && pwd )"
 
-go run $ROOT_DIR/cmd/regres/main.go $@
+go run $ROOT_DIR/cmd/regres/main.go $@ 2>&1 | tee regres-log.txt
diff --git a/tests/regres/testlist/testlist.go b/tests/regres/testlist/testlist.go
index a9f5889..22e72c4 100644
--- a/tests/regres/testlist/testlist.go
+++ b/tests/regres/testlist/testlist.go
@@ -79,6 +79,20 @@
 	return out
 }
 
+// Limit returns a new Group that contains a maximum of limit tests.
+func (g Group) Limit(limit int) Group {
+	out := Group{
+		Name:  g.Name,
+		File:  g.File,
+		API:   g.API,
+		Tests: g.Tests,
+	}
+	if len(g.Tests) > limit {
+		out.Tests = g.Tests[:limit]
+	}
+	return out
+}
+
 // Lists is the full list of tests to be run.
 type Lists []Group