1// Copyright 2019 The SwiftShader Authors. All Rights Reserved.
2//
3// Licensed under the Apache License, Version 2.0 (the "License");
4// you may not use this file except in compliance with the License.
5// You may obtain a copy of the License at
6//
7//    http://www.apache.org/licenses/LICENSE-2.0
8//
9// Unless required by applicable law or agreed to in writing, software
10// distributed under the License is distributed on an "AS IS" BASIS,
11// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12// See the License for the specific language governing permissions and
13// limitations under the License.
14
15// regres is a tool that detects test regressions with SwiftShader changes.
16//
17// Regres monitors changes that have been put up for review with Gerrit.
18// Once a new patchset has been found, regres will checkout, build and test the
19// change against the parent changelist. Any differences in results are reported
20// as a review comment on the change.
21//
22// Once a day regres will also test another, larger set of tests, and post the
23// full test results as a Gerrit changelist. The CI test lists can be based from
24// this daily test list, so testing can be limited to tests that were known to
25// pass.
26package main
27
28import (
29	"crypto/sha1"
30	"encoding/hex"
31	"encoding/json"
32	"errors"
33	"flag"
34	"fmt"
35	"io/ioutil"
36	"log"
37	"math"
38	"os"
39	"os/exec"
40	"path"
41	"path/filepath"
42	"regexp"
43	"runtime"
44	"sort"
45	"strings"
46	"time"
47
48	"../../cause"
49	"../../consts"
50	"../../cov"
51	"../../deqp"
52	"../../git"
53	"../../llvm"
54	"../../shell"
55	"../../testlist"
56	"../../util"
57
58	gerrit "github.com/andygrunwald/go-gerrit"
59)
60
// Service endpoints, file locations and timing constants.
const (
	gitURL                = "https://swiftshader.googlesource.com/SwiftShader"
	gerritURL             = "https://swiftshader-review.googlesource.com/"
	coverageURL           = "https://$USERNAME:$PASSWORD@github.com/swiftshader-regres/swiftshader-coverage.git" // $USERNAME / $PASSWORD are substituted before use
	coverageBranch        = "gh-pages"
	coveragePath          = "coverage/coverage.zip"
	reportHeader          = "Regres report:" // prefix on every posted review; used to detect already-tested patchsets
	changeUpdateFrequency = time.Minute * 5  // how often a tracked change's info is re-fetched
	changeQueryFrequency  = time.Minute * 5  // how often gerrit is queried for new open changes
	testTimeout           = time.Minute * 2  // timeout for a single test
	buildTimeout          = time.Minute * 10 // timeout for a build
	fullTestListRelPath   = "tests/regres/full-tests.json" // full test list, used by the daily run
	ciTestListRelPath     = "tests/regres/ci-tests.json"   // smaller test list, used when testing changes
	deqpConfigRelPath     = "tests/regres/deqp.json"       // dEQP checkout / build configuration
)
76
var (
	numParallelTests = runtime.NumCPU()        // defaults to the number of logical CPUs
	llvmVersion      = llvm.Version{Major: 10} // LLVM toolchain version used to build SwiftShader

	// Command line flags. The credential flags default to environment
	// variable references which are expanded with os.ExpandEnv() in main().
	cacheDir      = flag.String("cache", "cache", "path to the output cache directory")
	gerritEmail   = flag.String("email", "$SS_REGRES_EMAIL", "gerrit email address for posting regres results")
	gerritUser    = flag.String("user", "$SS_REGRES_USER", "gerrit username for posting regres results")
	gerritPass    = flag.String("pass", "$SS_REGRES_PASS", "gerrit password for posting regres results")
	githubUser    = flag.String("gh-user", "$SS_GITHUB_USER", "github user for posting coverage results")
	githubPass    = flag.String("gh-pass", "$SS_GITHUB_PASS", "github password for posting coverage results")
	keepCheckouts = flag.Bool("keep", false, "don't delete checkout directories after use")
	dryRun        = flag.Bool("dry", false, "don't post regres reports to gerrit")
	maxProcMemory = flag.Uint64("max-proc-mem", shell.MaxProcMemory, "maximum virtual memory per child process")
	dailyNow      = flag.Bool("dailynow", false, "Start by running the daily pass")
	dailyOnly     = flag.Bool("dailyonly", false, "Run only the daily pass")
	dailyChange   = flag.String("dailychange", "", "Change hash to use for daily pass, HEAD if not provided")
	priority      = flag.String("priority", "", "Prioritize a single change with the given id")
	limit         = flag.Int("limit", 0, "only run a maximum of this number of tests")
)
96
97func main() {
98	flag.ErrHelp = errors.New("regres is a tool to detect regressions between versions of SwiftShader")
99	flag.Parse()
100
101	shell.MaxProcMemory = *maxProcMemory
102
103	r := regres{
104		cacheRoot:     *cacheDir,
105		gerritEmail:   os.ExpandEnv(*gerritEmail),
106		gerritUser:    os.ExpandEnv(*gerritUser),
107		gerritPass:    os.ExpandEnv(*gerritPass),
108		githubUser:    os.ExpandEnv(*githubUser),
109		githubPass:    os.ExpandEnv(*githubPass),
110		keepCheckouts: *keepCheckouts,
111		dryRun:        *dryRun,
112		dailyNow:      *dailyNow,
113		dailyOnly:     *dailyOnly,
114		dailyChange:   *dailyChange,
115		priority:      *priority,
116	}
117
118	if err := r.run(); err != nil {
119		fmt.Fprintln(os.Stderr, err)
120		os.Exit(-1)
121	}
122}
123
// regres holds the resolved tool paths, credentials and configuration used by
// a single run of the regres tool.
type regres struct {
	cmake         string          // path to cmake executable
	make          string          // path to make executable
	python        string          // path to python executable
	tar           string          // path to tar executable
	cacheRoot     string          // path to the regres cache directory
	toolchain     *llvm.Toolchain // the LLVM toolchain used to build SwiftShader
	gerritEmail   string          // gerrit email address used for posting results
	gerritUser    string          // gerrit username used for posting results
	gerritPass    string          // gerrit password used for posting results
	githubUser    string          // github username used for posting results
	githubPass    string          // github password used for posting results
	keepCheckouts bool            // don't delete source & build checkouts after testing
	dryRun        bool            // don't post any reviews
	maxProcMemory uint64          // max virtual memory for child processes
	dailyNow      bool            // start with a daily run
	dailyOnly     bool            // run only the daily run
	dailyChange   string          // Change hash to use for daily pass, HEAD if not provided
	priority      string          // Prioritize a single change with the given id
}
144
145// getToolchain returns the LLVM toolchain, possibly downloading and
146// decompressing it if it wasn't found in the cache directory.
147func getToolchain(tarExe, cacheRoot string) (*llvm.Toolchain, error) {
148	path := filepath.Join(cacheRoot, "llvm")
149
150	if toolchain := llvm.Search(path).Find(llvmVersion); toolchain != nil {
151		return toolchain, nil
152	}
153
154	// LLVM toolchain may have been updated, remove the directory if it exists.
155	os.RemoveAll(path)
156
157	log.Printf("Downloading LLVM %v toolchain...\n", llvmVersion)
158	tar, err := llvmVersion.Download()
159	if err != nil {
160		return nil, fmt.Errorf("Couldn't download LLVM %v: %v", llvmVersion, err)
161	}
162
163	tarFile := filepath.Join(cacheRoot, "llvm.tar.xz")
164	if err := ioutil.WriteFile(tarFile, tar, 0666); err != nil {
165		return nil, fmt.Errorf("Couldn't write '%v': %v", tarFile, err)
166	}
167	defer os.Remove(tarFile)
168
169	log.Printf("Decompressing LLVM %v toolchain...\n", llvmVersion)
170	target := filepath.Join(cacheRoot, "llvm-tmp")
171	os.MkdirAll(target, 0755)
172	defer os.RemoveAll(target)
173	if err := exec.Command(tarExe, "-xf", tarFile, "-C", target).Run(); err != nil {
174		return nil, fmt.Errorf("Couldn't decompress LLVM tar download: %v", err)
175	}
176
177	// The tar, once decompressed, holds a single root directory with a name
178	// starting with 'clang+llvm'. Move this to path.
179	files, err := filepath.Glob(filepath.Join(target, "*"))
180	if err != nil {
181		return nil, fmt.Errorf("Couldn't glob decompressed files: %v", err)
182	}
183	if len(files) != 1 || !util.IsDir(files[0]) {
184		return nil, fmt.Errorf("Unexpected decompressed files: %+v", files)
185	}
186	if err := os.Rename(files[0], path); err != nil {
187		return nil, fmt.Errorf("Couldn't move %v to %v", files[0], path)
188	}
189
190	// We should now have everything in the right place.
191	toolchain := llvm.Search(path).Find(llvmVersion)
192	if toolchain == nil {
193		return nil, fmt.Errorf("Couldn't find LLVM toolchain after downloading")
194	}
195
196	return toolchain, nil
197}
198
199// toolchainEnv() returns the environment variables for executing CMake commands.
200func (r *regres) toolchainEnv() []string {
201	return append([]string{
202		"CC=" + r.toolchain.Clang(),
203		"CXX=" + r.toolchain.ClangXX(),
204	}, os.Environ()...)
205}
206
207// resolveDirs ensures that the necessary directories used can be found, and
208// expands them to absolute paths.
209func (r *regres) resolveDirs() error {
210	allDirs := []*string{
211		&r.cacheRoot,
212	}
213
214	for _, path := range allDirs {
215		abs, err := filepath.Abs(*path)
216		if err != nil {
217			return cause.Wrap(err, "Couldn't find path '%v'", *path)
218		}
219		*path = abs
220	}
221
222	if err := os.MkdirAll(r.cacheRoot, 0777); err != nil {
223		return cause.Wrap(err, "Couldn't create cache root directory")
224	}
225
226	for _, path := range allDirs {
227		if !util.IsDir(*path) {
228			return fmt.Errorf("Couldn't find path '%v'", *path)
229		}
230	}
231
232	return nil
233}
234
235// resolveExes resolves all external executables used by regres.
236func (r *regres) resolveExes() error {
237	type exe struct {
238		name string
239		path *string
240	}
241	for _, e := range []exe{
242		{"cmake", &r.cmake},
243		{"make", &r.make},
244		{"python", &r.python},
245		{"tar", &r.tar},
246	} {
247		path, err := exec.LookPath(e.name)
248		if err != nil {
249			return cause.Wrap(err, "Couldn't find path to %s", e.name)
250		}
251		*e.path = path
252	}
253	return nil
254}
255
// run performs the main processing loop for the regres tool. It:
//   - Scans for open and recently updated changes in gerrit using
//     queryChanges() and changeInfo.update().
//   - Builds the most recent patchset and the commit's parent CL using
//     r.newTest(<hash>).lazyRun().
//   - Compares the results of the tests using compare().
//   - Posts the results of the compare to gerrit as a review.
//   - Repeats the above steps until the process is interrupted.
func (r *regres) run() error {
	if err := r.resolveExes(); err != nil {
		return cause.Wrap(err, "Couldn't resolve all exes")
	}

	if err := r.resolveDirs(); err != nil {
		return cause.Wrap(err, "Couldn't resolve all directories")
	}

	toolchain, err := getToolchain(r.tar, r.cacheRoot)
	if err != nil {
		return cause.Wrap(err, "Couldn't download LLVM toolchain")
	}
	r.toolchain = toolchain

	client, err := gerrit.NewClient(gerritURL, nil)
	if err != nil {
		return cause.Wrap(err, "Couldn't create gerrit client")
	}
	if r.gerritUser != "" {
		client.Authentication.SetBasicAuth(r.gerritUser, r.gerritPass)
	}

	changes := map[string]*changeInfo{} // Change ID -> changeInfo
	lastUpdatedTestLists := toDate(time.Now())
	lastQueriedChanges := time.Time{}

	// --dailynow / --dailyonly force the daily pass on the first loop
	// iteration by clearing the last-run date.
	if r.dailyNow || r.dailyOnly {
		lastUpdatedTestLists = date{}
	}

	for {
		// Run the daily pass at most once per calendar day, once for each
		// reactor backend (coverage is only generated for the second).
		if now := time.Now(); toDate(now) != lastUpdatedTestLists {
			lastUpdatedTestLists = toDate(now)
			if err := r.runDaily(client, backendLLVM, false); err != nil {
				log.Println(err.Error())
			}
			if err := r.runDaily(client, backendSubzero, true); err != nil {
				log.Println(err.Error())
			}
		}

		if r.dailyOnly {
			log.Println("Daily finished with --dailyonly. Stopping")
			return nil
		}

		// Update list of tracked changes.
		if time.Since(lastQueriedChanges) > changeQueryFrequency {
			lastQueriedChanges = time.Now()
			if err := queryChanges(client, changes); err != nil {
				log.Println(err.Error())
			}
		}

		// Update change info.
		for _, change := range changes {
			if time.Since(change.lastUpdated) > changeUpdateFrequency {
				change.lastUpdated = time.Now()
				err := change.update(client)
				if err != nil {
					log.Println(cause.Wrap(err, "Couldn't update info for change '%s'", change.id))
				}
			}
		}

		// Boost the --priority change (if any) above all others.
		for _, c := range changes {
			if c.pending && r.priority == c.id {
				log.Printf("Prioritizing change '%s'\n", c.id)
				c.priority = 1e6
			}
		}

		// Find the change with the highest priority.
		var change *changeInfo
		numPending := 0
		for _, c := range changes {
			if c.pending {
				numPending++
				if change == nil || c.priority > change.priority {
					change = c
				}
			}
		}

		if change == nil {
			// Everything up to date. Take a break.
			log.Println("Nothing to do. Sleeping")
			time.Sleep(time.Minute)
			continue
		}

		log.Printf("%d changes queued for testing\n", numPending)

		log.Printf("Testing change '%s'\n", change.id)

		// Test the latest patchset in the change, diff against parent change.
		msg, alert, err := r.test(change)
		if err != nil {
			// Mark the change as no longer pending so a broken change cannot
			// wedge the loop; it will be retried once a new patchset appears.
			log.Println(cause.Wrap(err, "Failed to test changelist '%s'", change.latest))
			time.Sleep(time.Minute)
			change.pending = false
			continue
		}

		// Always include the reportHeader in the message.
		// changeInfo.update() uses this header to detect whether a patchset has
		// already got a test result.
		msg = reportHeader + "\n\n" + msg

		// Limit the message length to prevent '400 Bad Request' response.
		maxMsgLength := 16000
		if len(msg) > maxMsgLength {
			trunc := " [truncated]\n"
			msg = msg[0:maxMsgLength-len(trunc)] + trunc
		}

		if r.dryRun {
			log.Printf("DRY RUN: add review to change '%v':\n%v\n", change.id, msg)
		} else {
			log.Printf("Posting review to '%s'\n", change.id)
			// Regressions (alert) notify the reviewers, not just the owner.
			notify := "OWNER"
			if alert {
				notify = "OWNER_REVIEWERS"
			}
			_, _, err = client.Changes.SetReview(change.id, change.latest.String(), &gerrit.ReviewInput{
				Message: msg,
				Tag:     "autogenerated:regress",
				Notify:  notify,
			})
			if err != nil {
				return cause.Wrap(err, "Failed to post comments on change '%s'", change.id)
			}
		}
		change.pending = false
	}
}
401
402func (r *regres) test(change *changeInfo) (string, bool, error) {
403	latest := r.newTest(change.latest)
404	defer latest.cleanup()
405
406	if err := latest.checkout(); err != nil {
407		return "", true, cause.Wrap(err, "Failed to checkout '%s'", change.latest)
408	}
409
410	deqpBuild, err := r.getOrBuildDEQP(latest)
411	if err != nil {
412		return "", true, cause.Wrap(err, "Failed to build dEQP '%v' for change", change.id)
413	}
414
415	log.Printf("Testing latest patchset for change '%s'\n", change.id)
416	latestResults, testlists, err := r.testLatest(change, latest, deqpBuild)
417	if err != nil {
418		return "", true, cause.Wrap(err, "Failed to test latest change of '%v'", change.id)
419	}
420
421	log.Printf("Testing parent of change '%s'\n", change.id)
422	parentResults, err := r.testParent(change, testlists, deqpBuild)
423	if err != nil {
424		return "", true, cause.Wrap(err, "Failed to test parent change of '%v'", change.id)
425	}
426
427	log.Println("Comparing latest patchset's results with parent")
428	msg, alert := compare(parentResults, latestResults)
429
430	return msg, alert, nil
431}
432
// deqpBuild describes a cached checkout and build of dEQP, identified by the
// hash of the deqp.json configuration that produced it.
type deqpBuild struct {
	path string // path to deqp directory
	hash string // hash of the deqp config
}
437
// getOrBuildDEQP returns a dEQP build for the given test's checkout,
// checking out, patching and building dEQP if no cached build exists for the
// checkout's deqp.json configuration. Builds are cached under
// <cacheRoot>/deqp/<config-hash>.
func (r *regres) getOrBuildDEQP(test *test) (deqpBuild, error) {
	checkoutDir := test.checkoutDir
	// Prefer the change's own deqp.json; fall back to the config in the
	// current working directory if the change doesn't carry one.
	if p := path.Join(checkoutDir, deqpConfigRelPath); !util.IsFile(p) {
		checkoutDir, _ = os.Getwd()
		log.Printf("Couldn't open dEQP config file from change (%v), falling back to internal version\n", p)
	} else {
		log.Println("Using dEQP config file from change")
	}
	file, err := os.Open(path.Join(checkoutDir, deqpConfigRelPath))
	if err != nil {
		return deqpBuild{}, cause.Wrap(err, "Couldn't open dEQP config file")
	}
	defer file.Close()

	cfg := struct {
		Remote  string   `json:"remote"`
		Branch  string   `json:"branch"`
		SHA     string   `json:"sha"`
		Patches []string `json:"patches"`
	}{}
	if err := json.NewDecoder(file).Decode(&cfg); err != nil {
		return deqpBuild{}, cause.Wrap(err, "Couldn't parse %s", deqpConfigRelPath)
	}

	// The cache key is the SHA-1 of the re-encoded config, so any change to
	// remote, branch, SHA or patches produces a fresh build.
	hasher := sha1.New()
	if err := json.NewEncoder(hasher).Encode(&cfg); err != nil {
		return deqpBuild{}, cause.Wrap(err, "Couldn't re-encode %s", deqpConfigRelPath)
	}
	hash := hex.EncodeToString(hasher.Sum(nil))
	cacheDir := path.Join(r.cacheRoot, "deqp", hash)
	buildDir := path.Join(cacheDir, "build")
	if !util.IsDir(cacheDir) {
		if err := os.MkdirAll(cacheDir, 0777); err != nil {
			return deqpBuild{}, cause.Wrap(err, "Couldn't make deqp cache directory '%s'", cacheDir)
		}

		// Remove the partially-populated cache directory on any failure below,
		// so a broken build isn't treated as cached on the next run.
		success := false
		defer func() {
			if !success {
				os.RemoveAll(cacheDir)
			}
		}()

		if cfg.Branch != "" {
			// If a branch is specified, then fetch the branch then checkout the
			// commit by SHA. This is a workaround for git repos that error when
			// attempting to directly checkout a remote commit.
			log.Printf("Checking out deqp %v branch %v into %v\n", cfg.Remote, cfg.Branch, cacheDir)
			if err := git.CheckoutRemoteBranch(cacheDir, cfg.Remote, cfg.Branch); err != nil {
				return deqpBuild{}, cause.Wrap(err, "Couldn't checkout deqp branch %v @ %v", cfg.Remote, cfg.Branch)
			}
			log.Printf("Checking out deqp %v commit %v \n", cfg.Remote, cfg.SHA)
			if err := git.CheckoutCommit(cacheDir, git.ParseHash(cfg.SHA)); err != nil {
				return deqpBuild{}, cause.Wrap(err, "Couldn't checkout deqp commit %v @ %v", cfg.Remote, cfg.SHA)
			}
		} else {
			log.Printf("Checking out deqp %v @ %v into %v\n", cfg.Remote, cfg.SHA, cacheDir)
			if err := git.CheckoutRemoteCommit(cacheDir, cfg.Remote, git.ParseHash(cfg.SHA)); err != nil {
				return deqpBuild{}, cause.Wrap(err, "Couldn't checkout deqp commit %v @ %v", cfg.Remote, cfg.SHA)
			}
		}

		log.Println("Fetching deqp dependencies")
		if err := shell.Shell(buildTimeout, r.python, cacheDir, "external/fetch_sources.py"); err != nil {
			return deqpBuild{}, cause.Wrap(err, "Couldn't fetch deqp sources %v @ %v", cfg.Remote, cfg.SHA)
		}

		log.Println("Applying deqp patches")
		// Patch paths are relative to the checkout the config came from.
		for _, patch := range cfg.Patches {
			fullPath := path.Join(checkoutDir, patch)
			if err := git.Apply(cacheDir, fullPath); err != nil {
				return deqpBuild{}, cause.Wrap(err, "Couldn't apply deqp patch %v for %v @ %v", patch, cfg.Remote, cfg.SHA)
			}
		}

		log.Printf("Building deqp into %v\n", buildDir)
		if err := os.MkdirAll(buildDir, 0777); err != nil {
			return deqpBuild{}, cause.Wrap(err, "Couldn't make deqp build directory '%v'", buildDir)
		}

		if err := shell.Shell(buildTimeout, r.cmake, buildDir,
			"-DDEQP_TARGET=x11_egl",
			"-DCMAKE_BUILD_TYPE=Release",
			".."); err != nil {
			return deqpBuild{}, cause.Wrap(err, "Couldn't generate build rules for deqp %v @ %v", cfg.Remote, cfg.SHA)
		}

		if err := shell.Shell(buildTimeout, r.make, buildDir, fmt.Sprintf("-j%d", runtime.NumCPU())); err != nil {
			return deqpBuild{}, cause.Wrap(err, "Couldn't build deqp %v @ %v", cfg.Remote, cfg.SHA)
		}

		success = true
	}

	return deqpBuild{
		path: cacheDir,
		hash: hash,
	}, nil
}
537
// additionalTestsRE matches "Test:" / "Tests:" lines in a change's commit
// message, capturing the test name pattern that follows the colon.
var additionalTestsRE = regexp.MustCompile(`\n\s*Test[s]?:\s*([^\s]+)[^\n]*`)
539
540func (r *regres) testLatest(change *changeInfo, test *test, d deqpBuild) (*deqp.Results, testlist.Lists, error) {
541	// Get the test results for the latest patchset in the change.
542	testlists, err := test.loadTestLists(ciTestListRelPath)
543	if err != nil {
544		return nil, nil, cause.Wrap(err, "Failed to load '%s'", change.latest)
545	}
546
547	if matches := additionalTestsRE.FindAllStringSubmatch(change.commitMessage, -1); len(matches) > 0 {
548		log.Println("Change description contains additional test patterns")
549
550		// Change specifies additional tests to try. Load the full test list.
551		fullTestLists, err := test.loadTestLists(fullTestListRelPath)
552		if err != nil {
553			return nil, nil, cause.Wrap(err, "Failed to load '%s'", change.latest)
554		}
555
556		// Add any tests in the full list that match the pattern to the list to test.
557		for _, match := range matches {
558			if len(match) > 1 {
559				pattern := match[1]
560				log.Printf("Adding custom tests with pattern '%s'\n", pattern)
561				filtered := fullTestLists.Filter(func(name string) bool {
562					ok, _ := filepath.Match(pattern, name)
563					return ok
564				})
565				testlists = append(testlists, filtered...)
566			}
567		}
568	}
569
570	cachePath := test.resultsCachePath(testlists, d)
571
572	if results, err := deqp.LoadResults(cachePath); err == nil {
573		return results, testlists, nil // Use cached results
574	}
575
576	// Build the change and test it.
577	results := test.buildAndRun(testlists, d)
578
579	// Cache the results for future tests
580	if err := results.Save(cachePath); err != nil {
581		log.Printf("Warning: Couldn't save results of test to '%v'\n", cachePath)
582	}
583
584	return results, testlists, nil
585}
586
587func (r *regres) testParent(change *changeInfo, testlists testlist.Lists, d deqpBuild) (*deqp.Results, error) {
588	// Get the test results for the changes's parent changelist.
589	test := r.newTest(change.parent)
590	defer test.cleanup()
591
592	cachePath := test.resultsCachePath(testlists, d)
593
594	if results, err := deqp.LoadResults(cachePath); err == nil {
595		return results, nil // Use cached results
596	}
597
598	// Couldn't load cached results. Have to build them.
599	if err := test.checkout(); err != nil {
600		return nil, cause.Wrap(err, "Failed to checkout '%s'", change.parent)
601	}
602
603	// Build the parent change and test it.
604	results := test.buildAndRun(testlists, d)
605
606	// Store the results of the parent change to the cache.
607	if err := results.Save(cachePath); err != nil {
608		log.Printf("Warning: Couldn't save results of test to '%v'\n", cachePath)
609	}
610
611	return results, nil
612}
613
614// runDaily runs a full deqp run on the HEAD change, posting the results to a
615// new or existing gerrit change. If genCov is true, then coverage
616// information will be generated for the run, and commiteed to the
617// coverageBranch.
618func (r *regres) runDaily(client *gerrit.Client, reactorBackend reactorBackend, genCov bool) error {
619	log.Printf("Updating test lists (Backend: %v)\n", reactorBackend)
620
621	if genCov {
622		if r.githubUser == "" {
623			log.Println("--gh-user not specified and SS_GITHUB_USER not set. Disabling code coverage generation")
624			genCov = false
625		} else if r.githubPass == "" {
626			log.Println("--gh-pass not specified and SS_GITHUB_PASS not set. Disabling code coverage generation")
627			genCov = false
628		}
629	}
630
631	dailyHash := git.Hash{}
632	if r.dailyChange == "" {
633		headHash, err := git.FetchRefHash("HEAD", gitURL)
634		if err != nil {
635			return cause.Wrap(err, "Could not get hash of master HEAD")
636		}
637		dailyHash = headHash
638	} else {
639		dailyHash = git.ParseHash(r.dailyChange)
640	}
641
642	return r.runDailyTest(dailyHash, reactorBackend, genCov,
643		func(test *test, testLists testlist.Lists, results *deqp.Results) error {
644			errs := []error{}
645
646			if err := r.postDailyResults(client, test, testLists, results, reactorBackend, dailyHash); err != nil {
647				errs = append(errs, err)
648			}
649
650			if genCov {
651				if err := r.postCoverageResults(results.Coverage, dailyHash); err != nil {
652					errs = append(errs, err)
653				}
654			}
655
656			return cause.Merge(errs...)
657		})
658}
659
660// runDailyTest performs the full deqp run on the HEAD change, calling
661// withResults with the test results.
662func (r *regres) runDailyTest(dailyHash git.Hash, reactorBackend reactorBackend, genCov bool, withResults func(*test, testlist.Lists, *deqp.Results) error) error {
663	// Get the full test results.
664	test := r.newTest(dailyHash).setReactorBackend(reactorBackend)
665	defer test.cleanup()
666
667	// Always need to checkout the change.
668	if err := test.checkout(); err != nil {
669		return cause.Wrap(err, "Failed to checkout '%s'", dailyHash)
670	}
671
672	d, err := r.getOrBuildDEQP(test)
673	if err != nil {
674		return cause.Wrap(err, "Failed to build deqp for '%s'", dailyHash)
675	}
676
677	// Load the test lists.
678	testLists, err := test.loadTestLists(fullTestListRelPath)
679	if err != nil {
680		return cause.Wrap(err, "Failed to load full test lists for '%s'", dailyHash)
681	}
682
683	if genCov {
684		test.coverageEnv = &cov.Env{
685			LLVM:     *r.toolchain,
686			RootDir:  test.checkoutDir,
687			ExePath:  filepath.Join(test.buildDir, "libvk_swiftshader.so"),
688			TurboCov: filepath.Join(test.buildDir, "turbo-cov"),
689		}
690	}
691
692	// Build the change.
693	if err := test.build(); err != nil {
694		return cause.Wrap(err, "Failed to build '%s'", dailyHash)
695	}
696
697	// Run the tests on the change.
698	results, err := test.run(testLists, d)
699	if err != nil {
700		return cause.Wrap(err, "Failed to test '%s'", dailyHash)
701	}
702
703	return withResults(test, testLists, results)
704}
705
// postDailyResults posts the results of the daily full deqp run to gerrit as
// a new change, or reusing an old, unsubmitted change.
// This change contains the updated test lists, along with a summary of the
// test results.
func (r *regres) postDailyResults(
	client *gerrit.Client,
	test *test,
	testLists testlist.Lists,
	results *deqp.Results,
	reactorBackend reactorBackend,
	dailyHash git.Hash) error {

	// Write out the test list status files.
	filePaths, err := test.writeTestListsByStatus(testLists, results)
	if err != nil {
		return cause.Wrap(err, "Failed to write test lists by status")
	}

	// Stage all the updated test files.
	for _, path := range filePaths {
		log.Println("Staging", path)
		if err := git.Add(test.checkoutDir, path); err != nil {
			return err
		}
	}

	log.Println("Checking for existing test list")
	existingChange, err := r.findTestListChange(client)
	if err != nil {
		return err
	}

	commitMsg := strings.Builder{}
	commitMsg.WriteString(consts.TestListUpdateCommitSubjectPrefix + dailyHash.String()[:8])
	commitMsg.WriteString("\n\nReactor backend: " + string(reactorBackend))
	if existingChange != nil {
		// Reuse gerrit change ID if there's already a change up for review.
		commitMsg.WriteString("\n\n")
		commitMsg.WriteString("Change-Id: " + existingChange.ChangeID + "\n")
	}

	if err := git.Commit(test.checkoutDir, commitMsg.String(), git.CommitFlags{
		Name:  "SwiftShader Regression Bot",
		Email: r.gerritEmail,
	}); err != nil {
		return cause.Wrap(err, "Failed to commit test results")
	}

	if r.dryRun {
		log.Printf("DRY RUN: post results for review")
	} else {
		log.Println("Pushing test results for review")
		if err := git.Push(test.checkoutDir, gitURL, "HEAD", "refs/for/master", git.PushFlags{
			Username: r.gerritUser,
			Password: r.gerritPass,
		}); err != nil {
			return cause.Wrap(err, "Failed to push test results for review")
		}
		log.Println("Test results posted for review")
	}

	// We've just pushed a new commit. Let's reset back to the parent commit
	// (dailyHash), so that we can run runDaily again for another backend,
	// and have it update the commit with the same change-id.
	if err := git.CheckoutCommit(test.checkoutDir, dailyHash); err != nil {
		return cause.Wrap(err, "Failed to checkout parent commit")
	}
	log.Println("Checked out parent commit")

	// Re-query for the change (it may have just been created by the push
	// above) so the failure summary can be posted as a review on it.
	change, err := r.findTestListChange(client)
	if err != nil {
		return err
	}

	if err := r.postMostCommonFailures(client, change, results); err != nil {
		return err
	}

	return nil
}
786
787func (r *regres) postCoverageResults(cov *cov.Tree, revision git.Hash) error {
788	log.Printf("Committing coverage for %v\n", revision.String())
789
790	url := coverageURL
791	url = strings.ReplaceAll(url, "$USERNAME", r.githubUser)
792	url = strings.ReplaceAll(url, "$PASSWORD", r.githubPass)
793
794	dir := filepath.Join(r.cacheRoot, "coverage")
795	defer os.RemoveAll(dir)
796	if err := git.CheckoutRemoteBranch(dir, url, coverageBranch); err != nil {
797		return cause.Wrap(err, "Failed to checkout gh-pages branch")
798	}
799
800	filePath := filepath.Join(dir, "coverage.dat")
801	file, err := os.Create(filePath)
802	if err != nil {
803		return cause.Wrap(err, "Failed to create file '%s'", filePath)
804	}
805	defer file.Close()
806
807	if err := cov.Encode(revision.String(), file); err != nil {
808		return cause.Wrap(err, "Failed to encode coverage")
809	}
810	file.Close()
811
812	if err := git.Add(dir, filePath); err != nil {
813		return cause.Wrap(err, "Failed to git add '%s'", filePath)
814	}
815
816	shortHash := revision.String()[:8]
817
818	err = git.Commit(dir, "Update coverage data @ "+shortHash, git.CommitFlags{
819		Name:  "SwiftShader Regression Bot",
820		Email: r.gerritEmail,
821	})
822	if err != nil {
823		return cause.Wrap(err, "Failed to git commit")
824	}
825
826	if !r.dryRun {
827		err = git.Push(dir, url, coverageBranch, coverageBranch, git.PushFlags{})
828		if err != nil {
829			return cause.Wrap(err, "Failed to 'git push'")
830		}
831		log.Printf("Coverage for %v pushed to Github\n", shortHash)
832	}
833
834	return nil
835}
836
837// postMostCommonFailures posts the most common failure cases as a review
838// comment on the given change.
839func (r *regres) postMostCommonFailures(client *gerrit.Client, change *gerrit.ChangeInfo, results *deqp.Results) error {
840	const limit = 25
841
842	failures := commonFailures(results)
843	if len(failures) > limit {
844		failures = failures[:limit]
845	}
846	sb := strings.Builder{}
847	sb.WriteString(fmt.Sprintf("Top %v most common failures:\n", len(failures)))
848	for _, f := range failures {
849		lines := strings.Split(f.error, "\n")
850		if len(lines) == 1 {
851			line := lines[0]
852			if line != "" {
853				sb.WriteString(fmt.Sprintf(" • %d occurrences: %v: %v\n", f.count, f.status, line))
854			} else {
855				sb.WriteString(fmt.Sprintf(" • %d occurrences: %v\n", f.count, f.status))
856			}
857		} else {
858			sb.WriteString(fmt.Sprintf(" • %d occurrences: %v:\n", f.count, f.status))
859			for _, l := range lines {
860				sb.WriteString("    > ")
861				sb.WriteString(l)
862				sb.WriteString("\n")
863			}
864		}
865		sb.WriteString(fmt.Sprintf("    Example test: %v\n", f.exampleTest))
866
867	}
868	msg := sb.String()
869
870	if r.dryRun {
871		log.Printf("DRY RUN: add most common failures to '%v':\n%v\n", change.ChangeID, msg)
872	} else {
873		log.Printf("Posting most common failures to '%s'\n", change.ChangeID)
874		_, _, err := client.Changes.SetReview(change.ChangeID, change.CurrentRevision, &gerrit.ReviewInput{
875			Message: msg,
876			Tag:     "autogenerated:regress",
877		})
878		if err != nil {
879			return cause.Wrap(err, "Failed to post comments on change '%s'", change.ChangeID)
880		}
881	}
882	return nil
883}
884
885func (r *regres) findTestListChange(client *gerrit.Client) (*gerrit.ChangeInfo, error) {
886	log.Println("Checking for existing test list change")
887	changes, _, err := client.Changes.QueryChanges(&gerrit.QueryChangeOptions{
888		QueryOptions: gerrit.QueryOptions{
889			Query: []string{fmt.Sprintf(`status:open+owner:"%v"`, r.gerritEmail)},
890			Limit: 1,
891		},
892		ChangeOptions: gerrit.ChangeOptions{
893			AdditionalFields: []string{"CURRENT_REVISION"},
894		},
895	})
896	if err != nil {
897		return nil, cause.Wrap(err, "Failed to checking for existing test list")
898	}
899	if len(*changes) > 0 {
900		// TODO: This currently assumes that only change changes from
901		// gerritEmail are test lists updates. This may not always be true.
902		return &(*changes)[0], nil
903	}
904	return nil, nil
905}
906
// changeInfo holds the important information about a single, open change in
// gerrit.
type changeInfo struct {
	id            string    // Gerrit change ID.
	pending       bool      // Is this change waiting a test for the latest patchset?
	priority      int       // Calculated priority based on Gerrit labels.
	latest        git.Hash  // Git hash of the latest patchset in the change.
	parent        git.Hash  // Git hash of the changelist this change is based on.
	lastUpdated   time.Time // Time the change was last fetched.
	commitMessage string    // Commit message of the latest patchset.
}
918
919// queryChanges updates the changes map by querying gerrit for the latest open
920// changes.
921func queryChanges(client *gerrit.Client, changes map[string]*changeInfo) error {
922	log.Println("Checking for latest changes")
923	results, _, err := client.Changes.QueryChanges(&gerrit.QueryChangeOptions{
924		QueryOptions: gerrit.QueryOptions{
925			Query: []string{"status:open+-age:3d"},
926			Limit: 100,
927		},
928	})
929	if err != nil {
930		return cause.Wrap(err, "Failed to get list of changes")
931	}
932
933	ids := map[string]bool{}
934	for _, r := range *results {
935		ids[r.ChangeID] = true
936	}
937
938	// Add new changes
939	for id := range ids {
940		if _, found := changes[id]; !found {
941			log.Printf("Tracking new change '%v'\n", id)
942			changes[id] = &changeInfo{id: id}
943		}
944	}
945
946	// Remove old changes
947	for id := range changes {
948		if found := ids[id]; !found {
949			log.Printf("Untracking change '%v'\n", id)
950			delete(changes, id)
951		}
952	}
953
954	return nil
955}
956
957// update queries gerrit for information about the given change.
958func (c *changeInfo) update(client *gerrit.Client) error {
959	change, _, err := client.Changes.GetChange(c.id, &gerrit.ChangeOptions{
960		AdditionalFields: []string{"CURRENT_REVISION", "CURRENT_COMMIT", "MESSAGES", "LABELS"},
961	})
962	if err != nil {
963		return cause.Wrap(err, "Getting info for change '%s'", c.id)
964	}
965
966	current, ok := change.Revisions[change.CurrentRevision]
967	if !ok {
968		return fmt.Errorf("Couldn't find current revision for change '%s'", c.id)
969	}
970
971	if len(current.Commit.Parents) == 0 {
972		return fmt.Errorf("Couldn't find current commit for change '%s' has no parents(?)", c.id)
973	}
974
975	kokoroPresubmit := change.Labels["Kokoro-Presubmit"].Approved.AccountID != 0
976	codeReviewScore := change.Labels["Code-Review"].Value
977	codeReviewApproved := change.Labels["Code-Review"].Approved.AccountID != 0
978	presubmitReady := change.Labels["Presubmit-Ready"].Approved.AccountID != 0
979	verifiedScore := change.Labels["Verified"].Value
980
981	c.priority = 0
982	if presubmitReady {
983		c.priority += 10
984	}
985	c.priority += codeReviewScore
986	if codeReviewApproved {
987		c.priority += 2
988	}
989	if kokoroPresubmit {
990		c.priority++
991	}
992
993	// Is the change from a Googler or reviewed by a Googler?
994	canTest := strings.HasSuffix(current.Commit.Committer.Email, "@google.com") ||
995		strings.HasSuffix(change.Labels["Code-Review"].Approved.Email, "@google.com") ||
996		strings.HasSuffix(change.Labels["Code-Review"].Recommended.Email, "@google.com") ||
997		strings.HasSuffix(change.Labels["Presubmit-Ready"].Approved.Email, "@google.com")
998
999	// Don't test if the change has negative scores.
1000	if canTest {
1001		if codeReviewScore < 0 || verifiedScore < 0 {
1002			canTest = false
1003		}
1004	}
1005
1006	// Has the latest patchset already been tested?
1007	if canTest {
1008		for _, msg := range change.Messages {
1009			if msg.RevisionNumber == current.Number &&
1010				strings.Contains(msg.Message, reportHeader) {
1011				canTest = false
1012				break
1013			}
1014		}
1015	}
1016
1017	c.pending = canTest
1018	c.latest = git.ParseHash(change.CurrentRevision)
1019	c.parent = git.ParseHash(current.Commit.Parents[0].Commit)
1020	c.commitMessage = current.Commit.Message
1021
1022	return nil
1023}
1024
1025func (r *regres) newTest(commit git.Hash) *test {
1026	checkoutDir := filepath.Join(r.cacheRoot, "checkout", commit.String())
1027	resDir := filepath.Join(r.cacheRoot, "res", commit.String())
1028	return &test{
1029		r:              r,
1030		commit:         commit,
1031		checkoutDir:    checkoutDir,
1032		resDir:         resDir,
1033		buildDir:       filepath.Join(checkoutDir, "build"),
1034		reactorBackend: backendSubzero,
1035	}
1036}
1037
1038func (t *test) setReactorBackend(reactorBackend reactorBackend) *test {
1039	t.reactorBackend = reactorBackend
1040	return t
1041}
1042
// reactorBackend is the name of the Reactor backend used for the SwiftShader
// build (passed via the REACTOR_BACKEND CMake option in test.build()).
type reactorBackend string

const (
	backendLLVM    reactorBackend = "LLVM"
	backendSubzero reactorBackend = "Subzero"
)
1049
// test holds the state used to checkout, build and test a single SwiftShader
// commit.
type test struct {
	r              *regres
	commit         git.Hash       // hash of the commit to test
	checkoutDir    string         // directory for the SwiftShader checkout
	resDir         string         // directory for the test results
	buildDir       string         // directory for SwiftShader build
	toolchain      llvm.Toolchain // the toolchain used for building
	reactorBackend reactorBackend // backend for SwiftShader build
	coverageEnv    *cov.Env       // coverage generation environment (optional).
}
1060
1061// cleanup removes any temporary files used by the test.
1062func (t *test) cleanup() {
1063	if t.checkoutDir != "" && !t.r.keepCheckouts {
1064		os.RemoveAll(t.checkoutDir)
1065	}
1066}
1067
1068// checkout clones the test's source commit into t.src.
1069func (t *test) checkout() error {
1070	if util.IsDir(t.checkoutDir) && t.r.keepCheckouts {
1071		log.Printf("Reusing source cache for commit '%s'\n", t.commit)
1072		return nil
1073	}
1074	log.Printf("Checking out '%s'\n", t.commit)
1075	os.RemoveAll(t.checkoutDir)
1076	if err := git.CheckoutRemoteCommit(t.checkoutDir, gitURL, t.commit); err != nil {
1077		return cause.Wrap(err, "Checking out commit '%s'", t.commit)
1078	}
1079	log.Printf("Checked out commit '%s'\n", t.commit)
1080	return nil
1081}
1082
1083// buildAndRun calls t.build() followed by t.run(). Errors are logged and
1084// reported in the returned deqprun.Results.Error field.
1085func (t *test) buildAndRun(testLists testlist.Lists, d deqpBuild) *deqp.Results {
1086	// Build the parent change.
1087	if err := t.build(); err != nil {
1088		msg := fmt.Sprintf("Failed to build '%s'", t.commit)
1089		log.Println(cause.Wrap(err, msg))
1090		return &deqp.Results{Error: msg}
1091	}
1092
1093	// Run the tests on the parent change.
1094	results, err := t.run(testLists, d)
1095	if err != nil {
1096		msg := fmt.Sprintf("Failed to test change '%s'", t.commit)
1097		log.Println(cause.Wrap(err, msg))
1098		return &deqp.Results{Error: msg}
1099	}
1100
1101	return results
1102}
1103
1104// build builds the SwiftShader source into t.buildDir.
1105func (t *test) build() error {
1106	log.Printf("Building '%s'\n", t.commit)
1107
1108	if err := os.MkdirAll(t.buildDir, 0777); err != nil {
1109		return cause.Wrap(err, "Failed to create build directory")
1110	}
1111
1112	args := []string{
1113		`..`,
1114		`-DCMAKE_BUILD_TYPE=Release`,
1115		`-DSWIFTSHADER_DCHECK_ALWAYS_ON=1`,
1116		`-DREACTOR_VERIFY_LLVM_IR=1`,
1117		`-DREACTOR_BACKEND=` + string(t.reactorBackend),
1118		`-DSWIFTSHADER_LLVM_VERSION=10.0`,
1119		`-DSWIFTSHADER_WARNINGS_AS_ERRORS=0`,
1120	}
1121
1122	if t.coverageEnv != nil {
1123		args = append(args, "-DSWIFTSHADER_EMIT_COVERAGE=1")
1124	}
1125
1126	if err := shell.Env(buildTimeout, t.r.cmake, t.buildDir, t.r.toolchainEnv(), args...); err != nil {
1127		return err
1128	}
1129
1130	if err := shell.Shell(buildTimeout, t.r.make, t.buildDir, fmt.Sprintf("-j%d", runtime.NumCPU())); err != nil {
1131		return err
1132	}
1133
1134	return nil
1135}
1136
1137func (t *test) run(testLists testlist.Lists, d deqpBuild) (*deqp.Results, error) {
1138	log.Printf("Running tests for '%s'\n", t.commit)
1139
1140	swiftshaderICDSo := filepath.Join(t.buildDir, "libvk_swiftshader.so")
1141	if !util.IsFile(swiftshaderICDSo) {
1142		return nil, fmt.Errorf("Couldn't find '%s'", swiftshaderICDSo)
1143	}
1144
1145	swiftshaderICDJSON := filepath.Join(t.buildDir, "Linux", "vk_swiftshader_icd.json")
1146	if !util.IsFile(swiftshaderICDJSON) {
1147		return nil, fmt.Errorf("Couldn't find '%s'", swiftshaderICDJSON)
1148	}
1149
1150	if *limit != 0 {
1151		log.Printf("Limiting tests to %d\n", *limit)
1152		testLists = append(testlist.Lists{}, testLists...)
1153		for i := range testLists {
1154			testLists[i] = testLists[i].Limit(*limit)
1155		}
1156	}
1157
1158	// Directory for per-test small transient files, such as log files,
1159	// coverage output, etc.
1160	// TODO(bclayton): consider using tmpfs here.
1161	tempDir := filepath.Join(t.buildDir, "temp")
1162	os.MkdirAll(tempDir, 0777)
1163
1164	config := deqp.Config{
1165		ExeEgl:    filepath.Join(d.path, "build", "modules", "egl", "deqp-egl"),
1166		ExeGles2:  filepath.Join(d.path, "build", "modules", "gles2", "deqp-gles2"),
1167		ExeGles3:  filepath.Join(d.path, "build", "modules", "gles3", "deqp-gles3"),
1168		ExeVulkan: filepath.Join(d.path, "build", "external", "vulkancts", "modules", "vulkan", "deqp-vk"),
1169		TempDir:   tempDir,
1170		TestLists: testLists,
1171		Env: []string{
1172			"LD_LIBRARY_PATH=" + t.buildDir + ":" + os.Getenv("LD_LIBRARY_PATH"),
1173			"VK_ICD_FILENAMES=" + swiftshaderICDJSON,
1174			"DISPLAY=" + os.Getenv("DISPLAY"),
1175			"LIBC_FATAL_STDERR_=1", // Put libc explosions into logs.
1176		},
1177		LogReplacements: map[string]string{
1178			t.checkoutDir: "<SwiftShader>",
1179		},
1180		NumParallelTests: numParallelTests,
1181		TestTimeout:      testTimeout,
1182		CoverageEnv:      t.coverageEnv,
1183	}
1184
1185	return config.Run()
1186}
1187
1188func (t *test) writeTestListsByStatus(testLists testlist.Lists, results *deqp.Results) ([]string, error) {
1189	out := []string{}
1190
1191	for _, list := range testLists {
1192		files := map[testlist.Status]*os.File{}
1193		for _, status := range testlist.Statuses {
1194			path := testlist.FilePathWithStatus(filepath.Join(t.checkoutDir, list.File), status)
1195			dir := filepath.Dir(path)
1196			os.MkdirAll(dir, 0777)
1197			f, err := os.Create(path)
1198			if err != nil {
1199				return nil, cause.Wrap(err, "Couldn't create file '%v'", path)
1200			}
1201			defer f.Close()
1202			files[status] = f
1203
1204			out = append(out, path)
1205		}
1206
1207		for _, testName := range list.Tests {
1208			if r, found := results.Tests[testName]; found {
1209				fmt.Fprintln(files[r.Status], testName)
1210			}
1211		}
1212	}
1213
1214	return out, nil
1215}
1216
1217// resultsCachePath returns the path to the cache results file for the given
1218// test, testlists and deqpBuild.
1219func (t *test) resultsCachePath(testLists testlist.Lists, d deqpBuild) string {
1220	return filepath.Join(t.resDir, testLists.Hash(), d.hash)
1221}
1222
// testStatusAndError is a (status, error message) pair, used as a map key to
// group tests that failed in the same way.
type testStatusAndError struct {
	status testlist.Status // Result status of the test.
	error  string          // Error message of the test ("" if none).
}
1227
// commonFailure describes a (status, error) failure signature shared by one
// or more tests.
type commonFailure struct {
	count int // Number of tests that failed with this signature.
	testStatusAndError
	exampleTest string // Name of one test that failed this way.
}
1233
1234func commonFailures(results *deqp.Results) []commonFailure {
1235	failures := map[testStatusAndError]int{}
1236	examples := map[testStatusAndError]string{}
1237	for name, test := range results.Tests {
1238		if !test.Status.Failing() {
1239			continue
1240		}
1241		key := testStatusAndError{test.Status, test.Err}
1242		if count, ok := failures[key]; ok {
1243			failures[key] = count + 1
1244		} else {
1245			failures[key] = 1
1246			examples[key] = name
1247		}
1248	}
1249	out := make([]commonFailure, 0, len(failures))
1250	for failure, count := range failures {
1251		out = append(out, commonFailure{count, failure, examples[failure]})
1252	}
1253	sort.Slice(out, func(i, j int) bool { return out[i].count > out[j].count })
1254	return out
1255}
1256
// compare returns a string describing all differences between two
// deqp.Results, and a boolean indicating that there are differences
// that are considered important.
// This string is used as the report message posted to the gerrit code review.
func compare(old, new *deqp.Results) (msg string, alert bool) {
	// If either run errored outright there is nothing to diff; report the
	// error instead. Only an error on the new change raises the alert.
	if old.Error != "" {
		return old.Error, false
	}
	if new.Error != "" {
		return new.Error, true
	}

	oldStatusCounts, newStatusCounts := map[testlist.Status]int{}, map[testlist.Status]int{}
	totalTests := 0

	// Test names bucketed by how their status transitioned from old to new.
	broken, fixed, failing, removed, changed := []string{}, []string{}, []string{}, []string{}, []string{}

	for test, new := range new.Tests {
		old, found := old.Tests[test]
		if !found {
			log.Printf("Test result for '%s' not found on old change\n", test)
			continue
		}
		// The case order matters: e.g. a pass -> fail transition is
		// classified as 'broken', not 'changed'.
		switch {
		case !old.Status.Failing() && new.Status.Failing():
			broken = append(broken, test)
			alert = true
		case !old.Status.Passing() && new.Status.Passing():
			fixed = append(fixed, test)
		case old.Status != new.Status:
			changed = append(changed, test)
			alert = true
		case old.Status.Failing() && new.Status.Failing():
			// 'failing' only raises the alert flag; it is not listed in the
			// report message below.
			failing = append(failing, test) // Still broken
			alert = true
		}
		totalTests++
		if found { // NOTE(review): always true here — the !found case continues above.
			oldStatusCounts[old.Status] = oldStatusCounts[old.Status] + 1
		}
		newStatusCounts[new.Status] = newStatusCounts[new.Status] + 1
	}

	// Tests present in the old results but missing from the new.
	for test := range old.Tests {
		if _, found := new.Tests[test]; !found {
			removed = append(removed, test)
		}
	}

	sb := strings.Builder{}

	// list prints the list l to sb, truncating after a limit.
	list := func(l []string) {
		const max = 10
		for i, s := range l {
			sb.WriteString("  ")
			if i == max {
				sb.WriteString(fmt.Sprintf("> %d more\n", len(l)-i))
				break
			}
			sb.WriteString(fmt.Sprintf("> %s", s))
			// Append the status (or old -> new transition) and any error
			// message lines for the test, when known.
			if n, ok := new.Tests[s]; ok {
				if o, ok := old.Tests[s]; ok && n != o {
					sb.WriteString(fmt.Sprintf(" - [%s -> %s]", o.Status, n.Status))
				} else {
					sb.WriteString(fmt.Sprintf(" - [%s]", n.Status))
				}
				sb.WriteString("\n")
				for _, line := range strings.Split(n.Err, "\n") {
					if line != "" {
						sb.WriteString(fmt.Sprintf("     %v\n", line))
					}
				}
			} else {
				sb.WriteString("\n")
			}
		}
	}

	if n := len(broken); n > 0 {
		sort.Strings(broken)
		sb.WriteString(fmt.Sprintf("\n--- This change breaks %d tests: ---\n", n))
		list(broken)
	}
	if n := len(fixed); n > 0 {
		sort.Strings(fixed)
		sb.WriteString(fmt.Sprintf("\n--- This change fixes %d tests: ---\n", n))
		list(fixed)
	}
	if n := len(removed); n > 0 {
		sort.Strings(removed)
		sb.WriteString(fmt.Sprintf("\n--- This change removes %d tests: ---\n", n))
		list(removed)
	}
	if n := len(changed); n > 0 {
		sort.Strings(changed)
		sb.WriteString(fmt.Sprintf("\n--- This change alters %d tests: ---\n", n))
		list(changed)
	}

	if len(broken) == 0 && len(fixed) == 0 && len(removed) == 0 && len(changed) == 0 {
		sb.WriteString(fmt.Sprintf("\n--- No change in test results ---\n"))
	}

	// Per-status test counts, with old -> new deltas where they differ.
	sb.WriteString(fmt.Sprintf("          Total tests: %d\n", totalTests))
	for _, s := range []struct {
		label  string
		status testlist.Status
	}{
		{"                 Pass", testlist.Pass},
		{"                 Fail", testlist.Fail},
		{"              Timeout", testlist.Timeout},
		{"      UNIMPLEMENTED()", testlist.Unimplemented},
		{"        UNSUPPORTED()", testlist.Unsupported},
		{"        UNREACHABLE()", testlist.Unreachable},
		{"             ASSERT()", testlist.Assert},
		{"              ABORT()", testlist.Abort},
		{"                Crash", testlist.Crash},
		{"        Not Supported", testlist.NotSupported},
		{"Compatibility Warning", testlist.CompatibilityWarning},
		{"      Quality Warning", testlist.QualityWarning},
	} {
		old, new := oldStatusCounts[s.status], newStatusCounts[s.status]
		if old == 0 && new == 0 {
			continue
		}
		change := util.Percent64(int64(new-old), int64(old))
		switch {
		case old == new:
			sb.WriteString(fmt.Sprintf("%s: %v\n", s.label, new))
		case change == 0:
			sb.WriteString(fmt.Sprintf("%s: %v -> %v (%+d)\n", s.label, old, new, new-old))
		default:
			sb.WriteString(fmt.Sprintf("%s: %v -> %v (%+d %+d%%)\n", s.label, old, new, new-old, change))
		}
	}

	// Overall run duration change (only when both runs recorded a duration).
	if old, new := old.Duration, new.Duration; old != 0 && new != 0 {
		label := "           Time taken"
		change := util.Percent64(int64(new-old), int64(old))
		switch {
		case old == new:
			sb.WriteString(fmt.Sprintf("%s: %v\n", label, new))
		case change == 0:
			sb.WriteString(fmt.Sprintf("%s: %v -> %v\n", label, old, new))
		default:
			sb.WriteString(fmt.Sprintf("%s: %v -> %v (%+d%%)\n", label, old, new, change))
		}
	}

	// Per-test duration changes that are significant both absolutely and
	// relatively (see threshold check below).
	type timingDiff struct {
		old      time.Duration
		new      time.Duration
		relDelta float64
		name     string
	}

	timingDiffs := []timingDiff{}
	for name, new := range new.Tests {
		if old, ok := old.Tests[name]; ok {
			old, new := old.TimeTaken, new.TimeTaken
			delta := new.Seconds() - old.Seconds()
			absDelta := math.Abs(delta)
			relDelta := delta / old.Seconds()
			if absDelta > 2.0 && math.Abs(relDelta) > 0.05 { // If change > ±2s and > than ±5% old time...
				timingDiffs = append(timingDiffs, timingDiff{
					old:      old,
					new:      new,
					name:     name,
					relDelta: relDelta,
				})
			}
		}
	}
	if len(timingDiffs) > 0 {
		sb.WriteString(fmt.Sprintf("\n--- Test duration changes ---\n"))
		const limit = 10
		// Keep only the 'limit' largest relative changes, then present them
		// ordered from biggest speed-up to biggest slow-down.
		if len(timingDiffs) > limit {
			sort.Slice(timingDiffs, func(i, j int) bool { return math.Abs(timingDiffs[i].relDelta) > math.Abs(timingDiffs[j].relDelta) })
			timingDiffs = timingDiffs[:limit]
		}
		sort.Slice(timingDiffs, func(i, j int) bool { return timingDiffs[i].relDelta < timingDiffs[j].relDelta })
		for _, d := range timingDiffs {
			percent := util.Percent64(int64(d.new-d.old), int64(d.old))
			sb.WriteString(fmt.Sprintf("  > %v: %v -> %v (%+d%%)\n", d.name, d.old, d.new, percent))
		}
	}

	return sb.String(), alert
}
1447
1448// loadTestLists loads the full test lists from the json file.
1449// The file is first searched at {t.srcDir}/{relPath}
1450// If this cannot be found, then the file is searched at the fallback path
1451// {CWD}/{relPath}
1452// This allows CLs to alter the list of tests to be run, as well as providing
1453// a default set.
1454func (t *test) loadTestLists(relPath string) (testlist.Lists, error) {
1455	// Seach for the test.json file in the checked out source directory.
1456	if path := filepath.Join(t.checkoutDir, relPath); util.IsFile(path) {
1457		log.Printf("Loading test list '%v' from commit\n", relPath)
1458		return testlist.Load(t.checkoutDir, path)
1459	}
1460
1461	// Not found there. Search locally.
1462	wd, err := os.Getwd()
1463	if err != nil {
1464		return testlist.Lists{}, cause.Wrap(err, "Couldn't get current working directory")
1465	}
1466	if path := filepath.Join(wd, relPath); util.IsFile(path) {
1467		log.Printf("Loading test list '%v' from regres\n", relPath)
1468		return testlist.Load(wd, relPath)
1469	}
1470
1471	return nil, errors.New("Couldn't find a test list file")
1472}
1473
1474type date struct {
1475	year  int
1476	month time.Month
1477	day   int
1478}
1479
1480func toDate(t time.Time) date {
1481	d := date{}
1482	d.year, d.month, d.day = t.Date()
1483	return d
1484}
1485