1// Copyright 2020 The Chromium OS Authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5// ppdTool is a command line tool that can:
6//  * download all PPD files from the database kept on the SCS server;
7//  * cluster given set of PPD files and return a minimal subset of PPDs that
8//    represents resultant clusters. This is useful for choosing a subset of
9//    PPD files for testing.
10//
11// The tool can be run with the command:
12//    go run ppdTool.go
13// Use -h parameter to print some help and list of accepted parameters.
14//
15// The tool can be also compiled to the binary file with the following command:
//    go build ppdTool.go
17
18package main
19
20import (
21	"bufio"
22	"bytes"
23	"compress/gzip"
24	"encoding/json"
25	"flag"
26	"fmt"
27	"io"
28	"io/ioutil"
29	"log"
30	"net/http"
31	"os"
32	"path/filepath"
33	"regexp"
34	"sort"
35	"strings"
36	"sync"
37)
38
// downloadFile starts to download the content from the given url with HTTP
// GET. It returns a reader to the content; the caller is responsible for
// closing it. In case of an error the function terminates the program.
func downloadFile(url string) io.ReadCloser {
	response, err := http.Get(url)
	if err != nil {
		log.Fatalf("Cannot HTTP GET the file %s: %s.\n", url, err)
	}
	if response.StatusCode != http.StatusOK {
		// Close the body before terminating to release the connection.
		response.Body.Close()
		log.Fatalf("HTTP GET for the file %s returned status code %d.\n", url, response.StatusCode)
	}
	return response.Body
}
53
54// downloadFilenamesFromPPDIndex retrieves from the index a list of all PPD
55// files. Returned PPD filenames are sorted and unique. In case of an error
56// the function terminates the program.
57func downloadFilenamesFromPPDIndex() []string {
58	const urlMetadata = "https://printerconfigurations.googleusercontent.com/chromeos_printing/metadata_v3/"
59
60	output := make(map[string]bool)
61	for i := 0; i < 20; i++ {
62		// Calculate a URL of the index file.
63		urlPPDIndex := fmt.Sprintf("%sindex-%02d.json", urlMetadata, i)
64		// Download and parse the index file.
65		respBody := downloadFile(urlPPDIndex)
66		defer respBody.Close()
67		body, err := ioutil.ReadAll(respBody)
68		if err != nil {
69			log.Fatalf("Cannot read the content of %s: %s.\n", urlPPDIndex, err)
70		}
71		// Parse the json structure and extract PPD filenames.
72		type jsonName struct {
73			Name string `json:"name"`
74		}
75		type jsonMetadata struct {
76			PPDMetadata []jsonName `json:"ppdMetadata"`
77		}
78		type jsonPrinters struct {
79			PPDIndex map[string]jsonMetadata `json:"ppdIndex"`
80		}
81		var data jsonPrinters
82		if err = json.Unmarshal(body, &data); err != nil {
83			log.Fatalf("Cannot parse the content of %s: %s.\n", urlPPDIndex, err)
84		}
85		for _, entry := range data.PPDIndex {
86			for _, element := range entry.PPDMetadata {
87				output[element.Name] = true
88			}
89		}
90	}
91
92	// Sort filenames.
93	results := make([]string, 0, len(output))
94	for filename := range output {
95		results = append(results, filename)
96	}
97	sort.Strings(results)
98
99	return results
100}
101
// listFilenamesFromDirectory returns a list of filenames from the given
// directory. Subdirectories are omitted. In case of an error the function
// terminates the program.
func listFilenamesFromDirectory(path string) []string {
	entries, err := ioutil.ReadDir(path)
	if err != nil {
		log.Fatalf("Cannot open the directory %s: %s.\n", path, err)
	}
	filenames := make([]string, 0, len(entries))
	for _, entry := range entries {
		if entry.IsDir() {
			continue
		}
		filenames = append(filenames, entry.Name())
	}
	return filenames
}
117
// Statement represents a single statement parsed from a PPD file.
type Statement struct {
	keyword string
	option  string
	value   string
}

// PPD represents the content of a single PPD file as an array of Statements.
// The field name holds the filename of the PPD file while the field
// originalDataSize remembers how many statements the field data had before
// any filtering.
type PPD struct {
	name             string
	data             []Statement
	originalDataSize int
}

// Patterns recognizing the kinds of lines that may start a PPD statement.
var reComment = regexp.MustCompile(`^\*[ \t]*%`)
var reKeywordOptionValue = regexp.MustCompile(`^\*[ \t]*([^: \t]+)([ \t]+[^:]+)?[ \t]*:[ \t]*([^ \t].*)?$`)
var reKeywordOnly = regexp.MustCompile(`^\*[ \t]*([^: \t]+)[ \t]*$`)
var reEmptyLine = regexp.MustCompile(`^[ \t]*$`)

// parseLine parses a single line from a PPD file. The line is supposed to be
// the first line of a statement's definition. A comment or a line containing
// only white space yields an empty Statement (st.keyword == "") and success
// (ok == true). A line that matches none of the known forms yields ok ==
// false.
func parseLine(line string) (st Statement, ok bool) {
	// Comments and blank lines are valid input but carry no statement.
	// (Neither can match the keyword patterns below, which require a
	// leading '*' followed by keyword characters.)
	if reComment.MatchString(line) || reEmptyLine.MatchString(line) {
		return st, true
	}
	if groups := reKeywordOptionValue.FindStringSubmatch(line); groups != nil {
		return Statement{keyword: groups[1], option: groups[2], value: groups[3]}, true
	}
	if groups := reKeywordOnly.FindStringSubmatch(line); groups != nil {
		return Statement{keyword: groups[1]}, true
	}
	return st, false
}
162
163// ParsePPD parses a content of a PPD file. The parameter name is the filename
164// of the PPD file (the source of the content).
165func ParsePPD(name string, content []byte) (PPD, error) {
166	ppd := PPD{name: name, data: make([]Statement, 0, 512)}
167	scanner := bufio.NewScanner(bytes.NewReader(content))
168	var multilineValue = false
169	for lineNo := 1; scanner.Scan(); lineNo++ {
170		line := scanner.Text()
171		if multilineValue {
172			// We are inside a multiline value.
173			ppd.data[len(ppd.data)-1].value += "\n" + line
174			// Check for closing ".
175			multilineValue = (strings.Count(line, "\"")%2 == 0)
176			continue
177		}
178
179		st, ok := parseLine(line)
180		if !ok {
181			return ppd, fmt.Errorf("Cannot parse line %d: %s", lineNo, line)
182		}
183		if st.keyword == "" {
184			// A comment or an empty line.
185			continue
186		}
187		ppd.data = append(ppd.data, st)
188		// Check for unmatched " in the value.
189		multilineValue = (strings.Count(st.value, "\"")%2 != 0)
190	}
191	ppd.originalDataSize = len(ppd.data)
192	return ppd, scanner.Err()
193}
194
// reWhiteSpaces matches a run of spaces and/or tabulators.
var reWhiteSpaces = regexp.MustCompile(`[ \t]+`)

// normalizeSpacesAndTabs normalizes sequences of spaces and tabulators in
// the given string: all leading and trailing white space is removed and
// every inner run of spaces and tabulators is replaced by a single space.
func normalizeSpacesAndTabs(str *string) {
	trimmed := strings.TrimSpace(*str)
	*str = reWhiteSpaces.ReplaceAllString(trimmed, " ")
}
205
// keywordsToRemove lists PPD keywords whose statements are dropped by
// normalizePPD before PPDs are compared — e.g. purely informational fields
// (NickName, FileVersion, Throughput) that do not affect the printing
// pipeline.
var keywordsToRemove = map[string]bool{
	"1284DeviceID":           true,
	"cupsLanguages":          true,
	"cupsVersion":            true,
	"DefaultDocCutType":      true,
	"DefaultInstalledMemory": true,
	"DefaultPageCutType":     true,
	"DocCutType":             true,
	"driverUrl":              true,
	"End":                    true,
	"FileVersion":            true,
	"FoomaticIDs":            true,
	"InstalledMemory":        true,
	"Manufacturer":           true,
	"ModelName":              true,
	"NickName":               true,
	"PageCutType":            true,
	"PCFileName":             true,
	"Product":                true,
	"ShortNickName":          true,
	"Throughput":             true}

// shortLang and longLang match localization prefixes of keywords, like
// "pl." or "fr_CA."; statements with such keywords are dropped by
// normalizePPD.
var shortLang = regexp.MustCompile(`^[a-z][a-z]\.`)
var longLang = regexp.MustCompile(`^[a-z][a-z]_[A-Za-z][A-Za-z]\.`)
230
231// normalizePPD processes the given PPD content to make it suitable for
232// comparison with other PPDs. The PPD may be no longer valid after this
233// transformation. The following operations are performed on the PPD:
234// * all statements with keyword included in the global variable
235//   keywordsToRemove are removed;
236// * all statements with keyword with prefix matching ^[a-z][a-z]\. or
237//   ^[a-z][a-z]_[A-Za-z][A-Za-z]\. are removed (like *pl.MediaType,
238//	 *de.Translation, *fr_CA.Translation, *zh_TW.MediaType, etc.);
239// * subsequences of white spaces in all statements are normalized with
240//   the use of normalizeSpacesAndTabs(...)
241func normalizePPD(ppd *PPD) {
242	newData := make([]Statement, 0, len(ppd.data))
243	for _, s := range ppd.data {
244		if keywordsToRemove[s.keyword] {
245			continue
246		}
247		if shortLang.MatchString(s.keyword) || longLang.MatchString(s.keyword) {
248			continue
249		}
250		normalizeSpacesAndTabs(&s.option)
251		normalizeSpacesAndTabs(&s.value)
252		newData = append(newData, s)
253	}
254	ppd.data = newData
255}
256
257// parseAndNormalizePPDFile reads the content of a PPD file from the given
258// reader and parses it. The content is also normalized with the normalizePPD
259// function. In case of an error the function terminates the program.
260func parseAndNormalizePPDFile(reader io.ReadCloser, filename string) PPD {
261	// Decompress the content if needed.
262	if strings.HasSuffix(filename, ".gz") {
263		defer reader.Close()
264		decomp, err := gzip.NewReader(reader)
265		if err != nil {
266			log.Fatalf("Error when decompressing the file %s: %s.\n", filename, err)
267		}
268		reader = decomp
269	}
270	defer reader.Close()
271	content, err := ioutil.ReadAll(reader)
272	if err != nil {
273		log.Fatalf("Error when reading a content of the file %s: %s.\n", filename, err)
274	}
275	ppd, err := ParsePPD(filename, content)
276	if err != nil {
277		log.Fatalf("Error when parsing a content of the file %s: %s.\n", filename, err)
278	}
279	normalizePPD(&ppd)
280	return ppd
281}
282
// checkNotExists terminates the program when the given path exists or when
// its existence cannot be determined; it returns normally only when nothing
// exists at the path.
func checkNotExists(path string) {
	_, err := os.Stat(path)
	if err != nil && os.IsNotExist(err) {
		// Nothing exists at the path: the desired outcome.
		return
	}
	if err == nil {
		log.Fatal("File or directory '" + path + "' already exists.")
	}
	log.Fatalf("Cannot access '%s': %s.\n", path, err)
}
294
295// divideIntoLargeClusters divides the input set of PPDs into clusters of PPDs
296// with the same content (data). The output slice contains the resultant
297// clusters saved as a list of PPD names.
298func divideIntoLargeClusters(ppds []PPD) [][]string {
299	type ppdTypeDefinition struct {
300		cupsFilter             string
301		cupsModelNumber        string
302		cupsPreFilter          string
303		driverName             string
304		driverType             string
305		foomaticRIPCommandLine string
306	}
307
308	groups := make(map[ppdTypeDefinition][]int)
309	for iPPD, ppd := range ppds {
310		chosenKeywords := make(map[string][]string)
311		for _, st := range ppd.data {
312			switch st.keyword {
313			case "cupsFilter", "cupsFilter2", "cupsModelNumber", "cupsPreFilter", "FoomaticRIPCommandLine":
314				chosenKeywords[st.keyword] = append(chosenKeywords[st.keyword], st.value)
315			case "driverName", "driverType":
316				chosenKeywords[st.keyword] = append(chosenKeywords[st.keyword], st.option)
317			}
318		}
319		if values, ok := chosenKeywords["cupsFilter2"]; ok {
320			chosenKeywords["cupsFilter"] = values
321			delete(chosenKeywords, "cupsFilter2")
322		}
323		var hash ppdTypeDefinition
324		for keyword, values := range chosenKeywords {
325			sort.Slice(values, func(i, j int) bool { return values[i] < values[j] })
326			switch keyword {
327			case "cupsFilter":
328				hash.cupsFilter = strings.Join(values, " | ")
329			case "cupsModelNumber":
330				hash.cupsModelNumber = strings.Join(values, " | ")
331			case "cupsPreFilter":
332				hash.cupsPreFilter = strings.Join(values, " | ")
333			case "driverName":
334				hash.driverName = strings.Join(values, " | ")
335			case "driverType":
336				hash.driverType = strings.Join(values, " | ")
337			case "FoomaticRIPCommandLine":
338				hash.foomaticRIPCommandLine = strings.Join(values, " | ")
339			}
340		}
341		groups[hash] = append(groups[hash], iPPD)
342	}
343
344	// Sort every group by originalDataSize(decreasing), name(alphabetically).
345	for _, ppdIDs := range groups {
346		sort.Slice(ppdIDs, func(i, j int) bool {
347			p1 := ppdIDs[i]
348			p2 := ppdIDs[j]
349			if ppds[p1].originalDataSize == ppds[p2].originalDataSize {
350				return ppds[p1].name < ppds[p2].name
351			}
352			return ppds[p1].originalDataSize > ppds[p2].originalDataSize
353		})
354	}
355
356	// Convert groups to a slice of slices with names.
357	groupsSlice := make([][]string, 0, len(groups))
358	for _, group := range groups {
359		names := make([]string, len(group))
360		for i, iPPD := range group {
361			names[i] = ppds[iPPD].name
362		}
363		groupsSlice = append(groupsSlice, names)
364	}
365
366	sort.Slice(groupsSlice, func(i, j int) bool {
367		return groupsSlice[i][0] < groupsSlice[j][0]
368	})
369
370	return groupsSlice
371}
372
373// compareSameSizePPDs is a helper function for divideIntoSmallClusters. It
374// divides the set of PPDs into clusters of PPDs with the same data. The input
375// PPDs must have the same size of data field. The function returns resultant
376// clusters as slices with PPDs names.
377func compareSameSizePPDs(ppds []PPD) [][]string {
378	// This map holds PPDID->groupID. At the beginning, every PPD is assigned
379	// to a one-element group.
380	ppdsGroups := make([]int, len(ppds))
381	for i := range ppdsGroups {
382		ppdsGroups[i] = i
383	}
384
385	// Find PPDs with the same data and assign them to the same group.
386	for i1, e1 := range ppds {
387		if ppdsGroups[i1] != i1 {
388			// This PPD was already assigned.
389			continue
390		}
391		for i2 := i1 + 1; i2 < len(ppds); i2++ {
392			e2 := ppds[i2]
393			if ppdsGroups[i2] != i2 {
394				// This PPD was already assigned.
395				continue
396			}
397			// Compare data.
398			match := true
399			for ip, s1 := range e1.data {
400				s2 := e2.data[ip]
401				if s1 != s2 {
402					match = false
403					break
404				}
405			}
406			if match {
407				// Assign i2 to the same group as i1.
408				ppdsGroups[i2] = i1
409			}
410		}
411	}
412
413	// This map contains groupID->[]PPDID.
414	groups := make(map[int][]int)
415	for iPPD, iGroup := range ppdsGroups {
416		groups[iGroup] = append(groups[iGroup], iPPD)
417	}
418	// Sort every group by originalDataSize(decreasing), name(alphabetically).
419	for _, ppdIDs := range groups {
420		sort.Slice(ppdIDs, func(i, j int) bool {
421			p1 := ppdIDs[i]
422			p2 := ppdIDs[j]
423			if ppds[p1].originalDataSize == ppds[p2].originalDataSize {
424				return ppds[p1].name < ppds[p2].name
425			}
426			return ppds[p1].originalDataSize > ppds[p2].originalDataSize
427		})
428	}
429
430	// Convert groups to a slice of slices with names.
431	groupsSlice := make([][]string, 0, len(groups))
432	for _, group := range groups {
433		names := make([]string, len(group))
434		for i, iPPD := range group {
435			names[i] = ppds[iPPD].name
436		}
437		groupsSlice = append(groupsSlice, names)
438	}
439
440	return groupsSlice
441}
442
443// divideIntoSmallClusters divides the input set of PPDs into clusters of PPDs
444// with the same content (data). The output slice contains the resultant
445// clusters saved as a list of PPD names.
446func divideIntoSmallClusters(ppds []PPD) [][]string {
447
448	type ppdHash struct {
449		dataSize        int
450		firstStatement  Statement
451		middleStatement Statement
452		lastStatement   Statement
453	}
454
455	ppdsByHash := make(map[ppdHash][]PPD)
456	for _, ppd := range ppds {
457		var hash ppdHash
458		hash.dataSize = len(ppd.data)
459		hash.firstStatement = ppd.data[0]
460		hash.middleStatement = ppd.data[len(ppd.data)/2]
461		hash.lastStatement = ppd.data[len(ppd.data)-1]
462		ppdsByHash[hash] = append(ppdsByHash[hash], ppd)
463	}
464
465	chGroups := make(chan [][]string, len(ppdsByHash))
466	for _, ppdsToCompare := range ppdsByHash {
467		go func(ppdsToCompare []PPD) {
468			chGroups <- compareSameSizePPDs(ppdsToCompare)
469		}(ppdsToCompare)
470	}
471	var groups [][]string
472	for range ppdsByHash {
473		groups = append(groups, <-chGroups...)
474	}
475	close(chGroups)
476
477	sort.Slice(groups, func(i, j int) bool {
478		return groups[i][0] < groups[j][0]
479	})
480
481	return groups
482}
483
// saveClustersToFile creates a new file at the given path and saves there
// the given list of clusters, one cluster per line with names separated by
// tabs. In case of any error, including a failed write, the function
// terminates the program.
func saveClustersToFile(clusters [][]string, path string) {
	file, err := os.Create(path)
	if err != nil {
		log.Fatalf("Cannot create a file %s: %s.\n", path, err)
	}
	defer file.Close()
	// Buffer the writes and check for errors; the original unchecked
	// WriteString calls could silently produce a truncated file.
	writer := bufio.NewWriter(file)
	for _, cluster := range clusters {
		if _, err := writer.WriteString(strings.Join(cluster, "\t") + "\n"); err != nil {
			log.Fatalf("Cannot write to the file %s: %s.\n", path, err)
		}
	}
	if err := writer.Flush(); err != nil {
		log.Fatalf("Cannot write to the file %s: %s.\n", path, err)
	}
}
498
// createDirectoryWithPPDs creates the directory given in the parameter
// pathTrg and populates it with the given set of files from the directory
// defined in pathSrc. In case of any error the function terminates the
// program.
func createDirectoryWithPPDs(pathSrc string, filenames []string, pathTrg string) {
	if err := os.MkdirAll(pathTrg, 0755); err != nil {
		log.Fatalf("Cannot create a directory '%s': %s.\n", pathTrg, err)
	}
	for _, filename := range filenames {
		source := filepath.Join(pathSrc, filename)
		target := filepath.Join(pathTrg, filename)
		// Hard links are used instead of copies to save disk space.
		if err := os.Link(source, target); err != nil {
			log.Fatalf("Cannot create a hard link %s for the file %s: %s.\n", target, source, err)
		}
	}
}
514
515func commandCompare(args []string) {
516	const filenameLargeClusters = "large_clusters.txt"
517	const filenameSmallClusters = "small_clusters.txt"
518	const dirnameCorePPDs = "ppds_core"
519	const dirnameExtPPDs = "ppds_ext"
520
521	flags := flag.NewFlagSet("compare", flag.ExitOnError)
522	flagInput := flags.String("input", "ppds_all", "Directory with PPD files.")
523	flagOutput := flags.String("output", ".", "Directory to save results. It is created if not exists.")
524	flags.Parse(args)
525
526	if len(flags.Args()) > 0 {
527		log.Fatal("Unknown parameter. Run with -h or --help to see the list of supported parameters.")
528	}
529
530	pathLargeClusters := filepath.Join(*flagOutput, filenameLargeClusters)
531	pathSmallClusters := filepath.Join(*flagOutput, filenameSmallClusters)
532	pathCorePPDs := filepath.Join(*flagOutput, dirnameCorePPDs)
533	pathExtPPDs := filepath.Join(*flagOutput, dirnameExtPPDs)
534
535	checkNotExists(pathLargeClusters)
536	checkNotExists(pathSmallClusters)
537	checkNotExists(pathCorePPDs)
538	checkNotExists(pathExtPPDs)
539
540	fmt.Println("Reading a list of PPD files from the directory...")
541	filenames := listFilenamesFromDirectory(*flagInput)
542	fmt.Printf("Found %d files.\n", len(filenames))
543
544	fmt.Println("Processing all files...")
545	ppds := make([]PPD, len(filenames))
546	var wg sync.WaitGroup
547	for i, filename := range filenames {
548		wg.Add(1)
549		go func(i int, filename string) {
550			defer wg.Done()
551			path := filepath.Join(*flagInput, filename)
552			reader, err := os.Open(path)
553			if err != nil {
554				log.Fatalf("Cannot open the file %s: %s.\n", path, err)
555			}
556			ppds[i] = parseAndNormalizePPDFile(reader, filename)
557		}(i, filename)
558	}
559	wg.Wait()
560	fmt.Println("Done.")
561
562	fmt.Println("Calculating small clusters...")
563	groupsSmall := divideIntoSmallClusters(ppds)
564	fmt.Printf("Done. The number of small clusters: %d.\n", len(groupsSmall))
565
566	fmt.Println("Calculating large clusters...")
567	groupsLarge := divideIntoLargeClusters(ppds)
568	fmt.Printf("Done. The number of large clusters: %d.\n", len(groupsLarge))
569
570	filenamesCore := make([]string, 0, len(groupsLarge))
571	setFilenameCore := make(map[string]bool)
572	for _, group := range groupsLarge {
573		filenamesCore = append(filenamesCore, group[0])
574		setFilenameCore[group[0]] = true
575	}
576	filenamesExt := make([]string, 0, len(groupsSmall))
577	for _, group := range groupsSmall {
578		if !setFilenameCore[group[0]] {
579			filenamesExt = append(filenamesExt, group[0])
580		}
581	}
582
583	// Save results.
584	createDirectoryWithPPDs(*flagInput, filenamesCore, pathCorePPDs)
585	createDirectoryWithPPDs(*flagInput, filenamesExt, pathExtPPDs)
586	saveClustersToFile(groupsSmall, pathSmallClusters)
587	saveClustersToFile(groupsLarge, pathLargeClusters)
588}
589
590func commandDownload(args []string) {
591	const urlPPD = "https://printerconfigurations.googleusercontent.com/chromeos_printing/ppds_for_metadata_v3/"
592	const maxNumberOfParallelDownloads = 4
593
594	flags := flag.NewFlagSet("download", flag.ExitOnError)
595	flagOutput := flags.String("output", "ppds_all", "Directory to save PPD files, it cannot exist.")
596	flags.Parse(args)
597
598	if len(flags.Args()) > 0 {
599		log.Fatal("Unknown parameter. Run with -h or --help to see the list of supported parameters.")
600	}
601	checkNotExists(*flagOutput)
602	if err := os.MkdirAll(*flagOutput, 0755); err != nil {
603		log.Fatalf("Cannot create a directory '%s': %s.\n", *flagOutput, err)
604	}
605
606	fmt.Println("Downloading a list of PPD files from the index...")
607	filenames := downloadFilenamesFromPPDIndex()
608	fmt.Printf("Found %d files.\n", len(filenames))
609
610	fmt.Println("Downloading PPD files...")
611	chFilenames := make(chan string)
612	var wgEnd sync.WaitGroup
613	for i := 0; i < maxNumberOfParallelDownloads; i++ {
614		wgEnd.Add(1)
615		go func() {
616			defer wgEnd.Done()
617			for filename := range chFilenames {
618				reader := downloadFile(urlPPD + filename)
619				path := filepath.Join(*flagOutput, filename)
620				file, err := os.Create(path)
621				if err != nil {
622					log.Fatalf("Cannot create file %s on the disk: %s.\n", path, err)
623				}
624				if _, err = io.Copy(file, reader); err != nil {
625					log.Fatalf("Cannot copy the content of the file %s: %s.\n", path, err)
626				}
627				reader.Close()
628				file.Close()
629			}
630		}()
631	}
632	for _, filename := range filenames {
633		chFilenames <- filename
634	}
635	close(chFilenames)
636	wgEnd.Wait()
637	fmt.Println("Done")
638}
639
// usageText is printed when no valid command is given. The directory names
// mentioned here must match the constants dirnameCorePPDs ("ppds_core") and
// dirnameExtPPDs ("ppds_ext") used by commandCompare.
const usageText = `
The first parameter must be one of the following commands:

  download - downloads all PPDs from the index to the given directory.

  compare - perform two independent clusterizations on the given set of PPD
      files. Two sets of clusters are calculated:
       * a set of large clusters where PPD are grouped together by pipeline
           types;
       * a set of small clusters where PPD are grouped together by their
           similarity.
      For both results a minimal subsets of representative PPDs are calculated.
      In the output directory, the following files and directories are created:
       * large_clusters.txt - a file with PPD names grouped in large clusters
       * small_clusters.txt - a file with PPD names grouped in small clusters
       * ppds_core - a directory with hard links to PPD files representing
           large clusters, each cluster is represented by exactly one PPD file.
           For the full PPD dataset given on the input, this directory is
           supposed to have around ~100 PPD files;
       * ppds_ext - a directory with hard links to PPD files representing
           small clusters, each cluster is represented by exactly one PPD file.
           IF A PPD FILE IS ALREADY PRESENT IN ppds_core IT IS OMITTED. For the
           full PPD dataset given on the input, this directory is supposed to
           have around ~1500 PPD files minus ~100 PPD files already present in
           the ppds_core directory.

Run one of the commands with '-h' or '--help' to get a list of parameters.
`
668
669func main() {
670	if len(os.Args) < 2 {
671		fmt.Println(usageText)
672		return
673	}
674
675	switch os.Args[1] {
676	case "compare":
677		commandCompare(os.Args[2:])
678	case "download":
679		commandDownload(os.Args[2:])
680	default:
681		fmt.Println(usageText)
682	}
683}
684