# Copyright (c) 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import collections
import os
import re

import tracing_project
from tracing_build import check_common


13class _Token(object):
14
15  def __init__(self, data, token_id=None):
16    self.data = data
17    if token_id:
18      self.token_id = token_id
19    else:
20      self.token_id = 'plain'
21
22
class BuildFile(object):
  """Parses build-file text into tokens and rewrites its file-group lists.

  The text is split into an alternating sequence of 'plain' tokens (text to
  preserve verbatim) and group-tagged tokens (file lists that Update() may
  replace). Subclasses supply the format-specific pieces: _TokenRegex,
  _ProcessMatch and _GetReplacementListAsString.
  """

  def __init__(self, text, file_groups):
    """Args:
      text: the full contents of the build file.
      file_groups: iterable of file-group names to recognize in `text`.
    """
    self._file_groups = file_groups
    # list() over a copying comprehension: same result, idiomatic form.
    self._tokens = list(self._Tokenize(text))

  def _Tokenize(self, text):
    """Yields _Token objects that exactly cover `text`.

    Concatenating the data of every yielded token reproduces the input,
    so Write() round-trips unmodified files byte-for-byte.
    """
    rest = text
    token_regex = self._TokenRegex()
    while rest:
      m = token_regex.search(rest)
      if not m:
        # In `rest', we couldn't find a match.
        # So, lump the entire `rest' into a token
        # and stop producing any more tokens.
        yield _Token(rest)
        return
      min_index, end_index, matched_token = self._ProcessMatch(m)

      if min_index > 0:
        # Preserve any text preceding the match as a plain token.
        yield _Token(rest[:min_index])

      yield matched_token
      rest = rest[end_index:]

  def Update(self, files_by_group):
    """Replaces each group token's data with a freshly serialized file list.

    Args:
      files_by_group: dict mapping group name -> list of file names. Tokens
          whose id is not a key of this dict are left untouched.
    """
    for token in self._tokens:
      if token.token_id in files_by_group:
        token.data = self._GetReplacementListAsString(
            token.data,
            files_by_group[token.token_id])

  def Write(self, f):
    """Writes the current (possibly updated) contents to file object `f`."""
    for token in self._tokens:
      f.write(token.data)

  def _ProcessMatch(self, match):
    """Returns (start_index, end_index, token) for a _TokenRegex match."""
    raise NotImplementedError

  def _TokenRegex(self):
    """Returns a compiled regex locating file-group lists in the text."""
    raise NotImplementedError

  def _GetReplacementListAsString(self, existing_list_as_string, filelist):
    """Serializes `filelist` in the same style as the existing list text."""
    raise NotImplementedError


class GypiFile(BuildFile):
  """BuildFile implementation for .gypi files with quoted file-name lists."""

  def _ProcessMatch(self, match):
    """Maps a regex match to a token covering only the list body (group 2).

    Group 1 (the file-group name) becomes the token id, so Update() can
    find this token by group name.
    """
    min_index = match.start(2)
    end_index = match.end(2)
    # match.group(1) is the idiomatic spelling of match.groups()[0].
    token = _Token(match.string[min_index:end_index],
                   token_id=match.group(1))
    return min_index, end_index, token

  def _TokenRegex(self):
    # regexp to match the following:
    #   'file_group_name': [
    #     'path/to/one/file.extension',
    #     'another/file.ex',
    #   ]
    # In the match,
    # group 1 is : 'file_group_name'
    # group 2 is : """  'path/to/one/file.extension',\n  'another/file.ex',\n"""
    regexp_str = r"'(%s)': \[\n(.+?) +\],?\n" % "|".join(self._file_groups)
    return re.compile(regexp_str, re.MULTILINE | re.DOTALL)

  def _GetReplacementListAsString(self, existing_list_as_string, filelist):
    """Serializes `filelist` one quoted entry per line.

    The first existing entry supplies the leading indentation (prefix) and
    the trailing comma (suffix), so the replacement matches the file's
    original formatting.
    """
    list_entry = existing_list_as_string.splitlines()[0]
    prefix, _, suffix = list_entry.split("'")
    # Equivalent to the original nested join: prefix + "'" + name + "'" +
    # suffix + "\n" for each file name, but easier to read.
    return "".join("%s'%s'%s\n" % (prefix, filename, suffix)
                   for filename in filelist)


97def _GroupFiles(file_name_to_group_name_func, filenames):
98  file_groups = collections.defaultdict(lambda: [])
99  for filename in filenames:
100    file_groups[file_name_to_group_name_func(filename)].append(filename)
101  for group in file_groups:
102    file_groups[group].sort()
103  return file_groups
104
105
def _UpdateBuildFile(filename, build_file_class):
  """Rewrites `filename` in place so its file-group lists match the
  known files reported by check_common.

  Args:
    filename: path of the build file to rewrite.
    build_file_class: BuildFile subclass that understands this file format.
  """
  with open(filename, 'r') as source:
    parsed = build_file_class(source.read(), check_common.FILE_GROUPS)
  grouped = _GroupFiles(check_common.GetFileGroupFromFileName,
                        check_common.GetKnownFiles())
  parsed.Update(grouped)
  with open(filename, 'w') as destination:
    parsed.Write(destination)


def UpdateGypi():
  """Regenerates the file lists inside tracing's trace_viewer.gypi."""
  project = tracing_project.TracingProject()
  gypi_path = os.path.join(project.tracing_root_path, 'trace_viewer.gypi')
  _UpdateBuildFile(gypi_path, GypiFile)


def Update():
  """Entry point: regenerates every managed build file (currently the gypi)."""
  UpdateGypi()