/external/chromium-trace/catapult/third_party/closure_linter/closure_linter/

D | javascriptstatetracker_test.py |
     70  start_token = function.start_token
     75  function.start_token.type)
     77  self.assertEquals('function', start_token.string)
     78  self.assertEquals(3, start_token.line_number)
     79  self.assertEquals(0, start_token.start_index)
     93  start_token = function.start_token
     98  function.start_token.type)
    100  self.assertEquals('function', start_token.string)
    101  self.assertEquals(11, start_token.line_number)
    102  self.assertEquals(10, start_token.start_index)
         [all …]

D | aliaspass_test.py |
     32  def _GetTokenByLineAndString(start_token, string, line_number):  argument
     33  for token in start_token:
     41  start_token = testutil.TokenizeSourceAndRunEcmaPass(_TEST_SCOPE_SCRIPT)
     46  alias_pass.Process(start_token)
     57  start_token = testutil.TokenizeSourceAndRunEcmaPass(_TEST_ALIAS_SCRIPT)
     59  alias_pass.Process(start_token)
     61  alias_token = _GetTokenByLineAndString(start_token, 'Event', 4)
     64  my_class_token = _GetTokenByLineAndString(start_token, 'myClass', 9)
     67  component_token = _GetTokenByLineAndString(start_token, 'Component', 17)
     71  event_token = _GetTokenByLineAndString(start_token, 'Event.Something', 17)
         [all …]

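The hits above show the central idiom behind this whole listing: in closure_linter, a token is the head of a linked stream, so iterating over start_token (line 33) walks the rest of the file. A minimal sketch of that idiom and of what a helper like _GetTokenByLineAndString could look like, assuming only the attributes visible in these results (string, line_number, next); the Token class below is a stand-in, not the real closure_linter token:

    class Token(object):
      """Stand-in for a closure_linter token: a linked-list node."""
      def __init__(self, string, line_number):
        self.string = string          # matched text, as asserted at line 77 above
        self.line_number = line_number
        self.next = None              # following token in the stream

      def __iter__(self):
        # Iterating a token yields it and every token after it, which is
        # why the tests can write `for token in start_token:`.
        token = self
        while token:
          yield token
          token = token.next

    def get_token_by_line_and_string(start_token, string, line_number):
      # Plausible body for _GetTokenByLineAndString (lines 32-33): scan the
      # stream for the first token matching both the text and the line.
      for token in start_token:
        if token.line_number == line_number and token.string == string:
          return token
      return None
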
D | error_fixer_test.py |
     36  start_token = testutil.TokenizeSourceAndRunEcmaPass(_TEST_SCRIPT)
     37  second_token = start_token.next
     38  self.error_fixer.HandleFile('test_file', start_token)
     40  self.error_fixer._DeleteToken(start_token)
     45  start_token = testutil.TokenizeSourceAndRunEcmaPass(_TEST_SCRIPT)
     46  fourth_token = start_token.next.next.next
     47  self.error_fixer.HandleFile('test_file', start_token)
     49  self.error_fixer._DeleteTokens(start_token, 3)

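error_fixer_test.py exercises _DeleteToken/_DeleteTokens against that same stream. Assuming, as the start_token.next hits here and the start_token.previous hits in error_fixer.py below suggest, that tokens form a doubly linked list, a deletion could look like the sketch below. This is illustrative only, not the fixer's actual code:

    def delete_tokens(token, token_count):
      # Unlink `token_count` consecutive tokens starting at `token`, the way
      # a call like _DeleteTokens(start_token, 3) (line 49) would need to.
      last = token
      for _ in range(token_count - 1):
        last = last.next
      if token.previous:
        token.previous.next = last.next
      # Deleting the head token leaves the caller holding a stale reference;
      # the real fixer would have to account for that case.
      if last.next:
        last.next.previous = token.previous
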
D | testutil.py |
     57  start_token = TokenizeSource(source)
     59  ecma_pass.Process(start_token)
     60  return start_token
     73  start_token = TokenizeSourceAndRunEcmaPass(source)
     77  tracker.DocFlagPass(start_token, error_handler)
     81  for token in start_token:

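testutil.py lines 57-60 spell out the two-step setup used by every test in this listing: tokenize the source, then run the Ecma metadata pass over the stream so each token carries context metadata. A sketch of that pipeline; the module and class names are inferred from the file names in this listing (ecmametadatapass.py, and runner.py's tokenizer.TokenizeFile call) and may not match the real imports exactly:

    import io

    from closure_linter import ecmametadatapass
    from closure_linter import javascripttokenizer

    def tokenize_source_and_run_ecma_pass(source):
      tokenizer = javascripttokenizer.JavaScriptTokenizer()
      start_token = tokenizer.TokenizeFile(io.StringIO(source))  # cf. runner.py line 78
      ecma_pass = ecmametadatapass.EcmaMetaDataPass()
      ecma_pass.Process(start_token)  # cf. testutil.py line 59
      return start_token              # line 60: the head token is the handle to everything
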
D | runner.py |
     51  def _GetLastNonWhiteSpaceToken(start_token):  argument
     57  for t in start_token:
     78  start_token = tokenizer.TokenizeFile(fileobj)
     79  return start_token, tokenizer.mode
    151  def RunMetaDataPass(start_token, metadata_pass, error_handler, filename=''):  argument
    165  metadata_pass.Process(start_token)
    185  def _RunChecker(start_token, error_handler,  argument
    195  style_checker.Check(start_token,

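runner.py's _GetLastNonWhiteSpaceToken (lines 51-57) is a plain linear scan over the stream. Only the loop is visible in the hits, so the whitespace test and the type names below are assumptions:

    WHITESPACE_TYPES = frozenset(['whitespace', 'blank_line'])  # illustrative names

    def get_last_non_whitespace_token(start_token):
      ret_token = None
      for t in start_token:          # line 57: iterate the whole stream
        if t.type not in WHITESPACE_TYPES:
          ret_token = t
      return ret_token               # None for an all-whitespace file
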
D | scopeutil_test.py |
     31  def _FindContexts(start_token):  argument
     47  for token in start_token:
     74  start_token = testutil.TokenizeSourceAndRunEcmaPass(script)
     76  start_token, ecmametadatapass.EcmaContext.VAR)
    129  start_token = testutil.TokenizeSourceAndRunEcmaPass(source)
    130  for context in _FindContexts(start_token):
    171  self.start_token = testutil.TokenizeSourceAndRunEcmaPass(_TEST_SCRIPT)
    175  for context in _FindContexts(self.start_token):
    193  for context in _FindContexts(self.start_token):

D | tokenutil.py |
     99  def CustomSearch(start_token, func, end_func=None, distance=None,  argument
    118  token = start_token
    148  def Search(start_token, token_types, distance=None, reverse=False):  argument
    164  return CustomSearch(start_token, lambda token: token.IsAnyType(token_types),
    168  def SearchExcept(start_token, token_types, distance=None, reverse=False):  argument
    184  return CustomSearch(start_token,
    189  def SearchUntil(start_token, token_types, end_types, distance=None,  argument
    207  return CustomSearch(start_token, lambda token: token.IsAnyType(token_types),
    451  def GetTokenRange(start_token, end_token):  argument
    464  token = start_token
         [all …]

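tokenutil.py is the hub of this listing: Search, SearchExcept and SearchUntil (lines 148-207) are all thin wrappers that hand a predicate to CustomSearch. A self-contained sketch of that pattern; the walk direction, the distance cutoff, and the end_func semantics are guessed from the signatures and calls shown, so treat this as an approximation rather than the real implementation:

    def custom_search(start_token, func, end_func=None, distance=None,
                      reverse=False):
      # Walk the stream from start_token (line 118: `token = start_token`)
      # until the predicate matches, an optional end predicate fires, or
      # the distance budget runs out.
      token = start_token
      while token and (distance is None or distance > 0):
        if func(token):
          return token
        if end_func and end_func(token):
          return None
        token = token.previous if reverse else token.next
        if distance is not None:
          distance -= 1
      return None

    def search(start_token, token_types, distance=None, reverse=False):
      # Line 164: Search is just CustomSearch with an IsAnyType predicate.
      return custom_search(start_token,
                           lambda token: token.IsAnyType(token_types),
                           None, distance, reverse)
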
D | checker.py |
     66  def Check(self, start_token, limited_doc_checks=False, is_html=False,  argument
     81  self._state_tracker.DocFlagPass(start_token, self._error_handler)
     84  self._alias_pass.Process(start_token)
     90  self._ExecutePass(start_token, self._DependencyPass, stop_token)
     92  self._ExecutePass(start_token, self._LintPass, stop_token)

D | aliaspass.py |
    100  def Process(self, start_token):  argument
    107  if start_token is None:
    111  self._CheckGoogScopeCalls(start_token)
    115  context = start_token.metadata.context
    119  def _CheckGoogScopeCalls(self, start_token):  argument
    127  scope_tokens = [t for t in start_token if IsScopeToken(t)]
    212  token = context.start_token

D | closurizednamespacesinfo.py |
    447  start_token = tokenutil.GetIdentifierStart(token)
    448  if start_token and start_token != token:
    450  identifier = tokenutil.GetIdentifierForToken(start_token)
    452  start_token = token
    454  if (start_token and
    455  start_token.metadata and
    456  start_token.metadata.aliased_symbol and
    457  not start_token.metadata.is_alias_definition):
    458  identifier = start_token.metadata.aliased_symbol

D | scopeutil.py |
     58  last_code_token = parent.start_token.metadata.last_code
     82  reversed(block_context.start_token))
    137  if context.start_token and context.end_token:
    138  statement_tokens = tokenutil.GetTokenRange(context.start_token,

D | statetracker.py |
    316  def __init__(self, start_token):  argument
    323  self.start_token = start_token
    420  for token in self.start_token:
    625  def _GetNextPartialIdentifierToken(start_token):  argument
    638  token = start_token.next
    651  def _GetEndTokenAndContents(start_token):  argument
    664  iterator = start_token
    741  self.start_token = None
    782  def DocFlagPass(self, start_token, error_handler):  argument
    792  if not start_token:
         [all …]

D | error_fixer.py |
    277  start_token = token.attached_object.type_start_token
    279  if start_token.type != Type.DOC_START_BRACE:
    281  len(start_token.string) - len(start_token.string.lstrip()))
    283  start_token = tokenutil.SplitToken(start_token, leading_space)
    285  if token.attached_object.type_end_token == start_token.previous:
    286  token.attached_object.type_end_token = start_token
    288  new_token = Token('{', Type.DOC_START_BRACE, start_token.line,
    289  start_token.line_number)
    290  tokenutil.InsertTokenAfter(new_token, start_token.previous)

D | indentation.py |
    155  start_token = self._PopTo(Type.START_BLOCK)
    157  if start_token:
    158  goog_scope = tokenutil.GoogScopeOrNoneFromStartBlock(start_token.token)
    177  (start_token.line_number)),
    451  start_token = token
    454  if not token or token.line_number != start_token.line_number:
    459  def _AllFunctionPropertyAssignTokens(self, start_token, end_token):  argument
    470  for token in tokenutil.GetTokenRange(start_token, end_token):

D | ecmametadatapass.py |
    118  def __init__(self, context_type, start_token, parent=None):  argument
    134  self.start_token = start_token
    176  return tokenutil.Compare(context1.start_token, context2.start_token)
    337  keyword_token = result.start_token.metadata.last_code
    349  start_block_token = pre_keyword_token.metadata.context.start_token

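ecmametadatapass.py lines 118-176 show the other half of the model: contexts form a tree (context_type, start_token, parent) and compare by where their start tokens sit in the file. A stand-in sketch, using the line_number/start_index attributes asserted in javascriptstatetracker_test.py above as the ordering key; the attribute name `type` is assumed, and the real code delegates the comparison to tokenutil.Compare:

    class EcmaContext(object):
      def __init__(self, context_type, start_token, parent=None):  # line 118
        self.type = context_type        # attribute name assumed
        self.start_token = start_token  # line 134
        self.parent = parent

      @staticmethod
      def Compare(context1, context2):
        # Line 176 orders contexts by token position; a (line, column) key
        # is a reasonable stand-in for tokenutil.Compare.
        key1 = (context1.start_token.line_number, context1.start_token.start_index)
        key2 = (context2.start_token.line_number, context2.start_token.start_index)
        return (key1 > key2) - (key1 < key2)  # cmp-style: -1, 0 or 1
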
D | javascriptstatetracker.py |
    146  start_token = self._block_stack.pop()
    147  if tokenutil.GoogScopeOrNoneFromStartBlock(start_token):

D | checkerbase.py |
    129  def Check(self, start_token, limited_doc_checks=False, is_html=False,  argument
    142  self._ExecutePass(start_token, self._LintPass, stop_token=stop_token)

D | javascriptlintrules.py |
    206  not doc_comment.start_token.previous)
    227  block_start = doc_comment.start_token

/external/antlr/antlr-3.4/runtime/Ruby/lib/antlr3/tree/

D | debug.rb |
     96  def set_token_boundaries( tree, start_token, stop_token )  argument
     97  super( tree, start_token, stop_token )
     98  return unless tree && start_token && stop_token
    100  start_token.token_index, stop_token.token_index )

/external/pdfium/core/src/fpdfapi/fpdf_parser/

D | fpdf_parser_utility.cpp |
    228  FX_BOOL CPDF_SimpleParser::FindTagPair(const CFX_ByteStringC& start_token,  in FindTagPair()  argument
    232  if (!start_token.IsEmpty()) {  in FindTagPair()
    233  if (!SkipWord(start_token)) {  in FindTagPair()

/external/mksh/src/

D | syn.c |
     29  int start_token;  /* token than began nesting (eg, FOR) */  member
    868  if (nesting.start_token) {  in syntaxerr()
    869  c = nesting.start_token;  in syntaxerr()
    909  nesting.start_token = tok;  in nesting_push()
    938  nesting.start_token = 0;  in compile()

/external/antlr/antlr-3.4/runtime/Ruby/lib/antlr3/

D | tree.rb |
    779  def set_token_boundaries( tree, start_token = nil, stop_token = nil )  argument
    782  start_token and start = start_token.index

/external/protobuf/src/google/protobuf/compiler/

D | parser.cc |
   1170  io::Tokenizer::Token start_token;  in ParseExtensions()  local
   1175  start_token = input_->current();  in ParseExtensions()
   1193  end_location.StartAt(start_token);  in ParseExtensions()
   1194  end_location.EndAt(start_token);  in ParseExtensions()

/external/pdfium/core/include/fpdfapi/

D | fpdf_parser.h |
    212  FX_BOOL FindTagPair(const CFX_ByteStringC& start_token,

/external/bison/doc/

D | bison.texi |
  11362  @code{yylex} function. Make sure a variable @code{start_token} is
  11370  if (start_token)
  11372  int t = start_token;
  11373  start_token = 0;

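The bison.texi excerpt (lines 11362-11373) documents a classic trick: to give one grammar several entry points, the scanner hands the parser a synthetic first token that selects the start production, then clears the flag so scanning proceeds normally. The manual's example is C; below is the same control flow rendered in this listing's dominant language, with a stub scanner and token names that are purely illustrative:

    tokens = iter(['NUM', 'PLUS', 'NUM'])   # stub for the real scanner
    start_token = 'START_EXPR'              # set before parsing to pick the entry point

    def yylex():
      global start_token
      if start_token:                       # cf. bison.texi line 11370
        t, start_token = start_token, None  # emit the pseudo-token exactly once
        return t
      return next(tokens, None)             # then defer to the real scanner
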