/external/antlr/antlr-3.4/runtime/Ruby/test/functional/lexer/ |
D | basic.rb |
      20  token = lexer.next_token
      21  token.name.should == 'ZERO'
      23  token = lexer.next_token
      24  token.name.should == '<EOF>'
      30  token_types = lexer.map { |token| token.name }
      38  token = lexer.next_token
      62  token = lexer.next_token
      63  token.name.should == 'ZERO'
      65  token = lexer.next_token
      66  token.name.should == 'ONE'
      [all …]
|
/external/swiftshader/src/OpenGL/compiler/preprocessor/ |
D | DirectiveParser.cpp |
      48  static DirectiveType getDirective(const pp::Token* token)    in getDirective() argument
      64  if (token->type != pp::Token::IDENTIFIER)    in getDirective()
      67  if (token->text == kDirectiveDefine)    in getDirective()
      69  else if (token->text == kDirectiveUndef)    in getDirective()
      71  else if (token->text == kDirectiveIf)    in getDirective()
      73  else if (token->text == kDirectiveIfdef)    in getDirective()
      75  else if (token->text == kDirectiveIfndef)    in getDirective()
      77  else if (token->text == kDirectiveElse)    in getDirective()
      79  else if (token->text == kDirectiveElif)    in getDirective()
      81  else if (token->text == kDirectiveEndif)    in getDirective()
      [all …]
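
getDirective() above simply maps an identifier token's text onto a directive kind, returning a "none" value for anything that is not a known directive. The same mapping in a short Python sketch, using a dict in place of the else-if chain (names here are illustrative, not SwiftShader's API):

    from enum import Enum, auto

    class Directive(Enum):
        NONE = auto()
        DEFINE = auto()
        UNDEF = auto()
        IF = auto()
        IFDEF = auto()
        IFNDEF = auto()
        ELSE = auto()
        ELIF = auto()
        ENDIF = auto()

    # Lookup table standing in for the else-if chain in DirectiveParser.cpp.
    _DIRECTIVES = {
        "define": Directive.DEFINE,
        "undef": Directive.UNDEF,
        "if": Directive.IF,
        "ifdef": Directive.IFDEF,
        "ifndef": Directive.IFNDEF,
        "else": Directive.ELSE,
        "elif": Directive.ELIF,
        "endif": Directive.ENDIF,
    }

    def get_directive(token_type, token_text):
        # Only identifier tokens can name a directive.
        if token_type != "IDENTIFIER":
            return Directive.NONE
        return _DIRECTIVES.get(token_text, Directive.NONE)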
|
/external/antlr/antlr-3.4/runtime/C/src/ |
D | antlr3commontoken.c |
      40  static pANTLR3_STRING getText (pANTLR3_COMMON_TOKEN token);
      41  static void setText (pANTLR3_COMMON_TOKEN token, pANTLR3_STRING text);
      42  static void setText8 (pANTLR3_COMMON_TOKEN token, pANTLR3_UINT8 text);
      43  static ANTLR3_UINT32 getType (pANTLR3_COMMON_TOKEN token);
      44  static void setType (pANTLR3_COMMON_TOKEN token, ANTLR3_UINT32 type);
      45  static ANTLR3_UINT32 getLine (pANTLR3_COMMON_TOKEN token);
      46  static void setLine (pANTLR3_COMMON_TOKEN token, ANTLR3_UINT32 line);
      47  static ANTLR3_INT32 getCharPositionInLine (pANTLR3_COMMON_TOKEN token);
      48  static void setCharPositionInLine (pANTLR3_COMMON_TOKEN token, ANTLR3_INT32 pos);
      49  static ANTLR3_UINT32 getChannel (pANTLR3_COMMON_TOKEN token);
      [all …]
|
/external/libxml2/doc/ |
D | apibuild.py |
      406  def push(self, token):    argument
      407  self.tokens.insert(0, token);
      414  def token(self):    member in CLexer
      633  def error(self, msg, token=-1):    argument
      638  if token != -1:
      639  print("Got token ", token)
      643  def debug(self, msg, token=-1):    argument
      645  if token != -1:
      646  print("Got token ", token)
      677  def parseComment(self, token):    argument
      [all …]
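
push() and token() above form a one-token push-back queue: token() hands out the next token, and push() reinserts one at the front so it is seen again. A self-contained sketch of that pattern (simplified; the real CLexer also tracks line numbers and comment text):

    from collections import deque

    class PushBackLexer:
        """Minimal push-back wrapper over an iterator of tokens."""

        def __init__(self, tokens):
            self._pending = deque()      # tokens pushed back, served first
            self._source = iter(tokens)

        def token(self):
            if self._pending:
                return self._pending.popleft()
            return next(self._source, None)   # None signals end of input

        def push(self, tok):
            # Reinsert at the front, mirroring tokens.insert(0, token) in apibuild.py.
            self._pending.appendleft(tok)

    lex = PushBackLexer(["typedef", "struct", "_xmlNode", "xmlNode"])
    t = lex.token()        # 'typedef'
    lex.push(t)            # not ready for it yet, put it back
    assert lex.token() == "typedef"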
|
/external/libxml2/ |
D | rngparser.c |
      59   const xmlChar *token;    member
      92   tokenPtr token;    member
      252  tokenPtr token;    in xmlCRNGNextToken() local
      256  token = &(ctxt->tokens[(ctxt->firstToken + ctxt->nbTokens) % MAX_TOKEN]);    in xmlCRNGNextToken()
      257  token->toktype = CRNG_NONE;    in xmlCRNGNextToken()
      291  token->toklen = cur - ctxt->cur;    in xmlCRNGNextToken()
      292  token->token = xmlDictLookup(ctxt->dict, ctxt->cur, token->toklen);    in xmlCRNGNextToken()
      293  token->toktype = CRNG_LITERAL_SEGMENT;    in xmlCRNGNextToken()
      294  token->prefix = NULL;    in xmlCRNGNextToken()
      308  token->toklen = 2;    in xmlCRNGNextToken()
      [all …]
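
The index expression (firstToken + nbTokens) % MAX_TOKEN above treats the token array as a fixed-size ring buffer: each new token is written just past the last queued one, wrapping around. A small Python sketch of the same bookkeeping (illustrative only; the C code stores full token structs with dictionary-interned strings):

    MAX_TOKEN = 10   # lookahead window size, as in rngparser.c

    class TokenRing:
        def __init__(self):
            self.slots = [None] * MAX_TOKEN
            self.first = 0    # index of the oldest queued token
            self.count = 0    # how many tokens are currently queued

        def push(self, tok):
            assert self.count < MAX_TOKEN, "lookahead window full"
            # Write just past the last queued token, wrapping around the array.
            self.slots[(self.first + self.count) % MAX_TOKEN] = tok
            self.count += 1

        def pop(self):
            assert self.count > 0, "no queued tokens"
            tok = self.slots[self.first]
            self.first = (self.first + 1) % MAX_TOKEN
            self.count -= 1
            return tok

    ring = TokenRing()
    for word in ("element", "foo", "{"):
        ring.push(word)
    assert ring.pop() == "element"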
|
/external/doclava/src/com/google/doclava/apicheck/ |
D | ApiFile.java |
      74   String token = tokenizer.getToken();    in parseApi() local
      75   if (token == null) {    in parseApi()
      78   if ("package".equals(token)) {    in parseApi()
      81   throw new ApiParseException("expected package got " + token, tokenizer.getLine());    in parseApi()
      93   String token;    in parsePackage() local
      97   token = tokenizer.requireToken();    in parsePackage()
      98   assertIdent(tokenizer, token);    in parsePackage()
      99   name = token;    in parsePackage()
      101  token = tokenizer.requireToken();    in parsePackage()
      102  if (!"{".equals(token)) {    in parsePackage()
      [all …]
|
/external/antlr/antlr-3.4/runtime/Python/tests/ |
D | t010lexer.py |
      27  token = lexer.nextToken()
      28  assert token.type == self.lexerModule.IDENTIFIER
      29  assert token.start == 0, token.start
      30  assert token.stop == 5, token.stop
      31  assert token.text == 'foobar', token.text
      33  token = lexer.nextToken()
      34  assert token.type == self.lexerModule.WS
      35  assert token.start == 6, token.start
      36  assert token.stop == 6, token.stop
      37  assert token.text == ' ', token.text
      [all …]
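
This and the other tNNNlexer.py tests below all drive a generated ANTLR 3 lexer the same way: wrap the input in a string stream, call nextToken() until EOF, and check each token's type, start/stop character offsets, and text. A minimal sketch of that loop against the antlr3 Python runtime (the generated lexer class is a placeholder here; substitute whatever class ANTLR produced from your grammar):

    import antlr3

    def dump_tokens(GeneratedLexer, text):
        # GeneratedLexer is a placeholder for the class ANTLR generated, e.g. from t010lexer.g.
        stream = antlr3.StringStream(text)
        lexer = GeneratedLexer(stream)
        while True:
            token = lexer.nextToken()
            if token.type == antlr3.EOF:
                break
            print(token.type, token.start, token.stop, repr(token.text))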
|
D | t011lexer.py |
      27  token = lexer.nextToken()
      28  assert token.type == self.lexerModule.IDENTIFIER
      29  assert token.start == 0, token.start
      30  assert token.stop == 5, token.stop
      31  assert token.text == 'foobar', token.text
      33  token = lexer.nextToken()
      34  assert token.type == self.lexerModule.WS
      35  assert token.start == 6, token.start
      36  assert token.stop == 6, token.stop
      37  assert token.text == ' ', token.text
      [all …]
|
D | t004lexer.py |
      27  token = lexer.nextToken()
      28  assert token.type == self.lexerModule.FOO
      29  assert token.start == 0, token.start
      30  assert token.stop == 0, token.stop
      31  assert token.text == 'f', token.text
      33  token = lexer.nextToken()
      34  assert token.type == self.lexerModule.FOO
      35  assert token.start == 1, token.start
      36  assert token.stop == 2, token.stop
      37  assert token.text == 'fo', token.text
      [all …]
|
D | t008lexer.py |
      27  token = lexer.nextToken()
      28  assert token.type == self.lexerModule.FOO
      29  assert token.start == 0, token.start
      30  assert token.stop == 0, token.stop
      31  assert token.text == 'f', token.text
      33  token = lexer.nextToken()
      34  assert token.type == self.lexerModule.FOO
      35  assert token.start == 1, token.start
      36  assert token.stop == 2, token.stop
      37  assert token.text == 'fa', token.text
      [all …]
|
D | t009lexer.py |
      27  token = lexer.nextToken()
      28  assert token.type == self.lexerModule.DIGIT
      29  assert token.start == 0, token.start
      30  assert token.stop == 0, token.stop
      31  assert token.text == '0', token.text
      33  token = lexer.nextToken()
      34  assert token.type == self.lexerModule.DIGIT
      35  assert token.start == 1, token.start
      36  assert token.stop == 1, token.stop
      37  assert token.text == '8', token.text
      [all …]
|
D | t005lexer.py |
      27  token = lexer.nextToken()
      28  assert token.type == self.lexerModule.FOO
      29  assert token.start == 0, token.start
      30  assert token.stop == 1, token.stop
      31  assert token.text == 'fo', token.text
      33  token = lexer.nextToken()
      34  assert token.type == self.lexerModule.FOO
      35  assert token.start == 2, token.start
      36  assert token.stop == 4, token.stop
      37  assert token.text == 'foo', token.text
      [all …]
|
/external/ImageMagick/MagickWand/tests/ |
D | script-token-test-results.txt |
      1   l=8, c=1, stat=0, len=64, token="-option"
      2   l=8, c=9, stat=0, len=64, token="key"
      3   l=9, c=1, stat=0, len=64, token="+reset"
      4   l=9, c=10, stat=0, len=64, token="imbedded#hash"
      5   l=11, c=1, stat=0, len=64, token="This is a single token"
      6   l=13, c=1, stat=0, len=64, token="And\ 'even '"more "complex"
      7   l=15, c=1, stat=0, len=64, token="Backslash chars \n are returned as is"
      8   l=16, c=1, stat=0, len=64, token="regardless \n of quoting"
      9   l=18, c=1, stat=0, len=64, token="Single quote escapes"
      10  l=19, c=2, stat=0, len=64, token="'"
      [all …]
|
/external/apache-xml/src/main/java/org/apache/xml/utils/ |
D | StylesheetPIHandler.java |
      156  String token = "";    in processingInstruction() local
      160  token = tokenizer.nextToken();    in processingInstruction()
      164  (token.equals(" ") || token.equals("\t") || token.equals("=")))    in processingInstruction()
      167  String name = token;    in processingInstruction()
      170  token = tokenizer.nextToken();    in processingInstruction()
      172  (token.equals(" " ) || token.equals("\t") || token.equals("=")))    in processingInstruction()
      173  token = tokenizer.nextToken();    in processingInstruction()
      174  type = token.substring(1, token.length() - 1);    in processingInstruction()
      179  token = tokenizer.nextToken();    in processingInstruction()
      181  (token.equals(" " ) || token.equals("\t") || token.equals("=")))    in processingInstruction()
      [all …]
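
The loop above walks the data of an <?xml-stylesheet ...?> processing instruction, skipping whitespace and "=" tokens so that each pseudo-attribute name can be paired with its quoted value. A rough Python analog using a regular expression instead of a StringTokenizer (function name and regex are illustrative only, not the Apache code's approach):

    import re

    def parse_pseudo_attributes(pi_data):
        """Parse 'href="foo.xsl" type="text/xsl"' into a dict, quotes stripped."""
        return {
            name: value
            for name, value in re.findall(r'(\w+)\s*=\s*["\']([^"\']*)["\']', pi_data)
        }

    attrs = parse_pseudo_attributes('href="style.xsl" type="text/xsl"')
    assert attrs["type"] == "text/xsl"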
|
/external/vulkan-validation-layers/tests/ |
D | vktestframework.cpp |
      353  const char *token = strtok(config, delims);    in ProcessConfigFile() local
      354  while (token) {    in ProcessConfigFile()
      364  if (strcmp(token, "MaxLights") == 0)    in ProcessConfigFile()
      366  else if (strcmp(token, "MaxClipPlanes") == 0)    in ProcessConfigFile()
      368  else if (strcmp(token, "MaxTextureUnits") == 0)    in ProcessConfigFile()
      370  else if (strcmp(token, "MaxTextureCoords") == 0)    in ProcessConfigFile()
      372  else if (strcmp(token, "MaxVertexAttribs") == 0)    in ProcessConfigFile()
      374  else if (strcmp(token, "MaxVertexUniformComponents") == 0)    in ProcessConfigFile()
      376  else if (strcmp(token, "MaxVaryingFloats") == 0)    in ProcessConfigFile()
      378  else if (strcmp(token, "MaxVertexTextureImageUnits") == 0)    in ProcessConfigFile()
      [all …]
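
ProcessConfigFile() above tokenizes the whole config with strtok() and compares each token against known resource-limit names, reading the following token as that limit's value. A compact Python sketch of the same alternating name/value scan (the limit names and defaults shown are only a small sample of the real table):

    def parse_limits(config_text):
        limits = {"MaxLights": 32, "MaxClipPlanes": 6, "MaxTextureUnits": 32}  # defaults
        tokens = config_text.split()          # rough stand-in for strtok(config, delims)
        for name, value in zip(tokens[0::2], tokens[1::2]):
            if name in limits:
                limits[name] = int(value)
        return limits

    assert parse_limits("MaxLights 8\nMaxClipPlanes 6")["MaxLights"] == 8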
|
/external/antlr/antlr-3.4/runtime/ObjC/Framework/test/runtime/token/ |
D | ANTLRCommonTokenTest.m |
      17  ANTLRCommonToken *token = [[ANTLRCommonToken newToken] retain];
      18  STAssertNotNil(token, @"Token was nil");
      19  [token release];
      24  ANTLRCommonToken *token = [[ANTLRCommonToken eofToken] retain];
      25  STAssertNotNil(token, @"Token was nil");
      26  …STAssertEquals(token.type, (NSInteger)ANTLRTokenTypeEOF, @"Token was not of type ANTLRTokenTypeEOF…
      27  [token release];
      32  ANTLRCommonToken *token = [[ANTLRCommonToken newToken:ANTLRTokenTypeUP] retain];
      33  token.text = @"<UP>";
      34  STAssertNotNil(token, @"Token was nil");
      [all …]
|
/external/libnl/lib/route/cls/ |
D | ematch_syntax.y |
      62  %token <i> ERROR LOGIC NOT OPERAND NUMBER ALIGN LAYER
      63  %token <i> KW_OPEN "("
      64  %token <i> KW_CLOSE ")"
      65  %token <i> KW_PLUS "+"
      66  %token <i> KW_MASK "mask"
      67  %token <i> KW_SHIFT ">>"
      68  %token <i> KW_AT "at"
      69  %token <i> EMATCH_CMP "cmp"
      70  %token <i> EMATCH_NBYTE "pattern"
      71  %token <i> EMATCH_TEXT "text"
      [all …]
|
/external/jsoncpp/src/lib_json/ |
D | json_reader.cpp |
      131  Token token;    in parse() local
      132  skipCommentTokens(token);    in parse()
      139  token.type_ = tokenError;    in parse()
      140  token.start_ = beginDoc;    in parse()
      141  token.end_ = endDoc;    in parse()
      144  token);    in parse()
      152  Token token;    in readValue() local
      153  skipCommentTokens(token);    in readValue()
      169  switch (token.type_) {    in readValue()
      171  successful = readObject(token);    in readValue()
      [all …]
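
readValue() above skips comment tokens and then dispatches on the token type, recursing into readObject()/readArray() for structured values. A toy Python dispatcher showing the shape of that recursion (not jsoncpp's API; token handling is greatly simplified):

    def read_value(token, tokens):
        """Dispatch on the current token, pulling more tokens from the iterator as needed."""
        if token == "[":                      # analogous to readArray()
            values = []
            for token in tokens:
                if token == "]":
                    break
                values.append(read_value(token, tokens))
            return values
        if token.lstrip("-").isdigit():       # analogous to decodeNumber()
            return int(token)
        return token                          # everything else treated as a scalar here

    toks = iter(["[", "1", "[", "2", "]", "x", "]"])
    assert read_value(next(toks), toks) == [1, [2], "x"]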
|
/external/autotest/client/site_tests/platform_Pkcs11Events/ |
D | platform_Pkcs11Events.py |
      18  for token in token_list:
      19  shutil.rmtree(token, ignore_errors=True)
      20  pkcs11.copytree_with_ownership(pkcs11.TMP_CHAPS_DIR, token)
      23  for token in token_list:
      25  (token, token))
      27  utils.system('chaps_client --unload --path=%s' % token)
      30  for token in token_list:
      32  (token, token))
      33  for token in token_list:
      34  utils.system('chaps_client --unload --path=%s' % token)
      [all …]
|
/external/snakeyaml/src/main/java/org/yaml/snakeyaml/parser/ |
D | ParserImpl.java |
      183  StreamStartToken token = (StreamStartToken) scanner.getToken();    in produce() local
      184  Event event = new StreamStartEvent(token.getStartMark(), token.getEndMark());    in produce()
      196  Token token = scanner.peekToken();    in produce() local
      197  Mark startMark = token.getStartMark();    in produce()
      220  Token token = scanner.peekToken();    in produce() local
      221  Mark startMark = token.getStartMark();    in produce()
      227  token = scanner.getToken();    in produce()
      228  Mark endMark = token.getEndMark();    in produce()
      235  StreamEndToken token = (StreamEndToken) scanner.getToken();    in produce() local
      236  event = new StreamEndEvent(token.getStartMark(), token.getEndMark());    in produce()
      [all …]
|
/external/v8/testing/gmock/scripts/generator/cpp/ |
D | ast.py |
      287  for token in self.alias:
      288  if token is not None and name == token.name:
      342  for token in token_list:
      343  if token.name == node.name:
      467  token = tokens[end]
      469  if token.name == '<':
      471  elif token.name == '>':
      508  token = tokens[i]
      509  if token.name == '<':
      516  elif token.name == ',':
      [all …]
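
The '<' / '>' cases above track template-bracket nesting while walking a token list. A stripped-down Python version of that scan, which returns the index of the '>' closing a given '<' (illustrative; the real ast.py works on its own Token objects and also splits arguments on top-level commas):

    def find_matching_angle_bracket(names, start):
        """Return the index of the '>' that closes the '<' at names[start]."""
        assert names[start] == "<"
        depth = 0
        for i in range(start, len(names)):
            if names[i] == "<":
                depth += 1
            elif names[i] == ">":
                depth -= 1
                if depth == 0:
                    return i
        raise ValueError("unbalanced template brackets")

    tokens = ["std", "::", "map", "<", "int", ",", "vector", "<", "int", ">", ">", "x"]
    assert find_matching_angle_bracket(tokens, 3) == 10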
|
/external/googletest/googlemock/scripts/generator/cpp/ |
D | ast.py |
      287  for token in self.alias:
      288  if token is not None and name == token.name:
      342  for token in token_list:
      343  if token.name == node.name:
      467  token = tokens[end]
      469  if token.name == '<':
      471  elif token.name == '>':
      508  token = tokens[i]
      509  if token.name == '<':
      516  elif token.name == ',':
      [all …]
|
/external/owasp/sanitizer/src/main/org/owasp/html/ |
D | CssGrammar.java |
      79   if (!(it.hasTokenAfterSpace() && ":".equals(it.token()))) {    in parsePropertyGroup()
      96   String token = it.token();    in parsePropertyValue() local
      103  handler.startFunction(token);    in parsePropertyValue()
      105  handler.endFunction(token);    in parsePropertyValue()
      108  handler.identifier(token);    in parsePropertyValue()
      111  if (token.length() == 4 || token.length() == 7) {    in parsePropertyValue()
      112  handler.hash(token);    in parsePropertyValue()
      116  handler.quotedString(token);    in parsePropertyValue()
      119  handler.url(token);    in parsePropertyValue()
      124  handler.quantity(token);    in parsePropertyValue()
      [all …]
|
/external/google-breakpad/src/testing/scripts/generator/cpp/ |
D | ast.py |
      286  for token in self.alias:
      287  if token is not None and name == token.name:
      341  for token in token_list:
      342  if token.name == node.name:
      466  token = tokens[end]
      468  if token.name == '<':
      470  elif token.name == '>':
      506  token = tokens[i]
      507  if token.name == '<':
      514  elif token.name == ',':
      [all …]
|
/external/libchrome/base/strings/ |
D | string_tokenizer_unittest.cc |
      20  EXPECT_EQ(string("this"), t.token());    in TEST()
      23  EXPECT_EQ(string("is"), t.token());    in TEST()
      26  EXPECT_EQ(string("a"), t.token());    in TEST()
      29  EXPECT_EQ(string("test"), t.token());    in TEST()
      40  EXPECT_EQ(string("this"), t.token());    in TEST()
      43  EXPECT_EQ(string("is"), t.token());    in TEST()
      46  EXPECT_EQ(string("a"), t.token());    in TEST()
      49  EXPECT_EQ(string("test"), t.token());    in TEST()
      62  EXPECT_EQ(string("this"), t.token());    in TEST()
      65  EXPECT_EQ(string(" "), t.token());    in TEST()
      [all …]
|