
Searched refs:token (Results 1 – 25 of 2185) sorted by relevance


/external/antlr/runtime/Ruby/test/functional/lexer/
basic.rb
20 token = lexer.next_token
21 token.name.should == 'ZERO'
23 token = lexer.next_token
24 token.name.should == '<EOF>'
30 token_types = lexer.map { |token| token.name }
38 token = lexer.next_token
62 token = lexer.next_token
63 token.name.should == 'ZERO'
65 token = lexer.next_token
66 token.name.should == 'ONE'
[all …]
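The test above pulls tokens one at a time with next_token and compares each token's name until it sees '<EOF>'. A minimal Python sketch of that pull-style loop, using a toy stand-in lexer rather than the ANTLR Ruby runtime, might look like this:

```python
from collections import namedtuple

# Toy stand-in for the generated lexer in the test above; any object that
# exposes next_token() and returns something with a .name attribute works.
Token = namedtuple('Token', 'name')

class ToyLexer:
    def __init__(self, names):
        self._names = iter(names)

    def next_token(self):
        return Token(next(self._names, '<EOF>'))

def collect_names(lexer):
    """Pull tokens until the lexer reports '<EOF>', mirroring the test's loop."""
    names = []
    while True:
        token = lexer.next_token()
        if token.name == '<EOF>':
            return names
        names.append(token.name)

print(collect_names(ToyLexer(['ZERO', 'ONE'])))  # -> ['ZERO', 'ONE']
```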
/external/swiftshader/src/OpenGL/compiler/preprocessor/
DirectiveParser.cpp
49 static DirectiveType getDirective(const pp::Token *token) in getDirective() argument
65 if (token->type != pp::Token::IDENTIFIER) in getDirective()
68 if (token->text == kDirectiveDefine) in getDirective()
70 else if (token->text == kDirectiveUndef) in getDirective()
72 else if (token->text == kDirectiveIf) in getDirective()
74 else if (token->text == kDirectiveIfdef) in getDirective()
76 else if (token->text == kDirectiveIfndef) in getDirective()
78 else if (token->text == kDirectiveElse) in getDirective()
80 else if (token->text == kDirectiveElif) in getDirective()
82 else if (token->text == kDirectiveEndif) in getDirective()
[all …]
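getDirective above first checks that the token is an identifier and then walks an if/else chain comparing token->text against the known directive names. The same keyword-to-kind lookup, sketched as a Python dictionary (the Directive enum and the 'IDENTIFIER' type tag below are illustrative, not SwiftShader's actual types):

```python
from enum import Enum, auto

class Directive(Enum):
    # Illustrative directive kinds; not SwiftShader's actual DirectiveType enum.
    UNKNOWN = auto()
    DEFINE = auto()
    UNDEF = auto()
    IF = auto()
    IFDEF = auto()
    IFNDEF = auto()
    ELSE = auto()
    ELIF = auto()
    ENDIF = auto()

# Keyword-to-kind table replacing the if/else chain in the C++ snippet.
_DIRECTIVES = {
    'define': Directive.DEFINE,
    'undef':  Directive.UNDEF,
    'if':     Directive.IF,
    'ifdef':  Directive.IFDEF,
    'ifndef': Directive.IFNDEF,
    'else':   Directive.ELSE,
    'elif':   Directive.ELIF,
    'endif':  Directive.ENDIF,
}

def get_directive(token_type, token_text):
    """Resolve an identifier token's text to a directive kind, else UNKNOWN."""
    if token_type != 'IDENTIFIER':          # mirrors the pp::Token::IDENTIFIER check
        return Directive.UNKNOWN
    return _DIRECTIVES.get(token_text, Directive.UNKNOWN)

print(get_directive('IDENTIFIER', 'ifdef'))   # Directive.IFDEF
```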
/external/antlr/runtime/C/src/
antlr3commontoken.c
40 static pANTLR3_STRING getText (pANTLR3_COMMON_TOKEN token);
41 static void setText (pANTLR3_COMMON_TOKEN token, pANTLR3_STRING text);
42 static void setText8 (pANTLR3_COMMON_TOKEN token, pANTLR3_UINT8 text);
43 static ANTLR3_UINT32 getType (pANTLR3_COMMON_TOKEN token);
44 static void setType (pANTLR3_COMMON_TOKEN token, ANTLR3_UINT32 type);
45 static ANTLR3_UINT32 getLine (pANTLR3_COMMON_TOKEN token);
46 static void setLine (pANTLR3_COMMON_TOKEN token, ANTLR3_UINT32 line);
47 static ANTLR3_INT32 getCharPositionInLine (pANTLR3_COMMON_TOKEN token);
48 static void setCharPositionInLine (pANTLR3_COMMON_TOKEN token, ANTLR3_INT32 pos);
49 static ANTLR3_UINT32 getChannel (pANTLR3_COMMON_TOKEN token);
[all …]
/external/libxml2/doc/
apibuild.py
406 def push(self, token): argument
407 self.tokens.insert(0, token);
414 def token(self): member in CLexer
633 def error(self, msg, token=-1): argument
638 if token != -1:
639 print("Got token ", token)
643 def debug(self, msg, token=-1): argument
645 if token != -1:
646 print("Got token ", token)
677 def parseComment(self, token): argument
[all …]
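CLexer in apibuild.py keeps a pushback list: push() reinserts a token at the front so the next token() call returns it again, which is how the parser peeks ahead and then backs off. A rough sketch of that pushback pattern (the PushbackLexer class is illustrative, not libxml2's CLexer):

```python
class PushbackLexer:
    """Illustrative one-way token stream with pushback, in the spirit of CLexer.push()."""

    def __init__(self, tokens):
        self._source = iter(tokens)
        self._pushed = []                 # tokens handed back by push(), served first

    def push(self, token):
        # Reinsert a token so the next token() call returns it again.
        self._pushed.insert(0, token)

    def token(self):
        if self._pushed:
            return self._pushed.pop(0)
        return next(self._source, None)   # None signals end of input

lex = PushbackLexer(['int', 'main', '('])
first = lex.token()      # 'int'
lex.push(first)          # not ready to consume it after all
print(lex.token())       # 'int' again
```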
/external/doclava/src/com/google/doclava/apicheck/
ApiFile.java
74 String token = tokenizer.getToken(); in parseApi() local
75 if (token == null) { in parseApi()
78 if ("package".equals(token)) { in parseApi()
81 throw new ApiParseException("expected package got " + token, tokenizer.getLine()); in parseApi()
93 String token; in parsePackage() local
97 token = tokenizer.requireToken(); in parsePackage()
98 assertIdent(tokenizer, token); in parsePackage()
99 name = token; in parsePackage()
101 token = tokenizer.requireToken(); in parsePackage()
102 if (!"{".equals(token)) { in parsePackage()
[all …]
/external/libxml2/
rngparser.c
59 const xmlChar *token; member
92 tokenPtr token; member
252 tokenPtr token; in xmlCRNGNextToken() local
256 token = &(ctxt->tokens[(ctxt->firstToken + ctxt->nbTokens) % MAX_TOKEN]); in xmlCRNGNextToken()
257 token->toktype = CRNG_NONE; in xmlCRNGNextToken()
291 token->toklen = cur - ctxt->cur; in xmlCRNGNextToken()
292 token->token = xmlDictLookup(ctxt->dict, ctxt->cur, token->toklen); in xmlCRNGNextToken()
293 token->toktype = CRNG_LITERAL_SEGMENT; in xmlCRNGNextToken()
294 token->prefix = NULL; in xmlCRNGNextToken()
308 token->toklen = 2; in xmlCRNGNextToken()
[all …]
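xmlCRNGNextToken buffers lookahead tokens in a fixed-size array indexed with (firstToken + nbTokens) % MAX_TOKEN, i.e. a small ring of pending tokens. A compact sketch of that circular indexing, with an assumed capacity and field names chosen for readability rather than taken from rngparser.c:

```python
MAX_TOKEN = 10   # assumed capacity; the real parser defines its own constant

class TokenRing:
    """Fixed-size circular queue of lookahead tokens, indexed modulo MAX_TOKEN."""

    def __init__(self):
        self.tokens = [None] * MAX_TOKEN
        self.first = 0     # index of the oldest buffered token
        self.count = 0     # number of tokens currently buffered

    def push(self, token):
        assert self.count < MAX_TOKEN, "lookahead window full"
        self.tokens[(self.first + self.count) % MAX_TOKEN] = token
        self.count += 1

    def pop(self):
        assert self.count > 0, "no buffered token"
        token = self.tokens[self.first]
        self.first = (self.first + 1) % MAX_TOKEN
        self.count -= 1
        return token

ring = TokenRing()
for word in ('element', 'foo', '{'):
    ring.push(word)
print(ring.pop(), ring.pop())   # element foo
```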
/external/deqp-deps/glslang/glslang/MachineIndependent/preprocessor/
Pp.cpp
101 int token = scanToken(ppToken); in CPPdefine() local
102 if (token != PpAtomIdentifier) { in CPPdefine()
104 return token; in CPPdefine()
116 token = scanToken(ppToken); in CPPdefine()
117 if (token == '(' && !ppToken->space) { in CPPdefine()
120 token = scanToken(ppToken); in CPPdefine()
121 if (mac.args.size() == 0 && token == ')') in CPPdefine()
123 if (token != PpAtomIdentifier) { in CPPdefine()
126 return token; in CPPdefine()
141 token = scanToken(ppToken); in CPPdefine()
[all …]
/external/antlr/runtime/Python/tests/
t011lexer.py
27 token = lexer.nextToken()
28 assert token.type == self.lexerModule.IDENTIFIER
29 assert token.start == 0, token.start
30 assert token.stop == 5, token.stop
31 assert token.text == 'foobar', token.text
33 token = lexer.nextToken()
34 assert token.type == self.lexerModule.WS
35 assert token.start == 6, token.start
36 assert token.stop == 6, token.stop
37 assert token.text == ' ', token.text
[all …]
t010lexer.py
27 token = lexer.nextToken()
28 assert token.type == self.lexerModule.IDENTIFIER
29 assert token.start == 0, token.start
30 assert token.stop == 5, token.stop
31 assert token.text == 'foobar', token.text
33 token = lexer.nextToken()
34 assert token.type == self.lexerModule.WS
35 assert token.start == 6, token.start
36 assert token.stop == 6, token.stop
37 assert token.text == ' ', token.text
[all …]
t004lexer.py
27 token = lexer.nextToken()
28 assert token.type == self.lexerModule.FOO
29 assert token.start == 0, token.start
30 assert token.stop == 0, token.stop
31 assert token.text == 'f', token.text
33 token = lexer.nextToken()
34 assert token.type == self.lexerModule.FOO
35 assert token.start == 1, token.start
36 assert token.stop == 2, token.stop
37 assert token.text == 'fo', token.text
[all …]
t009lexer.py
27 token = lexer.nextToken()
28 assert token.type == self.lexerModule.DIGIT
29 assert token.start == 0, token.start
30 assert token.stop == 0, token.stop
31 assert token.text == '0', token.text
33 token = lexer.nextToken()
34 assert token.type == self.lexerModule.DIGIT
35 assert token.start == 1, token.start
36 assert token.stop == 1, token.stop
37 assert token.text == '8', token.text
[all …]
t008lexer.py
27 token = lexer.nextToken()
28 assert token.type == self.lexerModule.FOO
29 assert token.start == 0, token.start
30 assert token.stop == 0, token.stop
31 assert token.text == 'f', token.text
33 token = lexer.nextToken()
34 assert token.type == self.lexerModule.FOO
35 assert token.start == 1, token.start
36 assert token.stop == 2, token.stop
37 assert token.text == 'fa', token.text
[all …]
t005lexer.py
27 token = lexer.nextToken()
28 assert token.type == self.lexerModule.FOO
29 assert token.start == 0, token.start
30 assert token.stop == 1, token.stop
31 assert token.text == 'fo', token.text
33 token = lexer.nextToken()
34 assert token.type == self.lexerModule.FOO
35 assert token.start == 2, token.start
36 assert token.stop == 4, token.stop
37 assert token.text == 'foo', token.text
[all …]
/external/pdfium/xfa/fxfa/fm2js/
cxfa_fmlexer_unittest.cpp
15 std::unique_ptr<CXFA_FMToken> token = lexer.NextToken(); in TEST() local
16 EXPECT_EQ(TOKeof, token->m_type); in TEST()
21 std::unique_ptr<CXFA_FMToken> token = lexer->NextToken(); in TEST() local
23 EXPECT_EQ(TOKminus, token->m_type); in TEST()
24 token = lexer->NextToken(); in TEST()
25 EXPECT_EQ(L"12", token->m_string); in TEST()
26 token = lexer->NextToken(); in TEST()
27 EXPECT_EQ(TOKeof, token->m_type); in TEST()
30 token = lexer->NextToken(); in TEST()
31 EXPECT_EQ(TOKnumber, token->m_type); in TEST()
[all …]
/external/ImageMagick/MagickWand/tests/
script-token-test-results.txt
1 l=8, c=1, stat=0, len=64, token="-option"
2 l=8, c=9, stat=0, len=64, token="key"
3 l=9, c=1, stat=0, len=64, token="+reset"
4 l=9, c=10, stat=0, len=64, token="imbedded#hash"
5 l=11, c=1, stat=0, len=64, token="This is a single token"
6 l=13, c=1, stat=0, len=64, token="And\ 'even '"more "complex"
7 l=15, c=1, stat=0, len=64, token="Backslash chars \n are returned as is"
8 l=16, c=1, stat=0, len=64, token="regardless \n of quoting"
9 l=18, c=1, stat=0, len=64, token="Single quote escapes"
10 l=19, c=2, stat=0, len=64, token="'"
[all …]
/external/vulkan-validation-layers/tests/
vktestframework.cpp
373 const char *token = strtok(config, delims); in ProcessConfigFile() local
374 while (token) { in ProcessConfigFile()
382 if (strcmp(token, "MaxLights") == 0) in ProcessConfigFile()
384 else if (strcmp(token, "MaxClipPlanes") == 0) in ProcessConfigFile()
386 else if (strcmp(token, "MaxTextureUnits") == 0) in ProcessConfigFile()
388 else if (strcmp(token, "MaxTextureCoords") == 0) in ProcessConfigFile()
390 else if (strcmp(token, "MaxVertexAttribs") == 0) in ProcessConfigFile()
392 else if (strcmp(token, "MaxVertexUniformComponents") == 0) in ProcessConfigFile()
394 else if (strcmp(token, "MaxVaryingFloats") == 0) in ProcessConfigFile()
396 else if (strcmp(token, "MaxVertexTextureImageUnits") == 0) in ProcessConfigFile()
[all …]
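ProcessConfigFile splits the config text with strtok and runs each token through a chain of strcmp calls, pairing every recognized limit name with the value that follows it. A loose Python approximation of that name/value scan (the limit names come from the snippet; the split-and-pair logic is an assumption about the surrounding code):

```python
def parse_limits(config_text):
    """Scan whitespace-separated 'Name value' pairs into a dict of known limits."""
    known = {
        'MaxLights', 'MaxClipPlanes', 'MaxTextureUnits', 'MaxTextureCoords',
        'MaxVertexAttribs', 'MaxVertexUniformComponents', 'MaxVaryingFloats',
        'MaxVertexTextureImageUnits',
    }
    limits = {}
    tokens = config_text.split()          # crude stand-in for the strtok() loop
    i = 0
    while i + 1 < len(tokens):
        name, value = tokens[i], tokens[i + 1]
        if name in known:
            limits[name] = int(value)     # each recognized name consumes the next token
        i += 2
    return limits

print(parse_limits("MaxLights 32 MaxClipPlanes 6"))
# -> {'MaxLights': 32, 'MaxClipPlanes': 6}
```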
/external/apache-xml/src/main/java/org/apache/xml/utils/
StylesheetPIHandler.java
156 String token = ""; in processingInstruction() local
160 token = tokenizer.nextToken(); in processingInstruction()
164 (token.equals(" ") || token.equals("\t") || token.equals("="))) in processingInstruction()
167 String name = token; in processingInstruction()
170 token = tokenizer.nextToken(); in processingInstruction()
172 (token.equals(" " ) || token.equals("\t") || token.equals("="))) in processingInstruction()
173 token = tokenizer.nextToken(); in processingInstruction()
174 type = token.substring(1, token.length() - 1); in processingInstruction()
179 token = tokenizer.nextToken(); in processingInstruction()
181 (token.equals(" " ) || token.equals("\t") || token.equals("="))) in processingInstruction()
[all …]
/external/antlr/runtime/ObjC/Framework/test/runtime/token/
CommonTokenTest.m
17 CommonToken *token = [[CommonToken newToken] retain];
18 STAssertNotNil(token, @"Token was nil");
19 [token release];
24 CommonToken *token = [[CommonToken eofToken] retain];
25 STAssertNotNil(token, @"Token was nil");
26 STAssertEquals(token.type, (NSInteger)TokenTypeEOF, @"Token was not of type TokenTypeEOF");
27 [token release];
32 CommonToken *token = [[CommonToken newToken:TokenTypeUP] retain];
33 token.text = @"<UP>";
34 STAssertNotNil(token, @"Token was nil");
[all …]
/external/jsoncpp/src/lib_json/
json_reader.cpp
131 Token token; in parse() local
132 skipCommentTokens(token); in parse()
139 token.type_ = tokenError; in parse()
140 token.start_ = beginDoc; in parse()
141 token.end_ = endDoc; in parse()
144 token); in parse()
152 Token token; in readValue() local
153 skipCommentTokens(token); in readValue()
169 switch (token.type_) { in readValue()
171 successful = readObject(token); in readValue()
[all …]
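readValue in json_reader.cpp first skips comment tokens and then switches on token.type_, delegating objects to readObject and so on. The same dispatch shape, reduced to a hypothetical Python reader whose token kinds and grammar are illustrative rather than jsoncpp's:

```python
def read_value(tokens):
    """Dispatch on the current token's kind, in the spirit of Reader::readValue."""
    kind, text = next(tokens)
    while kind == 'comment':              # counterpart of skipCommentTokens()
        kind, text = next(tokens)
    if kind == 'object_begin':
        return read_object(tokens)        # nested structures handled recursively
    if kind == 'number':
        return float(text)
    if kind == 'string':
        return text
    raise ValueError(f'unexpected token {kind!r}')

def read_object(tokens):
    obj = {}
    for kind, text in tokens:
        if kind == 'object_end':
            return obj
        obj[text] = read_value(tokens)    # key token, then its value
    raise ValueError('unterminated object')

toks = iter([('object_begin', '{'), ('string', 'a'), ('number', '1'),
             ('object_end', '}')])
print(read_value(toks))                   # -> {'a': 1.0}
```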
/external/libnl/lib/route/cls/
ematch_syntax.y
62 %token <i> ERROR LOGIC NOT OPERAND NUMBER ALIGN LAYER
63 %token <i> KW_OPEN "("
64 %token <i> KW_CLOSE ")"
65 %token <i> KW_PLUS "+"
66 %token <i> KW_MASK "mask"
67 %token <i> KW_SHIFT ">>"
68 %token <i> KW_AT "at"
69 %token <i> EMATCH_CMP "cmp"
70 %token <i> EMATCH_NBYTE "pattern"
71 %token <i> EMATCH_TEXT "text"
[all …]
/external/googletest/googlemock/scripts/generator/cpp/
ast.py
287 for token in self.alias:
288 if token is not None and name == token.name:
342 for token in token_list:
343 if token.name == node.name:
467 token = tokens[end]
469 if token.name == '<':
471 elif token.name == '>':
508 token = tokens[i]
509 if token.name == '<':
516 elif token.name == ',':
[all …]
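The ast.py lines track template nesting by counting '<' and '>' and only treating ',' as a separator at the outermost level. A small illustrative version of that depth-counting idea, operating on plain characters instead of the generator's token objects:

```python
def split_template_args(text):
    """Split 'A, B<C, D>, E' into top-level arguments by tracking <...> depth."""
    args, current, depth = [], [], 0
    for ch in text:
        if ch == '<':
            depth += 1
        elif ch == '>':
            depth -= 1
        if ch == ',' and depth == 0:
            args.append(''.join(current).strip())   # comma at depth 0 ends an argument
            current = []
        else:
            current.append(ch)
    if current:
        args.append(''.join(current).strip())
    return args

print(split_template_args('int, std::map<int, char>, Foo'))
# -> ['int', 'std::map<int, char>', 'Foo']
```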
/external/antlr/runtime/Python3/tests/
t011lexer.py
27 token = lexer.nextToken()
28 self.assertEqual(token.type, self.lexerModule.IDENTIFIER)
29 self.assertEqual(token.start, 0)
30 self.assertEqual(token.stop, 5)
31 self.assertEqual(token.text, 'foobar')
33 token = lexer.nextToken()
34 self.assertEqual(token.type, self.lexerModule.WS)
35 self.assertEqual(token.start, 6)
36 self.assertEqual(token.stop, 6)
37 self.assertEqual(token.text, ' ')
[all …]
t010lexer.py
27 token = lexer.nextToken()
28 self.assertEqual(token.type, self.lexerModule.IDENTIFIER)
29 self.assertEqual(token.start, 0)
30 self.assertEqual(token.stop, 5)
31 self.assertEqual(token.text, 'foobar')
33 token = lexer.nextToken()
34 self.assertEqual(token.type, self.lexerModule.WS)
35 self.assertEqual(token.start, 6)
36 self.assertEqual(token.stop, 6)
37 self.assertEqual(token.text, ' ')
[all …]
/external/snakeyaml/src/main/java/org/yaml/snakeyaml/parser/
ParserImpl.java
183 StreamStartToken token = (StreamStartToken) scanner.getToken(); in produce() local
184 Event event = new StreamStartEvent(token.getStartMark(), token.getEndMark()); in produce()
196 Token token = scanner.peekToken(); in produce() local
197 Mark startMark = token.getStartMark(); in produce()
220 Token token = scanner.peekToken(); in produce() local
221 Mark startMark = token.getStartMark(); in produce()
227 token = scanner.getToken(); in produce()
228 Mark endMark = token.getEndMark(); in produce()
235 StreamEndToken token = (StreamEndToken) scanner.getToken(); in produce() local
236 event = new StreamEndEvent(token.getStartMark(), token.getEndMark()); in produce()
[all …]
/external/python/cpython3/Lib/email/
_header_value_parser.py
139 for token in self:
140 comments.extend(token.comments)
157 for token in self:
158 if not hasattr(token, '_pp'):
160 'list: {!r}'.format(token))
162 yield from token._pp(indent+' ')
241 for token in self:
242 if token.token_type == 'bare-quoted-string':
243 return token.value
663 for token in self:
[all …]
