
Searched refs:_tokenize (Results 1 – 11 of 11) sorted by relevance

/external/minijail/tools/
parser_unittest.py:41 def _tokenize(line): member in TokenizerTests
49 for token in TokenizerTests._tokenize('@include /minijail.policy')
56 for token in TokenizerTests._tokenize('@include ./minijail.policy')
62 [(token.type, token.value) for token in TokenizerTests._tokenize(
91 for token in TokenizerTests._tokenize(
106 TokenizerTests._tokenize('%invalid-token%')
117 def _tokenize(self, line): member in ParseConstantTests
124 self.parser.parse_value(self._tokenize('0x80000000')), 0x80000000)
127 self.parser.parse_value(self._tokenize('0x8000000000000000')),
135 self.parser.parse_value(self._tokenize('0x100000000'))
[all …]
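
The hits above are a test helper: TokenizerTests._tokenize() wraps the policy lexer so each case can tokenize a single line, as in the '@include /minijail.policy' checks. A minimal, hypothetical sketch of that pattern; the Token and tokenize_line names below are illustrative stand-ins, not minijail's actual parser API:

    # Hypothetical sketch: a test class exposes a static _tokenize()
    # helper so each case can lex one policy line and inspect the
    # (type, value) pairs. Names here are illustrative only.
    import collections
    import re
    import unittest

    Token = collections.namedtuple('Token', ['type', 'value'])

    _TOKEN_RE = re.compile(r'(?P<INCLUDE>@include)|(?P<PATH>\S+)')

    def tokenize_line(line):
        # Yield one Token per regex match; lastgroup names the
        # alternative that matched.
        for match in _TOKEN_RE.finditer(line):
            yield Token(match.lastgroup, match.group())

    class TokenizerTests(unittest.TestCase):
        @staticmethod
        def _tokenize(line):
            return list(tokenize_line(line))

        def test_include(self):
            tokens = TokenizerTests._tokenize('@include /minijail.policy')
            self.assertEqual(
                [(t.type, t.value) for t in tokens],
                [('INCLUDE', '@include'), ('PATH', '/minijail.policy')])

    if __name__ == '__main__':
        unittest.main()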
/external/python/cpython3/Lib/
tokenize.py:426 return _tokenize(rl_gen.__next__, encoding)
429 def _tokenize(readline, encoding): function
618 return _tokenize(readline, None)
655 tokens = _tokenize(sys.stdin.readline, None)
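
Here _tokenize() is the private generator behind CPython's public tokenize API: tokenize.tokenize() detects the source encoding and delegates to it (line 426), and generate_tokens() calls it with encoding=None (line 618). A minimal sketch of the public entry point it backs:

    # tokenize.tokenize() takes a readline callable over bytes and
    # yields TokenInfo tuples; the encoding detection is what the
    # `encoding` argument of _tokenize carries.
    import io
    import tokenize

    source = b"x = 1 + 2\n"
    for tok in tokenize.tokenize(io.BytesIO(source).readline):
        print(tok.type, tok.string)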
gettext.py:86 def _tokenize(plural): function
183 result, nexttok = _parse(_tokenize(plural))
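
In gettext, _tokenize() lexes the C-style plural expression from a catalog's Plural-Forms header and _parse() compiles it (line 183). The documented wrapper around this _parse(_tokenize(...)) pipeline is gettext.c2py(); a short sketch:

    # gettext.c2py() runs a Plural-Forms expression through the
    # _tokenize()/_parse() pipeline and returns a function mapping a
    # count n to a plural-form index.
    from gettext import c2py

    plural = c2py('n != 1')   # typical English rule from a PO header
    print(plural(1))          # 0 -> singular form
    print(plural(5))          # 1 -> plural form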
/external/python/cpython2/Lib/idlelib/
EditorWindow.py:1588 _tokenize = tokenize variable
1614 INDENT=_tokenize.INDENT,
1615 NAME=_tokenize.NAME,
1626 save_tabsize = _tokenize.tabsize
1627 _tokenize.tabsize = self.tabwidth
1630 _tokenize.tokenize(self.readline, self.tokeneater)
1631 except (_tokenize.TokenError, SyntaxError):
1636 _tokenize.tabsize = save_tabsize
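
These hits show IDLE aliasing the tokenize module as _tokenize (line 1588), temporarily overriding its module-level tabsize, and restoring it once tokenization finishes. A condensed sketch of that save/patch/restore pattern; note that the two-argument tokenize.tokenize(readline, tokeneater) call is the Python 2 callback API, which Python 3 replaced with a generator:

    # Sketch of the IDLE pattern (Python 2-era API): override the
    # module global, run the tokenizer, and restore the global in a
    # finally block even if tokenization fails.
    import tokenize as _tokenize

    def run_with_tabwidth(readline, tokeneater, tabwidth):
        save_tabsize = _tokenize.tabsize
        _tokenize.tabsize = tabwidth
        try:
            _tokenize.tokenize(readline, tokeneater)  # Python 2 call form
        except (_tokenize.TokenError, SyntaxError):
            pass  # tokenization failed; bail out quietly, as IDLE does
        finally:
            _tokenize.tabsize = save_tabsize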
/external/python/cpython2/Lib/
gettext.py:84 def _tokenize(plural): function
177 result, nexttok = _parse(_tokenize(plural))
/external/python/cpython3/Lib/test/
test_tokenize.py:2 from tokenize import (tokenize, _tokenize, untokenize, NUMBER, NAME, OP,
/external/python/cpython3/Doc/library/
tokenize.rst:147 .. _tokenize-cli:
/external/libchrome/third_party/jinja2/
environment.py:524 def _tokenize(self, source, name, filename=None, state=None): member in Environment
parser.py:40 self.stream = environment._tokenize(source, name, filename, state)
/external/python/jinja/src/jinja2/
environment.py:557 def _tokenize(self, source, name, filename=None, state=None): member in Environment
parser.py:43 self.stream = environment._tokenize(source, name, filename, state)
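
Both Jinja2 copies (libchrome's vendored third_party/jinja2 and the upstream src/jinja2) route Parser input through the internal Environment._tokenize() hook. The public counterparts are Environment.lex(), which yields (lineno, token_type, value) tuples, and Environment.parse(), which returns the template AST; a short sketch:

    # Public equivalents of the internal _tokenize() hook the parser
    # uses: lex() exposes the token stream, parse() builds the AST.
    from jinja2 import Environment

    env = Environment()
    for lineno, token_type, value in env.lex('Hello {{ name }}!'):
        print(lineno, token_type, repr(value))

    ast = env.parse('Hello {{ name }}!')  # drives _tokenize internally
    print(ast)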