1from __future__ import print_function, division, absolute_import
2from __future__ import unicode_literals
3from fontTools.misc.py23 import *
4from fontTools.feaLib.error import FeatureLibError, IncludedFeaNotFound
5from fontTools.feaLib.lexer import IncludingLexer, Lexer
6import os
7import shutil
8import tempfile
9import unittest
10
11
def lex(s):
    """Tokenize *s* as a feature file and return (type, token) pairs.

    Source locations are discarded; only token type and value are kept.
    """
    pairs = []
    for token_type, token, _location in Lexer(s, "test.fea"):
        pairs.append((token_type, token))
    return pairs
14
15
class LexerTest(unittest.TestCase):
    """Unit tests for the single-file feature-file Lexer."""

    def __init__(self, methodName):
        unittest.TestCase.__init__(self, methodName)
        # Python 3 renamed assertRaisesRegexp to assertRaisesRegex and
        # emits deprecation warnings for the old spelling; alias the new
        # name on interpreters that only ship the old one.
        if not hasattr(self, "assertRaisesRegex"):
            self.assertRaisesRegex = self.assertRaisesRegexp

    def test_empty(self):
        # Input containing only whitespace produces no tokens at all.
        self.assertEqual(lex(""), [])
        self.assertEqual(lex(" \t "), [])

    def test_name(self):
        # Each of these sources lexes to a single NAME token whose text
        # is the source itself.
        for name in ("a17", ".notdef", "two.oldstyle", "_",
                     "\\table", "a+*:^~!", "with-dash"):
            self.assertEqual(lex(name), [(Lexer.NAME, name)])

    def test_cid(self):
        # Backslash-digit sequences are CIDs, lexed as integers.
        self.assertEqual(
            lex("\\0 \\987"), [(Lexer.CID, 0), (Lexer.CID, 987)])

    def test_glyphclass(self):
        self.assertEqual(lex("@Vowel.sc"), [(Lexer.GLYPHCLASS, "Vowel.sc")])
        self.assertEqual(lex("@Vowel-sc"), [(Lexer.GLYPHCLASS, "Vowel-sc")])
        self.assertRaisesRegex(
            FeatureLibError, "Expected glyph class", lex, "@(a)")
        self.assertRaisesRegex(
            FeatureLibError, "Expected glyph class", lex, "@ A")
        self.assertRaisesRegex(
            FeatureLibError, "not be longer than 63 characters",
            lex, "@" + ("A" * 64))
        self.assertRaisesRegex(
            FeatureLibError, "Glyph class names must consist of",
            lex, "@Ab:c")

    def test_include(self):
        expected = [
            (Lexer.NAME, "include"),
            (Lexer.FILENAME, "~/foo/bar baz.fea"),
            (Lexer.SYMBOL, ";"),
        ]
        self.assertEqual(lex("include (~/foo/bar baz.fea);"), expected)
        expected = [
            (Lexer.NAME, "include"),
            (Lexer.COMMENT, "# Comment"),
            (Lexer.FILENAME, "foo"),
            (Lexer.SYMBOL, ";"),
        ]
        self.assertEqual(lex("include # Comment\n    (foo) \n;"), expected)
        # A missing or unterminated parenthesized filename is an error.
        self.assertRaises(FeatureLibError, lex, "include blah")
        self.assertRaises(FeatureLibError, lex, "include (blah")

    def test_number(self):
        self.assertEqual(
            lex("123 -456"), [(Lexer.NUMBER, 123), (Lexer.NUMBER, -456)])
        # Hexadecimal literals are accepted in either letter case.
        self.assertEqual(lex("0xCAFED00D"), [(Lexer.NUMBER, 0xCAFED00D)])
        self.assertEqual(lex("0xcafed00d"), [(Lexer.NUMBER, 0xCAFED00D)])

    def test_float(self):
        self.assertEqual(
            lex("1.23 -4.5"), [(Lexer.FLOAT, 1.23), (Lexer.FLOAT, -4.5)])

    def test_symbol(self):
        self.assertEqual(lex("a'"), [(Lexer.NAME, "a"), (Lexer.SYMBOL, "'")])
        # A leading hyphen is a SYMBOL; an embedded hyphen is part of
        # the following NAME.
        self.assertEqual(
            lex("-A-B"), [(Lexer.SYMBOL, "-"), (Lexer.NAME, "A-B")])
        self.assertEqual(
            lex("foo - -2"),
            [(Lexer.NAME, "foo"), (Lexer.SYMBOL, "-"), (Lexer.NUMBER, -2)])

    def test_comment(self):
        self.assertEqual(
            lex("# Comment\n#"),
            [(Lexer.COMMENT, "# Comment"), (Lexer.COMMENT, "#")])

    def test_string(self):
        self.assertEqual(
            lex('"foo" "bar"'),
            [(Lexer.STRING, "foo"), (Lexer.STRING, "bar")])
        # Newlines of any flavor inside a string collapse into spaces.
        self.assertEqual(
            lex('"foo \nbar\r baz \r\nqux\n\n "'),
            [(Lexer.STRING, "foo bar baz qux ")])
        # The lexer should preserve escape sequences because they have
        # different interpretations depending on context. For better
        # or for worse, that is how the OpenType Feature File Syntax
        # has been specified; see section 9.e (name table) for examples.
        self.assertEqual(lex(r'"M\00fcller-Lanc\00e9"'),  # 'nameid 9'
                         [(Lexer.STRING, r"M\00fcller-Lanc\00e9")])
        self.assertEqual(lex(r'"M\9fller-Lanc\8e"'),  # 'nameid 9 1'
                         [(Lexer.STRING, r"M\9fller-Lanc\8e")])
        # Unterminated strings are rejected.
        self.assertRaises(FeatureLibError, lex, '"foo\n bar')

    def test_bad_character(self):
        self.assertRaises(FeatureLibError, lex, "123 \u0001")

    def test_newline(self):
        def line_numbers(s):
            return [loc[1] for (_, _, loc) in Lexer(s, "test.fea")]
        # All newline conventions must advance the line counter.
        self.assertEqual(line_numbers("FOO\n\nBAR\nBAZ"), [1, 3, 4])  # Unix
        self.assertEqual(line_numbers("FOO\r\rBAR\rBAZ"), [1, 3, 4])  # Macintosh
        self.assertEqual(line_numbers("FOO\r\n\r\n BAR\r\nBAZ"), [1, 3, 4])  # Windows
        self.assertEqual(line_numbers("FOO\n\rBAR\r\nBAZ"), [1, 3, 4])  # mixed

    def test_location(self):
        def locs(s):
            return ["%s:%d:%d" % loc for (_, _, loc) in Lexer(s, "test.fea")]
        # Locations are (file, line, column), all 1-based.
        self.assertEqual(locs("a b # Comment\n12 @x"), [
            "test.fea:1:1", "test.fea:1:3", "test.fea:1:5", "test.fea:2:1",
            "test.fea:2:4",
        ])

    def test_scan_over_(self):
        lx = Lexer("abbacabba12", "test.fea")
        self.assertEqual(lx.pos_, 0)
        # Characters outside the given set do not advance the position.
        lx.scan_over_("xyz")
        self.assertEqual(lx.pos_, 0)
        lx.scan_over_("abc")
        self.assertEqual(lx.pos_, 9)
        # Scanning again over the same set is a no-op.
        lx.scan_over_("abc")
        self.assertEqual(lx.pos_, 9)
        lx.scan_over_("0123456789")
        self.assertEqual(lx.pos_, 11)

    def test_scan_until_(self):
        lx = Lexer("foo'bar", "test.fea")
        self.assertEqual(lx.pos_, 0)
        lx.scan_until_("'")
        self.assertEqual(lx.pos_, 3)
        # Already positioned at a stop character: position is unchanged.
        lx.scan_until_("'")
        self.assertEqual(lx.pos_, 3)
144
145
class IncludingLexerTest(unittest.TestCase):
    """Unit tests for IncludingLexer, which follows include statements."""

    def __init__(self, methodName):
        unittest.TestCase.__init__(self, methodName)
        # Python 3 renamed assertRaisesRegexp to assertRaisesRegex, and
        # fires deprecation warnings if a program uses the old name.
        # LexerTest already installs this alias; without it here,
        # test_include_missing_file raises AttributeError on Python 2,
        # where only assertRaisesRegexp exists.
        if not hasattr(self, "assertRaisesRegex"):
            self.assertRaisesRegex = self.assertRaisesRegexp

    @staticmethod
    def getpath(filename):
        # Test data lives in the "data" directory next to this file.
        path, _ = os.path.split(__file__)
        return os.path.join(path, "data", filename)

    def test_include(self):
        lexer = IncludingLexer(self.getpath("include/include4.fea"))
        result = ['%s %s:%d' % (token, os.path.split(loc[0])[1], loc[1])
                  for _, token, loc in lexer]
        # Tokens arrive in depth-first order across the include chain.
        self.assertEqual(result, [
            "I4a include4.fea:1",
            "I3a include3.fea:1",
            "I2a include2.fea:1",
            "I1a include1.fea:1",
            "I0 include0.fea:1",
            "I1b include1.fea:3",
            "; include2.fea:2",
            "I2b include2.fea:3",
            "; include3.fea:2",
            "I3b include3.fea:3",
            "; include4.fea:2",
            "I4b include4.fea:3"
        ])

    def test_include_limit(self):
        # Nesting beyond the maximum include depth must be rejected.
        lexer = IncludingLexer(self.getpath("include/include6.fea"))
        self.assertRaises(FeatureLibError, list, lexer)

    def test_include_self(self):
        # A file that includes itself must fail rather than recurse.
        lexer = IncludingLexer(self.getpath("include/includeself.fea"))
        self.assertRaises(FeatureLibError, list, lexer)

    def test_include_missing_file(self):
        # The error message pinpoints the include statement's location.
        lexer = IncludingLexer(self.getpath("include/includemissingfile.fea"))
        self.assertRaisesRegex(IncludedFeaNotFound,
                               "includemissingfile.fea:1:8: missingfile.fea",
                               list, lexer)

    def test_featurefilepath_None(self):
        # Lexing from an in-memory stream: there is no file path, and
        # locations report the pseudo-filename "<features>".
        lexer = IncludingLexer(UnicodeIO("# foobar"))
        self.assertIsNone(lexer.featurefilepath)
        files = set(loc[0] for _, _, loc in lexer)
        self.assertIn("<features>", files)

    def test_include_absolute_path(self):
        # delete=False so the file survives the with-block and can be
        # re-opened by the lexer (required on Windows); removed below.
        with tempfile.NamedTemporaryFile(delete=False) as included:
            included.write(tobytes("""
                feature kern {
                    pos A B -40;
                } kern;
                """, encoding="utf-8"))
        including = UnicodeIO("include(%s);" % included.name)
        try:
            lexer = IncludingLexer(including)
            files = set(loc[0] for _, _, loc in lexer)
            self.assertIn(included.name, files)
        finally:
            os.remove(included.name)

    def test_include_relative_to_cwd(self):
        # save current working directory, to be restored later
        cwd = os.getcwd()
        tmpdir = tempfile.mkdtemp()
        try:
            # create new feature file in a temporary directory
            with open(os.path.join(tmpdir, "included.fea"), "w",
                      encoding="utf-8") as included:
                included.write("""
                    feature kern {
                        pos A B -40;
                    } kern;
                    """)
            # change current folder to the temporary dir
            os.chdir(tmpdir)
            # instantiate a new lexer that includes the above file
            # using a relative path; the IncludingLexer does not
            # itself have a path, because it was initialized from
            # an in-memory stream, so it will use the current working
            # directory to resolve relative include statements
            lexer = IncludingLexer(UnicodeIO("include(included.fea);"))
            files = set(loc[0] for _, _, loc in lexer)
            expected = os.path.realpath(included.name)
            self.assertIn(expected, files)
        finally:
            # remove temporary folder and restore previous working directory
            os.chdir(cwd)
            shutil.rmtree(tmpdir)
234
235
if __name__ == "__main__":
    # Run the test suite and propagate unittest's exit status to the
    # shell (non-zero on failure).
    import sys
    sys.exit(unittest.main())
239