// Copyright (C) 2015 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "tokenizer.h"

#include <errno.h>
#include <gtest/gtest.h>

#include <string>

namespace init {

// Constructs a Tokenizer over |test_data| and verifies it starts in the
// TOK_START state.
#define SETUP_TEST(test_data)  \
  std::string data(test_data); \
  Tokenizer tokenizer(data);   \
  ASSERT_EQ(Tokenizer::TOK_START, tokenizer.current().type)

// Advances the tokenizer and verifies the current token is TOK_TEXT with the
// expected text.
#define ASSERT_TEXT_TOKEN(test_text)              \
  ASSERT_TRUE(tokenizer.Next());                  \
  ASSERT_EQ(test_text, tokenizer.current().text); \
  ASSERT_EQ(Tokenizer::TOK_TEXT, tokenizer.current().type)

// Advances the tokenizer and verifies the current token is TOK_NEWLINE.
#define ASSERT_NEWLINE_TOKEN()   \
  ASSERT_TRUE(tokenizer.Next()); \
  ASSERT_EQ(Tokenizer::TOK_NEWLINE, tokenizer.current().type)

TEST(Tokenizer, Empty) {
  SETUP_TEST("");
  ASSERT_FALSE(tokenizer.Next());
}

TEST(Tokenizer, Simple) {
  SETUP_TEST("test");
  ASSERT_TEXT_TOKEN("test");
  ASSERT_FALSE(tokenizer.Next());
}

TEST(Tokenizer, LeadingWhiteSpace) {
  SETUP_TEST(" \t  \r  test");
  ASSERT_TEXT_TOKEN("test");
  ASSERT_FALSE(tokenizer.Next());
}

TEST(Tokenizer, TrailingWhiteSpace) {
  SETUP_TEST("test \t  \r  ");
  ASSERT_TEXT_TOKEN("test");
  ASSERT_FALSE(tokenizer.Next());
}

TEST(Tokenizer, WhiteSpace) {
  SETUP_TEST(" \t  \r  test \t  \r  ");
  ASSERT_TEXT_TOKEN("test");

  ASSERT_FALSE(tokenizer.Next());
}

TEST(Tokenizer, TwoTokens) {
  SETUP_TEST("  foo   bar ");
  ASSERT_TEXT_TOKEN("foo");
  ASSERT_TEXT_TOKEN("bar");

  ASSERT_FALSE(tokenizer.Next());
}

TEST(Tokenizer, MultiToken) {
  SETUP_TEST("one two three four five");
  ASSERT_TEXT_TOKEN("one");
  ASSERT_TEXT_TOKEN("two");
  ASSERT_TEXT_TOKEN("three");
  ASSERT_TEXT_TOKEN("four");
  ASSERT_TEXT_TOKEN("five");

  ASSERT_FALSE(tokenizer.Next());
}

TEST(Tokenizer, NewLine) {
  SETUP_TEST("\n");
  ASSERT_NEWLINE_TOKEN();

  ASSERT_FALSE(tokenizer.Next());
}

TEST(Tokenizer, TextNewLine) {
  SETUP_TEST("test\n");
  ASSERT_TEXT_TOKEN("test");
  ASSERT_NEWLINE_TOKEN();

  ASSERT_FALSE(tokenizer.Next());
}

TEST(Tokenizer, MultiTextNewLine) {
  SETUP_TEST("one\ntwo\nthree\n");
  ASSERT_TEXT_TOKEN("one");
  ASSERT_NEWLINE_TOKEN();
  ASSERT_TEXT_TOKEN("two");
  ASSERT_NEWLINE_TOKEN();
  ASSERT_TEXT_TOKEN("three");
  ASSERT_NEWLINE_TOKEN();

  ASSERT_FALSE(tokenizer.Next());
}

TEST(Tokenizer, MultiTextNewLineNoLineEnding) {
  SETUP_TEST("one\ntwo\nthree");
  ASSERT_TEXT_TOKEN("one");
  ASSERT_NEWLINE_TOKEN();
  ASSERT_TEXT_TOKEN("two");
  ASSERT_NEWLINE_TOKEN();
  ASSERT_TEXT_TOKEN("three");

  ASSERT_FALSE(tokenizer.Next());
}

TEST(Tokenizer, Comment) {
  SETUP_TEST("#test");
  ASSERT_FALSE(tokenizer.Next());
}

TEST(Tokenizer, CommentWhiteSpace) {
  SETUP_TEST(" \t  \r  #test \t  \r  ");
  ASSERT_FALSE(tokenizer.Next());
}

TEST(Tokenizer, CommentNewLine) {
  SETUP_TEST(" #test   \n");
  ASSERT_NEWLINE_TOKEN();
  ASSERT_FALSE(tokenizer.Next());
}

TEST(Tokenizer, CommentTwoNewLine) {
  SETUP_TEST(" #test   \n#test");
  ASSERT_NEWLINE_TOKEN();
  ASSERT_FALSE(tokenizer.Next());
}

TEST(Tokenizer, CommentWithText) {
  SETUP_TEST("foo bar #test");
  ASSERT_TEXT_TOKEN("foo");
  ASSERT_TEXT_TOKEN("bar");
  ASSERT_FALSE(tokenizer.Next());
}

TEST(Tokenizer, CommentWithTextNoSpace) {
  SETUP_TEST("foo bar#test");
  ASSERT_TEXT_TOKEN("foo");
  ASSERT_TEXT_TOKEN("bar");
  ASSERT_FALSE(tokenizer.Next());
}

TEST(Tokenizer, CommentWithTextLineFeed) {
  SETUP_TEST("foo bar #test\n");
  ASSERT_TEXT_TOKEN("foo");
  ASSERT_TEXT_TOKEN("bar");
  ASSERT_NEWLINE_TOKEN();
  ASSERT_FALSE(tokenizer.Next());
}

TEST(Tokenizer, CommentWithMultiTextLineFeed) {
  SETUP_TEST("#blah\nfoo bar #test\n#blah");
  ASSERT_NEWLINE_TOKEN();
  ASSERT_TEXT_TOKEN("foo");
  ASSERT_TEXT_TOKEN("bar");
  ASSERT_NEWLINE_TOKEN();
  ASSERT_FALSE(tokenizer.Next());
}

TEST(Tokenizer, SimpleEscaped) {
  SETUP_TEST("fo\\no bar");
  ASSERT_TEXT_TOKEN("fo\\no");
  ASSERT_TEXT_TOKEN("bar");
  ASSERT_FALSE(tokenizer.Next());
}

TEST(Tokenizer, EscapedLineContNoLineFeed) {
  SETUP_TEST("fo\\no bar \\ hello");
  ASSERT_TEXT_TOKEN("fo\\no");
  ASSERT_TEXT_TOKEN("bar");
  ASSERT_FALSE(tokenizer.Next());
}

TEST(Tokenizer, EscapedLineContLineFeed) {
  SETUP_TEST("fo\\no bar \\ hello\n");
  ASSERT_TEXT_TOKEN("fo\\no");
  ASSERT_TEXT_TOKEN("bar");
  ASSERT_FALSE(tokenizer.Next());
}

TEST(Tokenizer, EscapedLineCont) {
  SETUP_TEST("fo\\no bar \\\ntest");
  ASSERT_TEXT_TOKEN("fo\\no");
  ASSERT_TEXT_TOKEN("bar");
  ASSERT_TEXT_TOKEN("test");
  ASSERT_FALSE(tokenizer.Next());
}

TEST(Tokenizer, EscapedLineContWithBadChars) {
  SETUP_TEST("fo\\no bar \\bad bad bad\ntest");
  ASSERT_TEXT_TOKEN("fo\\no");
  ASSERT_TEXT_TOKEN("bar");
  ASSERT_TEXT_TOKEN("test");
  ASSERT_FALSE(tokenizer.Next());
}

TEST(Tokenizer, SimpleQuotes) {
  SETUP_TEST("foo \"single token\" bar");
  ASSERT_TEXT_TOKEN("foo");
  ASSERT_TEXT_TOKEN("single token");
  ASSERT_TEXT_TOKEN("bar");
  ASSERT_FALSE(tokenizer.Next());
}

TEST(Tokenizer, BadQuotes) {
  SETUP_TEST("foo \"single token");
  ASSERT_TEXT_TOKEN("foo");
  ASSERT_TEXT_TOKEN("single token");
  ASSERT_FALSE(tokenizer.Next());
}

}  // namespace init