source: trunk/server/lib/dnspython/tests/tokenizer.py

# Copyright (C) 2003-2007, 2009, 2010 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

import unittest

import dns.exception
import dns.tokenizer

Token = dns.tokenizer.Token

class TokenizerTestCase(unittest.TestCase):

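    # Quoted strings: get() returns QUOTED_STRING tokens, resolving \" and
    # \DDD decimal escapes; an unterminated string raises UnexpectedEnd,
    # while a truncated escape or an embedded newline raises SyntaxError.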
    def testQuotedString1(self):
        tok = dns.tokenizer.Tokenizer(r'"foo"')
        token = tok.get()
        self.failUnless(token == Token(dns.tokenizer.QUOTED_STRING, 'foo'))

    def testQuotedString2(self):
        tok = dns.tokenizer.Tokenizer(r'""')
        token = tok.get()
        self.failUnless(token == Token(dns.tokenizer.QUOTED_STRING, ''))

    def testQuotedString3(self):
        tok = dns.tokenizer.Tokenizer(r'"\"foo\""')
        token = tok.get()
        self.failUnless(token == Token(dns.tokenizer.QUOTED_STRING, '"foo"'))

    def testQuotedString4(self):
        tok = dns.tokenizer.Tokenizer(r'"foo\010bar"')
        token = tok.get()
        self.failUnless(token == Token(dns.tokenizer.QUOTED_STRING, 'foo\x0abar'))

    def testQuotedString5(self):
        def bad():
            tok = dns.tokenizer.Tokenizer(r'"foo')
            token = tok.get()
        self.failUnlessRaises(dns.exception.UnexpectedEnd, bad)

    def testQuotedString6(self):
        def bad():
            tok = dns.tokenizer.Tokenizer(r'"foo\01')
            token = tok.get()
        self.failUnlessRaises(dns.exception.SyntaxError, bad)

    def testQuotedString7(self):
        def bad():
            tok = dns.tokenizer.Tokenizer('"foo\nbar"')
            token = tok.get()
        self.failUnlessRaises(dns.exception.SyntaxError, bad)

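    # End-of-input handling: empty input yields EOF (repeatedly), a bare
    # newline yields EOL then EOF, and leading whitespace is skipped unless
    # want_leading is set.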
    def testEmpty1(self):
        tok = dns.tokenizer.Tokenizer('')
        token = tok.get()
        self.failUnless(token.is_eof())

    def testEmpty2(self):
        tok = dns.tokenizer.Tokenizer('')
        token1 = tok.get()
        token2 = tok.get()
        self.failUnless(token1.is_eof() and token2.is_eof())

    def testEOL(self):
        tok = dns.tokenizer.Tokenizer('\n')
        token1 = tok.get()
        token2 = tok.get()
        self.failUnless(token1.is_eol() and token2.is_eof())

    def testWS1(self):
        tok = dns.tokenizer.Tokenizer(' \n')
        token1 = tok.get()
        self.failUnless(token1.is_eol())

    def testWS2(self):
        tok = dns.tokenizer.Tokenizer(' \n')
        token1 = tok.get(want_leading=True)
        self.failUnless(token1.is_whitespace())

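    # Comments: a ';' comment is skipped by default and only returned as a
    # COMMENT token (without the ';') when want_comment is set.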
    def testComment1(self):
        tok = dns.tokenizer.Tokenizer(' ;foo\n')
        token1 = tok.get()
        self.failUnless(token1.is_eol())

    def testComment2(self):
        tok = dns.tokenizer.Tokenizer(' ;foo\n')
        token1 = tok.get(want_comment = True)
        token2 = tok.get()
        self.failUnless(token1 == Token(dns.tokenizer.COMMENT, 'foo') and
                        token2.is_eol())

    def testComment3(self):
        tok = dns.tokenizer.Tokenizer(' ;foo bar\n')
        token1 = tok.get(want_comment = True)
        token2 = tok.get()
        self.failUnless(token1 == Token(dns.tokenizer.COMMENT, 'foo bar') and
                        token2.is_eol())

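    # Parenthesized groups: newlines inside '(...)' do not produce EOL
    # tokens; unbalanced or nested parentheses are syntax errors.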
    def testMultiline1(self):
        tok = dns.tokenizer.Tokenizer('( foo\n\n bar\n)')
        tokens = list(iter(tok))
        self.failUnless(tokens == [Token(dns.tokenizer.IDENTIFIER, 'foo'),
                                   Token(dns.tokenizer.IDENTIFIER, 'bar')])

    def testMultiline2(self):
        tok = dns.tokenizer.Tokenizer('( foo\n\n bar\n)\n')
        tokens = list(iter(tok))
        self.failUnless(tokens == [Token(dns.tokenizer.IDENTIFIER, 'foo'),
                                   Token(dns.tokenizer.IDENTIFIER, 'bar'),
                                   Token(dns.tokenizer.EOL, '\n')])

    def testMultiline3(self):
        def bad():
            tok = dns.tokenizer.Tokenizer('foo)')
            tokens = list(iter(tok))
        self.failUnlessRaises(dns.exception.SyntaxError, bad)

    def testMultiline4(self):
        def bad():
            tok = dns.tokenizer.Tokenizer('((foo)')
            tokens = list(iter(tok))
        self.failUnlessRaises(dns.exception.SyntaxError, bad)

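    # unget(): one token of pushback is supported; a second unget without an
    # intervening get() raises UngetBufferFull.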
    def testUnget1(self):
        tok = dns.tokenizer.Tokenizer('foo')
        t1 = tok.get()
        tok.unget(t1)
        t2 = tok.get()
        self.failUnless(t1 == t2 and t1.ttype == dns.tokenizer.IDENTIFIER and \
                        t1.value == 'foo')

    def testUnget2(self):
        def bad():
            tok = dns.tokenizer.Tokenizer('foo')
            t1 = tok.get()
            tok.unget(t1)
            tok.unget(t1)
        self.failUnlessRaises(dns.tokenizer.UngetBufferFull, bad)

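    # get_eol(): returns the text of the end-of-line token ('\n' at a real
    # line break, '' at end of input).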
    def testGetEOL1(self):
        tok = dns.tokenizer.Tokenizer('\n')
        t = tok.get_eol()
        self.failUnless(t == '\n')

    def testGetEOL2(self):
        tok = dns.tokenizer.Tokenizer('')
        t = tok.get_eol()
        self.failUnless(t == '')

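    # Escaped delimiters: a backslash keeps a delimiter inside a single
    # IDENTIFIER token; get() preserves the escapes and Token.unescape()
    # resolves them (including \DDD decimal escapes).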
    def testEscapedDelimiter1(self):
        tok = dns.tokenizer.Tokenizer(r'ch\ ld')
        t = tok.get()
        self.failUnless(t.ttype == dns.tokenizer.IDENTIFIER and t.value == r'ch\ ld')

    def testEscapedDelimiter2(self):
        tok = dns.tokenizer.Tokenizer(r'ch\032ld')
        t = tok.get()
        self.failUnless(t.ttype == dns.tokenizer.IDENTIFIER and t.value == r'ch\032ld')

    def testEscapedDelimiter3(self):
        tok = dns.tokenizer.Tokenizer(r'ch\ild')
        t = tok.get()
        self.failUnless(t.ttype == dns.tokenizer.IDENTIFIER and t.value == r'ch\ild')

    def testEscapedDelimiter1u(self):
        tok = dns.tokenizer.Tokenizer(r'ch\ ld')
        t = tok.get().unescape()
        self.failUnless(t.ttype == dns.tokenizer.IDENTIFIER and t.value == r'ch ld')

    def testEscapedDelimiter2u(self):
        tok = dns.tokenizer.Tokenizer(r'ch\032ld')
        t = tok.get().unescape()
        self.failUnless(t.ttype == dns.tokenizer.IDENTIFIER and t.value == 'ch ld')

    def testEscapedDelimiter3u(self):
        tok = dns.tokenizer.Tokenizer(r'ch\ild')
        t = tok.get().unescape()
        self.failUnless(t.ttype == dns.tokenizer.IDENTIFIER and t.value == r'child')

if __name__ == '__main__':
    unittest.main()