tutorial_tests = """
Let's try a simple generator:

>>> def f():
...     yield 1
...     yield 2

>>> for i in f():
...     print i
1
2
>>> g = f()
>>> g.next()
1
>>> g.next()
2

"Falling off the end" stops the generator:

>>> g.next()
Traceback (most recent call last):
  File "<stdin>", line 1, in ?
  File "<stdin>", line 2, in g
StopIteration

"return" also stops the generator:

>>> def f():
...     yield 1
...     return
...     yield 2 # never reached
...
>>> g = f()
>>> g.next()
1
>>> g.next()
Traceback (most recent call last):
  File "<stdin>", line 1, in ?
  File "<stdin>", line 3, in f
StopIteration
>>> g.next() # once stopped, can't be resumed
Traceback (most recent call last):
  File "<stdin>", line 1, in ?
StopIteration

"raise StopIteration" stops the generator too:

>>> def f():
...     yield 1
...     raise StopIteration
...     yield 2 # never reached
...
>>> g = f()
>>> g.next()
1
>>> g.next()
Traceback (most recent call last):
  File "<stdin>", line 1, in ?
StopIteration
>>> g.next()
Traceback (most recent call last):
  File "<stdin>", line 1, in ?
StopIteration

However, they are not exactly equivalent:

>>> def g1():
...     try:
...         return
...     except:
...         yield 1
...
>>> list(g1())
[]

>>> def g2():
...     try:
...         raise StopIteration
...     except:
...         yield 42
>>> print list(g2())
[42]

This may be surprising at first:

>>> def g3():
...     try:
...         return
...     finally:
...         yield 1
...
>>> list(g3())
[1]

Let's create an alternate range() function implemented as a generator:

>>> def yrange(n):
...     for i in range(n):
...         yield i
...
>>> list(yrange(5))
[0, 1, 2, 3, 4]

Generators always return to the most recent caller:

>>> def creator():
...     r = yrange(5)
...     print "creator", r.next()
...     return r
...
>>> def caller():
...     r = creator()
...     for i in r:
...         print "caller", i
...
>>> caller()
creator 0
caller 1
caller 2
caller 3
caller 4

Generators can call other generators:

>>> def zrange(n):
...     for i in yrange(n):
...         yield i
...
>>> list(zrange(5))
[0, 1, 2, 3, 4]

"""

# The examples from PEP 255.

pep_tests = """

Specification: Yield

Restriction: A generator cannot be resumed while it is actively
running:

>>> def g():
...     i = me.next()
...     yield i
>>> me = g()
>>> me.next()
Traceback (most recent call last):
  ...
  File "<string>", line 2, in g
ValueError: generator already executing

Specification: Return

Note that return isn't always equivalent to raising StopIteration: the
difference lies in how enclosing try/except constructs are treated.
For example,

>>> def f1():
...     try:
...         return
...     except:
...         yield 1
>>> print list(f1())
[]

because, as in any function, return simply exits, but

>>> def f2():
...     try:
...         raise StopIteration
...     except:
...         yield 42
>>> print list(f2())
[42]

because StopIteration is captured by a bare "except", as is any
exception.

Specification: Generators and Exception Propagation

>>> def f():
...     return 1//0
>>> def g():
...     yield f()  # the zero division exception propagates
...     yield 42   # and we'll never get here
>>> k = g()
>>> k.next()
Traceback (most recent call last):
  File "<stdin>", line 1, in ?
  File "<stdin>", line 2, in g
  File "<stdin>", line 2, in f
ZeroDivisionError: integer division or modulo by zero
>>> k.next()  # and the generator cannot be resumed
Traceback (most recent call last):
  File "<stdin>", line 1, in ?
StopIteration
>>>

Specification: Try/Except/Finally

>>> def f():
...     try:
...         yield 1
...         try:
...             yield 2
...             1//0
...             yield 3  # never get here
...         except ZeroDivisionError:
...             yield 4
...             yield 5
...             raise
...         except:
...             yield 6
...         yield 7  # the "raise" above stops this
...     except:
...         yield 8
...     yield 9
...     try:
...         x = 12
...     finally:
...         yield 10
...     yield 11
>>> print list(f())
[1, 2, 4, 5, 8, 9, 10, 11]
>>>

Guido's binary tree example.

>>> # A binary tree class.
>>> class Tree:
...
...     def __init__(self, label, left=None, right=None):
...         self.label = label
...         self.left = left
...         self.right = right
...
...     def __repr__(self, level=0, indent="    "):
...         s = level*indent + repr(self.label)
...         if self.left:
...             s = s + "\\n" + self.left.__repr__(level+1, indent)
...         if self.right:
...             s = s + "\\n" + self.right.__repr__(level+1, indent)
...         return s
...
...     def __iter__(self):
...         return inorder(self)

>>> # Create a Tree from a list.
>>> def tree(list):
...     n = len(list)
...     if n == 0:
...         return []
...     i = n // 2
...     return Tree(list[i], tree(list[:i]), tree(list[i+1:]))

>>> # Show it off: create a tree.
>>> t = tree("ABCDEFGHIJKLMNOPQRSTUVWXYZ")

>>> # A recursive generator that generates Tree labels in in-order.
>>> def inorder(t):
...     if t:
...         for x in inorder(t.left):
...             yield x
...         yield t.label
...         for x in inorder(t.right):
...             yield x

>>> # Show it off: create a tree.
>>> t = tree("ABCDEFGHIJKLMNOPQRSTUVWXYZ")
>>> # Print the nodes of the tree in in-order.
>>> for x in t:
...     print x,
A B C D E F G H I J K L M N O P Q R S T U V W X Y Z

>>> # A non-recursive generator.
>>> def inorder(node):
...     stack = []
...     while node:
...         while node.left:
...             stack.append(node)
...             node = node.left
...         yield node.label
...         while not node.right:
...             try:
...                 node = stack.pop()
...             except IndexError:
...                 return
...             yield node.label
...         node = node.right

>>> # Exercise the non-recursive generator.
>>> for x in t:
...     print x,
A B C D E F G H I J K L M N O P Q R S T U V W X Y Z

"""

# Examples from Iterator-List and Python-Dev and c.l.py.

email_tests = """

The difference between yielding None and returning it.

>>> def g():
...     for i in range(3):
...         yield None
...     yield None
...     return
>>> list(g())
[None, None, None, None]

Ensure that explicitly raising StopIteration acts like any other exception
in try/except, not like a return.

>>> def g():
...     yield 1
...     try:
...         raise StopIteration
...     except:
...         yield 2
...     yield 3
>>> list(g())
[1, 2, 3]

Next one was posted to c.l.py.

>>> def gcomb(x, k):
...     "Generate all combinations of k elements from list x."
...
...     if k > len(x):
...         return
...     if k == 0:
...         yield []
...     else:
...         first, rest = x[0], x[1:]
...         # A combination does or doesn't contain first.
...         # If it does, the remainder is a k-1 comb of rest.
...         for c in gcomb(rest, k-1):
...             c.insert(0, first)
...             yield c
...         # If it doesn't contain first, it's a k comb of rest.
...         for c in gcomb(rest, k):
...             yield c

>>> seq = range(1, 5)
>>> for k in range(len(seq) + 2):
...     print "%d-combs of %s:" % (k, seq)
...     for c in gcomb(seq, k):
...         print "   ", c
0-combs of [1, 2, 3, 4]:
    []
1-combs of [1, 2, 3, 4]:
    [1]
    [2]
    [3]
    [4]
2-combs of [1, 2, 3, 4]:
    [1, 2]
    [1, 3]
    [1, 4]
    [2, 3]
    [2, 4]
    [3, 4]
3-combs of [1, 2, 3, 4]:
    [1, 2, 3]
    [1, 2, 4]
    [1, 3, 4]
    [2, 3, 4]
4-combs of [1, 2, 3, 4]:
    [1, 2, 3, 4]
5-combs of [1, 2, 3, 4]:

From the Iterators list, about the types of these things.

>>> def g():
...     yield 1
...
>>> type(g)
<type 'function'>
>>> i = g()
>>> type(i)
<type 'generator'>
>>> [s for s in dir(i) if not s.startswith('_')]
['close', 'gi_code', 'gi_frame', 'gi_running', 'next', 'send', 'throw']
>>> from test.test_support import HAVE_DOCSTRINGS
>>> print(i.next.__doc__ if HAVE_DOCSTRINGS else 'x.next() -> the next value, or raise StopIteration')
x.next() -> the next value, or raise StopIteration
>>> iter(i) is i
True
>>> import types
>>> isinstance(i, types.GeneratorType)
True

And more, added later.

>>> i.gi_running
0
>>> type(i.gi_frame)
<type 'frame'>
>>> i.gi_running = 42
Traceback (most recent call last):
  ...
TypeError: readonly attribute
>>> def g():
...     yield me.gi_running
>>> me = g()
>>> me.gi_running
0
>>> me.next()
1
>>> me.gi_running
0

A clever union-find implementation from c.l.py, due to David Eppstein.
Sent: Friday, June 29, 2001 12:16 PM
To: python-list@python.org
Subject: Re: PEP 255: Simple Generators

>>> class disjointSet:
...     def __init__(self, name):
...         self.name = name
...         self.parent = None
...         self.generator = self.generate()
...
...     def generate(self):
...         while not self.parent:
...             yield self
...         for x in self.parent.generator:
...             yield x
...
...     def find(self):
...         return self.generator.next()
...
...     def union(self, parent):
...         if self.parent:
...             raise ValueError("Sorry, I'm not a root!")
...         self.parent = parent
...
...     def __str__(self):
...         return self.name

>>> names = "ABCDEFGHIJKLM"
>>> sets = [disjointSet(name) for name in names]
>>> roots = sets[:]

>>> import random
>>> gen = random.WichmannHill(42)
>>> while 1:
...     for s in sets:
...         print "%s->%s" % (s, s.find()),
...     print
...     if len(roots) > 1:
...         s1 = gen.choice(roots)
...         roots.remove(s1)
...         s2 = gen.choice(roots)
...         s1.union(s2)
...         print "merged", s1, "into", s2
...     else:
...         break
A->A B->B C->C D->D E->E F->F G->G H->H I->I J->J K->K L->L M->M
merged D into G
A->A B->B C->C D->G E->E F->F G->G H->H I->I J->J K->K L->L M->M
merged C into F
A->A B->B C->F D->G E->E F->F G->G H->H I->I J->J K->K L->L M->M
merged L into A
A->A B->B C->F D->G E->E F->F G->G H->H I->I J->J K->K L->A M->M
merged H into E
A->A B->B C->F D->G E->E F->F G->G H->E I->I J->J K->K L->A M->M
merged B into E
A->A B->E C->F D->G E->E F->F G->G H->E I->I J->J K->K L->A M->M
merged J into G
A->A B->E C->F D->G E->E F->F G->G H->E I->I J->G K->K L->A M->M
merged E into G
A->A B->G C->F D->G E->G F->F G->G H->G I->I J->G K->K L->A M->M
merged M into G
A->A B->G C->F D->G E->G F->F G->G H->G I->I J->G K->K L->A M->G
merged I into K
A->A B->G C->F D->G E->G F->F G->G H->G I->K J->G K->K L->A M->G
merged K into A
A->A B->G C->F D->G E->G F->F G->G H->G I->A J->G K->A L->A M->G
merged F into A
A->A B->G C->A D->G E->G F->A G->G H->G I->A J->G K->A L->A M->G
merged A into G
A->G B->G C->G D->G E->G F->G G->G H->G I->G J->G K->G L->G M->G

"""
# Emacs turd '

# Fun tests (for sufficiently warped notions of "fun").

fun_tests = """

Build up to a recursive Sieve of Eratosthenes generator.

>>> def firstn(g, n):
...     return [g.next() for i in range(n)]

>>> def intsfrom(i):
...     while 1:
...         yield i
...         i += 1

>>> firstn(intsfrom(5), 7)
[5, 6, 7, 8, 9, 10, 11]

>>> def exclude_multiples(n, ints):
...     for i in ints:
...         if i % n:
...             yield i

>>> firstn(exclude_multiples(3, intsfrom(1)), 6)
[1, 2, 4, 5, 7, 8]

>>> def sieve(ints):
...     prime = ints.next()
...     yield prime
...     not_divisible_by_prime = exclude_multiples(prime, ints)
...     for p in sieve(not_divisible_by_prime):
...         yield p

>>> primes = sieve(intsfrom(2))
>>> firstn(primes, 20)
[2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31, 37, 41, 43, 47, 53, 59, 61, 67, 71]


Another famous problem: generate all integers of the form
    2**i * 3**j * 5**k
in increasing order, where i,j,k >= 0.  Trickier than it may look at first!
Try writing it without generators, and correctly, and without generating
3 internal results for each result output.

>>> def times(n, g):
...     for i in g:
...         yield n * i
>>> firstn(times(10, intsfrom(1)), 10)
[10, 20, 30, 40, 50, 60, 70, 80, 90, 100]

>>> def merge(g, h):
...     ng = g.next()
...     nh = h.next()
...     while 1:
...         if ng < nh:
...             yield ng
...             ng = g.next()
...         elif ng > nh:
...             yield nh
...             nh = h.next()
...         else:
...             yield ng
...             ng = g.next()
...             nh = h.next()

The following works, but is doing a whale of a lot of redundant work --
it's not clear how to get the internal uses of m235 to share a single
generator.  Note that me_times2 (etc) each need to see every element in the
result sequence.  So this is an example where lazy lists are more natural
(you can look at the head of a lazy list any number of times).

>>> def m235():
...     yield 1
...     me_times2 = times(2, m235())
...     me_times3 = times(3, m235())
...     me_times5 = times(5, m235())
...     for i in merge(merge(me_times2,
...                          me_times3),
...                    me_times5):
...         yield i

Don't print "too many" of these -- the implementation above is extremely
inefficient: each call of m235() leads to 3 recursive calls, and in
turn each of those 3 more, and so on, and so on, until we've descended
enough levels to satisfy the print stmts.  Very odd: when I printed 5
lines of results below, this managed to screw up Win98's malloc in "the
usual" way, i.e. the heap grew over 4Mb so Win98 started fragmenting
address space, and it *looked* like a very slow leak.

>>> result = m235()
>>> for i in range(3):
...     print firstn(result, 15)
[1, 2, 3, 4, 5, 6, 8, 9, 10, 12, 15, 16, 18, 20, 24]
[25, 27, 30, 32, 36, 40, 45, 48, 50, 54, 60, 64, 72, 75, 80]
[81, 90, 96, 100, 108, 120, 125, 128, 135, 144, 150, 160, 162, 180, 192]

Heh.  Here's one way to get a shared list, complete with an excruciating
namespace renaming trick.  The *pretty* part is that the times() and merge()
functions can be reused as-is, because they only assume their stream
arguments are iterable -- a LazyList is the same as a generator to times().

>>> class LazyList:
...     def __init__(self, g):
...         self.sofar = []
...         self.fetch = g.next
...
...     def __getitem__(self, i):
...         sofar, fetch = self.sofar, self.fetch
...         while i >= len(sofar):
...             sofar.append(fetch())
...         return sofar[i]

>>> def m235():
...     yield 1
...     # Gack: m235 below actually refers to a LazyList.
...     me_times2 = times(2, m235)
...     me_times3 = times(3, m235)
...     me_times5 = times(5, m235)
...     for i in merge(merge(me_times2,
...                          me_times3),
...                    me_times5):
...         yield i

Print as many of these as you like -- *this* implementation is memory-
efficient.

>>> m235 = LazyList(m235())
>>> for i in range(5):
...     print [m235[j] for j in range(15*i, 15*(i+1))]
[1, 2, 3, 4, 5, 6, 8, 9, 10, 12, 15, 16, 18, 20, 24]
[25, 27, 30, 32, 36, 40, 45, 48, 50, 54, 60, 64, 72, 75, 80]
[81, 90, 96, 100, 108, 120, 125, 128, 135, 144, 150, 160, 162, 180, 192]
[200, 216, 225, 240, 243, 250, 256, 270, 288, 300, 320, 324, 360, 375, 384]
[400, 405, 432, 450, 480, 486, 500, 512, 540, 576, 600, 625, 640, 648, 675]

Ye olde Fibonacci generator, LazyList style.

>>> def fibgen(a, b):
...
...     def sum(g, h):
...         while 1:
...             yield g.next() + h.next()
...
...     def tail(g):
...         g.next()    # throw first away
...         for x in g:
...             yield x
...
...     yield a
...     yield b
...     for s in sum(iter(fib),
...                  tail(iter(fib))):
...         yield s

>>> fib = LazyList(fibgen(1, 2))
>>> firstn(iter(fib), 17)
[1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, 233, 377, 610, 987, 1597, 2584]


Running after your tail with itertools.tee (new in version 2.4)

The algorithms "m235" (Hamming) and Fibonacci presented above are both
examples of a whole family of FP (functional programming) algorithms
where a function produces and returns a list, while the production
algorithm presupposes the list as already produced, by recursively
calling itself.  For these algorithms to work, they must:

- produce at least a first element without presupposing the existence of
  the rest of the list
- produce their elements in a lazy manner

To work efficiently, the beginning of the list must not be recomputed over
and over again.  This is ensured in most FP languages as a built-in feature.
In Python, we have to explicitly maintain a list of already computed results
and abandon genuine recursion.

This is what had been attempted above with the LazyList class.  One problem
with that class is that it keeps a list of all of the generated results and
therefore continually grows.  This partially defeats the goal of the generator
concept, viz. produce the results only as needed instead of producing them
all and thereby wasting memory.

Thanks to itertools.tee, it is now clear "how to get the internal uses of
m235 to share a single generator".

>>> from itertools import tee
>>> def m235():
...     def _m235():
...         yield 1
...         for n in merge(times(2, m2),
...                        merge(times(3, m3),
...                              times(5, m5))):
...             yield n
...     m1 = _m235()
...     m2, m3, m5, mRes = tee(m1, 4)
...     return mRes

>>> it = m235()
>>> for i in range(5):
...     print firstn(it, 15)
[1, 2, 3, 4, 5, 6, 8, 9, 10, 12, 15, 16, 18, 20, 24]
[25, 27, 30, 32, 36, 40, 45, 48, 50, 54, 60, 64, 72, 75, 80]
[81, 90, 96, 100, 108, 120, 125, 128, 135, 144, 150, 160, 162, 180, 192]
[200, 216, 225, 240, 243, 250, 256, 270, 288, 300, 320, 324, 360, 375, 384]
[400, 405, 432, 450, 480, 486, 500, 512, 540, 576, 600, 625, 640, 648, 675]

The "tee" function does just what we want.  It internally keeps a generated
result for as long as it has not been "consumed" from all of the duplicated
iterators, whereupon it is deleted.  You can therefore print the Hamming
sequence for hours without increasing memory usage, or with only a very
small increase.

The beauty of it is that recursive running-after-their-tail FP algorithms
are quite straightforwardly expressed with this Python idiom.

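For a tiny, self-contained sketch of that sharing (independent of the m235
machinery above -- the counter and the names here are only illustrative),
duplicate a plain counter and consume the two copies at different rates:

>>> def ints():
...     i = 1
...     while 1:
...         yield i
...         i += 1
>>> a, b = tee(ints())
>>> a.next(), a.next(), a.next()
(1, 2, 3)
>>> b.next()   # b still sees the shared stream from the start
1
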
Ye olde Fibonacci generator, tee style.

>>> def fib():
...
...     def _isum(g, h):
...         while 1:
...             yield g.next() + h.next()
...
...     def _fib():
...         yield 1
...         yield 2
...         fibTail.next()  # throw first away
...         for res in _isum(fibHead, fibTail):
...             yield res
...
...     realfib = _fib()
...     fibHead, fibTail, fibRes = tee(realfib, 3)
...     return fibRes

>>> firstn(fib(), 17)
[1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, 233, 377, 610, 987, 1597, 2584]

"""

# syntax_tests mostly provokes SyntaxErrors.  Also fiddling with #if 0
# hackery.

syntax_tests = """

>>> def f():
...     return 22
...     yield 1
Traceback (most recent call last):
  ..
SyntaxError: 'return' with argument inside generator (<doctest test.test_generators.__test__.syntax[0]>, line 3)

>>> def f():
...     yield 1
...     return 22
Traceback (most recent call last):
  ..
SyntaxError: 'return' with argument inside generator (<doctest test.test_generators.__test__.syntax[1]>, line 3)

"return None" is not the same as "return" in a generator:

>>> def f():
...     yield 1
...     return None
Traceback (most recent call last):
  ..
SyntaxError: 'return' with argument inside generator (<doctest test.test_generators.__test__.syntax[2]>, line 3)

These are fine:

>>> def f():
...     yield 1
...     return

>>> def f():
...     try:
...         yield 1
...     finally:
...         pass

>>> def f():
...     try:
...         try:
...             1//0
...         except ZeroDivisionError:
...             yield 666
...         except:
...             pass
...     finally:
...         pass

>>> def f():
...     try:
...         try:
...             yield 12
...             1//0
...         except ZeroDivisionError:
...             yield 666
...         except:
...             try:
...                 x = 12
...             finally:
...                 yield 12
...     except:
...         return
>>> list(f())
[12, 666]

>>> def f():
...     yield
>>> type(f())
<type 'generator'>


>>> def f():
...     if 0:
...         yield
>>> type(f())
<type 'generator'>


>>> def f():
...     if 0:
...         yield 1
>>> type(f())
<type 'generator'>

>>> def f():
...     if "":
...         yield None
>>> type(f())
<type 'generator'>

>>> def f():
...     return
...     try:
...         if x==4:
...             pass
...         elif 0:
...             try:
...                 1//0
...             except SyntaxError:
...                 pass
...             else:
...                 if 0:
...                     while 12:
...                         x += 1
...                         yield 2 # don't blink
...                         f(a, b, c, d, e)
...                 else:
...                     pass
...     except:
...         x = 1
...     return
>>> type(f())
<type 'generator'>

>>> def f():
...     if 0:
...         def g():
...             yield 1
...
>>> type(f())
<type 'NoneType'>

>>> def f():
...     if 0:
...         class C:
...             def __init__(self):
...                 yield 1
...             def f(self):
...                 yield 2
>>> type(f())
<type 'NoneType'>

>>> def f():
...     if 0:
...         return
...     if 0:
...         yield 2
>>> type(f())
<type 'generator'>


>>> def f():
...     if 0:
...         lambda x: x     # shouldn't trigger here
...         return          # or here
...         def f(i):
...             return 2*i  # or here
...         if 0:
...             return 3    # but *this* sucks (line 8)
...     if 0:
...         yield 2         # because it's a generator (line 10)
Traceback (most recent call last):
SyntaxError: 'return' with argument inside generator (<doctest test.test_generators.__test__.syntax[24]>, line 10)

This one caused a crash (see SF bug 567538):

>>> def f():
...     for i in range(3):
...         try:
...             continue
...         finally:
...             yield i
...
>>> g = f()
>>> print g.next()
0
>>> print g.next()
1
>>> print g.next()
2
>>> print g.next()
Traceback (most recent call last):
StopIteration


Test the gi_code attribute

>>> def f():
...     yield 5
...
>>> g = f()
>>> g.gi_code is f.func_code
True
>>> g.next()
5
>>> g.next()
Traceback (most recent call last):
StopIteration
>>> g.gi_code is f.func_code
True


Test the __name__ attribute and the repr()

>>> def f():
...     yield 5
...
>>> g = f()
>>> g.__name__
'f'
>>> repr(g)  # doctest: +ELLIPSIS
'<generator object f at ...>'

Lambdas shouldn't have their usual return behavior.

>>> x = lambda: (yield 1)
>>> list(x())
[1]

>>> x = lambda: ((yield 1), (yield 2))
>>> list(x())
[1, 2]
"""

# conjoin is a simple backtracking generator, named in honor of Icon's
# "conjunction" control structure.  Pass a list of no-argument functions
# that return iterable objects.  Easiest to explain by example: assume the
# function list [x, y, z] is passed.  Then conjoin acts like:
#
# def g():
#     values = [None] * 3
#     for values[0] in x():
#         for values[1] in y():
#             for values[2] in z():
#                 yield values
#
# So some 3-lists of values *may* be generated, each time we successfully
# get into the innermost loop.  If an iterator fails (is exhausted) before
# then, it "backtracks" to get the next value from the nearest enclosing
# iterator (the one "to the left"), and starts all over again at the next
# slot (pumps a fresh iterator).  Of course this is most useful when the
# iterators have side-effects, so that which values *can* be generated at
# each slot depend on the values iterated at previous slots.
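#
# For instance (a small illustrative sketch, not one of the doctests below):
# with
#
#     gs = [lambda: iter((0, 1)), lambda: iter("ab")]
#
# conjoin(gs) yields its internal `values` list four times, holding
# [0, 'a'], [0, 'b'], [1, 'a'] and [1, 'b'] in turn.  It is the same list
# object each time, with its slots rebound between yields (the copying
# caveat this implies is exercised in conjoin_tests).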

def simple_conjoin(gs):

    values = [None] * len(gs)

    def gen(i):
        if i >= len(gs):
            yield values
        else:
            for values[i] in gs[i]():
                for x in gen(i+1):
                    yield x

    for x in gen(0):
        yield x

# That works fine, but recursing a level and checking i against len(gs) for
# each item produced is inefficient.  By doing manual loop unrolling across
# generator boundaries, it's possible to eliminate most of that overhead.
# This isn't worth the bother *in general* for generators, but conjoin() is
# a core building block for some CPU-intensive generator applications.

def conjoin(gs):

    n = len(gs)
    values = [None] * n

    # Do one loop nest at a time recursively, until the # of loop nests
    # remaining is divisible by 3.

    def gen(i):
        if i >= n:
            yield values

        elif (n-i) % 3:
            ip1 = i+1
            for values[i] in gs[i]():
                for x in gen(ip1):
                    yield x

        else:
            for x in _gen3(i):
                yield x

    # Do three loop nests at a time, recursing only if at least three more
    # remain.  Don't call directly: this is an internal optimization for
    # gen's use.

    def _gen3(i):
        assert i < n and (n-i) % 3 == 0
        ip1, ip2, ip3 = i+1, i+2, i+3
        g, g1, g2 = gs[i : ip3]

        if ip3 >= n:
            # These are the last three, so we can yield values directly.
            for values[i] in g():
                for values[ip1] in g1():
                    for values[ip2] in g2():
                        yield values

        else:
            # At least 6 loop nests remain; peel off 3 and recurse for the
            # rest.
            for values[i] in g():
                for values[ip1] in g1():
                    for values[ip2] in g2():
                        for x in _gen3(ip3):
                            yield x

    for x in gen(0):
        yield x

# And one more approach: For backtracking apps like the Knight's Tour
# solver below, the number of backtracking levels can be enormous (one
# level per square, for the Knight's Tour, so that e.g. a 100x100 board
# needs 10,000 levels).  In such cases Python is likely to run out of
# stack space due to recursion.  So here's a recursion-free version of
# conjoin too.
# NOTE WELL: This allows large problems to be solved with only trivial
# demands on stack space.  Without explicitly resumable generators, this is
# much harder to achieve.  OTOH, this is much slower (up to a factor of 2)
# than the fancy unrolled recursive conjoin.

def flat_conjoin(gs):  # rename to conjoin to run tests with this instead
    n = len(gs)
    values = [None] * n
    iters = [None] * n
    _StopIteration = StopIteration  # make local because caught a *lot*
    i = 0
    while 1:
        # Descend.
        try:
            while i < n:
                it = iters[i] = gs[i]().next
                values[i] = it()
                i += 1
        except _StopIteration:
            pass
        else:
            assert i == n
            yield values

        # Backtrack until an older iterator can be resumed.
        i -= 1
        while i >= 0:
            try:
                values[i] = iters[i]()
                # Success!  Start fresh at next level.
                i += 1
                break
            except _StopIteration:
                # Continue backtracking.
                i -= 1
        else:
            assert i < 0
            break

# A conjoin-based N-Queens solver.

class Queens:
    def __init__(self, n):
        self.n = n
        rangen = range(n)

        # Assign a unique int to each column and diagonal.
        # columns: n of those, range(n).
        # NW-SE diagonals: 2n-1 of these, i-j unique and invariant along
        # each, smallest i-j is 0-(n-1) = 1-n, so add n-1 to shift to 0-
        # based.
        # NE-SW diagonals: 2n-1 of these, i+j unique and invariant along
        # each, smallest i+j is 0, largest is 2n-2.
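        # For example, with n = 8 the square at i = 3, j = 5 maps to
        # column bit 5, NW-SE bit n + (i-j) + (n-1) = 13, and NE-SW bit
        # n + (2n-1) + (i+j) = 31 in the rowuses encoding below.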

        # For each square, compute a bit vector of the columns and
        # diagonals it covers, and for each row compute a function that
        # generates the possibilities for the columns in that row.
        self.rowgenerators = []
        for i in rangen:
            rowuses = [(1L << j) |                  # column ordinal
                       (1L << (n + i-j + n-1)) |    # NW-SE ordinal
                       (1L << (n + 2*n-1 + i+j))    # NE-SW ordinal
                       for j in rangen]

            def rowgen(rowuses=rowuses):
                for j in rangen:
                    uses = rowuses[j]
                    if uses & self.used == 0:
                        self.used |= uses
                        yield j
                        self.used &= ~uses

            self.rowgenerators.append(rowgen)

    # Generate solutions.
    def solve(self):
        self.used = 0
        for row2col in conjoin(self.rowgenerators):
            yield row2col

    def printsolution(self, row2col):
        n = self.n
        assert n == len(row2col)
        sep = "+" + "-+" * n
        print sep
        for i in range(n):
            squares = [" " for j in range(n)]
            squares[row2col[i]] = "Q"
            print "|" + "|".join(squares) + "|"
        print sep

# A conjoin-based Knight's Tour solver.  This is pretty sophisticated
# (e.g., when used with flat_conjoin above, and passing hard=1 to the
# constructor, a 200x200 Knight's Tour was found quickly -- note that we're
# creating 10s of thousands of generators then!), and is lengthy.

class Knights:
    def __init__(self, m, n, hard=0):
        self.m, self.n = m, n

        # solve() will set up succs[i] to be a list of square #i's
        # successors.
        succs = self.succs = []

        # Remove i0 from each of its successor's successor lists, i.e.
        # successors can't go back to i0 again.  Return 0 if we can
        # detect this makes a solution impossible, else return 1.

        def remove_from_successors(i0, len=len):
            # If we remove all exits from a free square, we're dead:
            # even if we move to it next, we can't leave it again.
            # If we create a square with one exit, we must visit it next;
            # else somebody else will have to visit it, and since there's
            # only one adjacent, there won't be a way to leave it again.
            # Finally, if we create more than one free square with a
            # single exit, we can only move to one of them next, leaving
            # the other one a dead end.
            ne0 = ne1 = 0
            for i in succs[i0]:
                s = succs[i]
                s.remove(i0)
                e = len(s)
                if e == 0:
                    ne0 += 1
                elif e == 1:
                    ne1 += 1
            return ne0 == 0 and ne1 < 2

        # Put i0 back in each of its successor's successor lists.

        def add_to_successors(i0):
            for i in succs[i0]:
                succs[i].append(i0)

        # Generate the first move.
        def first():
            if m < 1 or n < 1:
                return

            # Since we're looking for a cycle, it doesn't matter where we
            # start.  Starting in a corner makes the 2nd move easy.
            corner = self.coords2index(0, 0)
            remove_from_successors(corner)
            self.lastij = corner
            yield corner
            add_to_successors(corner)

        # Generate the second moves.
        def second():
            corner = self.coords2index(0, 0)
            assert self.lastij == corner  # i.e., we started in the corner
            if m < 3 or n < 3:
                return
            assert len(succs[corner]) == 2
            assert self.coords2index(1, 2) in succs[corner]
            assert self.coords2index(2, 1) in succs[corner]
            # Only two choices.  Whichever we pick, the other must be the
            # square picked on move m*n, as it's the only way to get back
            # to (0, 0).  Save its index in self.final so that moves before
            # the last know it must be kept free.
            for i, j in (1, 2), (2, 1):
                this = self.coords2index(i, j)
                final = self.coords2index(3-i, 3-j)
                self.final = final

                remove_from_successors(this)
                succs[final].append(corner)
                self.lastij = this
                yield this
                succs[final].remove(corner)
                add_to_successors(this)

        # Generate moves 3 thru m*n-1.
        def advance(len=len):
            # If some successor has only one exit, must take it.
            # Else favor successors with fewer exits.
            candidates = []
            for i in succs[self.lastij]:
                e = len(succs[i])
                assert e > 0, "else remove_from_successors() pruning flawed"
                if e == 1:
                    candidates = [(e, i)]
                    break
                candidates.append((e, i))
            else:
                candidates.sort()

            for e, i in candidates:
                if i != self.final:
                    if remove_from_successors(i):
                        self.lastij = i
                        yield i
                        add_to_successors(i)

        # Generate moves 3 thru m*n-1.  Alternative version using a
        # stronger (but more expensive) heuristic to order successors.
        # Since the # of backtracking levels is m*n, a poor move early on
        # can take eons to undo.  Smallest square board for which this
        # matters a lot is 52x52.
        def advance_hard(vmid=(m-1)/2.0, hmid=(n-1)/2.0, len=len):
            # If some successor has only one exit, must take it.
            # Else favor successors with fewer exits.
            # Break ties via max distance from board centerpoint (favor
            # corners and edges whenever possible).
            candidates = []
            for i in succs[self.lastij]:
                e = len(succs[i])
                assert e > 0, "else remove_from_successors() pruning flawed"
                if e == 1:
                    candidates = [(e, 0, i)]
                    break
                i1, j1 = self.index2coords(i)
                d = (i1 - vmid)**2 + (j1 - hmid)**2
                candidates.append((e, -d, i))
            else:
                candidates.sort()

            for e, d, i in candidates:
                if i != self.final:
                    if remove_from_successors(i):
                        self.lastij = i
                        yield i
                        add_to_successors(i)

        # Generate the last move.
        def last():
            assert self.final in succs[self.lastij]
            yield self.final

        if m*n < 4:
            self.squaregenerators = [first]
        else:
            self.squaregenerators = [first, second] + \
                [hard and advance_hard or advance] * (m*n - 3) + \
                [last]

    def coords2index(self, i, j):
        assert 0 <= i < self.m
        assert 0 <= j < self.n
        return i * self.n + j

    def index2coords(self, index):
        assert 0 <= index < self.m * self.n
        return divmod(index, self.n)

    def _init_board(self):
        succs = self.succs
        del succs[:]
        m, n = self.m, self.n
        c2i = self.coords2index

        offsets = [( 1,  2), ( 2,  1), ( 2, -1), ( 1, -2),
                   (-1, -2), (-2, -1), (-2,  1), (-1,  2)]
        rangen = range(n)
        for i in range(m):
            for j in rangen:
                s = [c2i(i+io, j+jo) for io, jo in offsets
                                     if 0 <= i+io < m and
                                        0 <= j+jo < n]
                succs.append(s)

    # Generate solutions.
    def solve(self):
        self._init_board()
        for x in conjoin(self.squaregenerators):
            yield x

    def printsolution(self, x):
        m, n = self.m, self.n
        assert len(x) == m*n
        w = len(str(m*n))
        format = "%" + str(w) + "d"

        squares = [[None] * n for i in range(m)]
        k = 1
        for i in x:
            i1, j1 = self.index2coords(i)
            squares[i1][j1] = format % k
            k += 1

        sep = "+" + ("-" * w + "+") * n
        print sep
        for i in range(m):
            row = squares[i]
            print "|" + "|".join(row) + "|"
        print sep

1328 | conjoin_tests = """
|
---|
1329 |
|
---|
1330 | Generate the 3-bit binary numbers in order. This illustrates dumbest-
|
---|
1331 | possible use of conjoin, just to generate the full cross-product.
|
---|
1332 |
|
---|
1333 | >>> for c in conjoin([lambda: iter((0, 1))] * 3):
|
---|
1334 | ... print c
|
---|
1335 | [0, 0, 0]
|
---|
1336 | [0, 0, 1]
|
---|
1337 | [0, 1, 0]
|
---|
1338 | [0, 1, 1]
|
---|
1339 | [1, 0, 0]
|
---|
1340 | [1, 0, 1]
|
---|
1341 | [1, 1, 0]
|
---|
1342 | [1, 1, 1]
|
---|
1343 |
|
---|
1344 | For efficiency in typical backtracking apps, conjoin() yields the same list
|
---|
1345 | object each time. So if you want to save away a full account of its
|
---|
1346 | generated sequence, you need to copy its results.
|
---|
1347 |
|
---|
1348 | >>> def gencopy(iterator):
|
---|
1349 | ... for x in iterator:
|
---|
1350 | ... yield x[:]
|
---|
1351 |
|
---|
1352 | >>> for n in range(10):
|
---|
1353 | ... all = list(gencopy(conjoin([lambda: iter((0, 1))] * n)))
|
---|
1354 | ... print n, len(all), all[0] == [0] * n, all[-1] == [1] * n
|
---|
1355 | 0 1 True True
|
---|
1356 | 1 2 True True
|
---|
1357 | 2 4 True True
|
---|
1358 | 3 8 True True
|
---|
1359 | 4 16 True True
|
---|
1360 | 5 32 True True
|
---|
1361 | 6 64 True True
|
---|
1362 | 7 128 True True
|
---|
1363 | 8 256 True True
|
---|
1364 | 9 512 True True
|
---|
1365 |
|
---|
1366 | And run an 8-queens solver.
|
---|
1367 |
|
---|
1368 | >>> q = Queens(8)
|
---|
1369 | >>> LIMIT = 2
|
---|
1370 | >>> count = 0
|
---|
1371 | >>> for row2col in q.solve():
|
---|
1372 | ... count += 1
|
---|
1373 | ... if count <= LIMIT:
|
---|
1374 | ... print "Solution", count
|
---|
1375 | ... q.printsolution(row2col)
|
---|
1376 | Solution 1
|
---|
1377 | +-+-+-+-+-+-+-+-+
|
---|
1378 | |Q| | | | | | | |
|
---|
1379 | +-+-+-+-+-+-+-+-+
|
---|
1380 | | | | | |Q| | | |
|
---|
1381 | +-+-+-+-+-+-+-+-+
|
---|
1382 | | | | | | | | |Q|
|
---|
1383 | +-+-+-+-+-+-+-+-+
|
---|
1384 | | | | | | |Q| | |
|
---|
1385 | +-+-+-+-+-+-+-+-+
|
---|
1386 | | | |Q| | | | | |
|
---|
1387 | +-+-+-+-+-+-+-+-+
|
---|
1388 | | | | | | | |Q| |
|
---|
1389 | +-+-+-+-+-+-+-+-+
|
---|
1390 | | |Q| | | | | | |
|
---|
1391 | +-+-+-+-+-+-+-+-+
|
---|
1392 | | | | |Q| | | | |
|
---|
1393 | +-+-+-+-+-+-+-+-+
|
---|
1394 | Solution 2
|
---|
1395 | +-+-+-+-+-+-+-+-+
|
---|
1396 | |Q| | | | | | | |
|
---|
1397 | +-+-+-+-+-+-+-+-+
|
---|
1398 | | | | | | |Q| | |
|
---|
1399 | +-+-+-+-+-+-+-+-+
|
---|
1400 | | | | | | | | |Q|
|
---|
1401 | +-+-+-+-+-+-+-+-+
|
---|
1402 | | | |Q| | | | | |
|
---|
1403 | +-+-+-+-+-+-+-+-+
|
---|
1404 | | | | | | | |Q| |
|
---|
1405 | +-+-+-+-+-+-+-+-+
|
---|
1406 | | | | |Q| | | | |
|
---|
1407 | +-+-+-+-+-+-+-+-+
|
---|
1408 | | |Q| | | | | | |
|
---|
1409 | +-+-+-+-+-+-+-+-+
|
---|
1410 | | | | | |Q| | | |
|
---|
1411 | +-+-+-+-+-+-+-+-+
|
---|
1412 |
|
---|
1413 | >>> print count, "solutions in all."
|
---|
1414 | 92 solutions in all.
|
---|
1415 |
|
---|
1416 | And run a Knight's Tour on a 10x10 board. Note that there are about
|
---|
1417 | 20,000 solutions even on a 6x6 board, so don't dare run this to exhaustion.
|
---|
1418 |
|
---|
1419 | >>> k = Knights(10, 10)
|
---|
1420 | >>> LIMIT = 2
|
---|
1421 | >>> count = 0
|
---|
1422 | >>> for x in k.solve():
|
---|
1423 | ... count += 1
|
---|
1424 | ... if count <= LIMIT:
|
---|
1425 | ... print "Solution", count
|
---|
1426 | ... k.printsolution(x)
|
---|
1427 | ... else:
|
---|
1428 | ... break
|
---|
1429 | Solution 1
|
---|
1430 | +---+---+---+---+---+---+---+---+---+---+
|
---|
1431 | | 1| 58| 27| 34| 3| 40| 29| 10| 5| 8|
|
---|
1432 | +---+---+---+---+---+---+---+---+---+---+
|
---|
1433 | | 26| 35| 2| 57| 28| 33| 4| 7| 30| 11|
|
---|
1434 | +---+---+---+---+---+---+---+---+---+---+
|
---|
1435 | | 59|100| 73| 36| 41| 56| 39| 32| 9| 6|
|
---|
1436 | +---+---+---+---+---+---+---+---+---+---+
|
---|
1437 | | 74| 25| 60| 55| 72| 37| 42| 49| 12| 31|
|
---|
1438 | +---+---+---+---+---+---+---+---+---+---+
|
---|
1439 | | 61| 86| 99| 76| 63| 52| 47| 38| 43| 50|
|
---|
1440 | +---+---+---+---+---+---+---+---+---+---+
|
---|
1441 | | 24| 75| 62| 85| 54| 71| 64| 51| 48| 13|
|
---|
1442 | +---+---+---+---+---+---+---+---+---+---+
|
---|
1443 | | 87| 98| 91| 80| 77| 84| 53| 46| 65| 44|
|
---|
1444 | +---+---+---+---+---+---+---+---+---+---+
|
---|
1445 | | 90| 23| 88| 95| 70| 79| 68| 83| 14| 17|
|
---|
1446 | +---+---+---+---+---+---+---+---+---+---+
|
---|
1447 | | 97| 92| 21| 78| 81| 94| 19| 16| 45| 66|
|
---|
1448 | +---+---+---+---+---+---+---+---+---+---+
|
---|
1449 | | 22| 89| 96| 93| 20| 69| 82| 67| 18| 15|
|
---|
1450 | +---+---+---+---+---+---+---+---+---+---+
|
---|
1451 | Solution 2
|
---|
1452 | +---+---+---+---+---+---+---+---+---+---+
|
---|
1453 | | 1| 58| 27| 34| 3| 40| 29| 10| 5| 8|
|
---|
1454 | +---+---+---+---+---+---+---+---+---+---+
|
---|
1455 | | 26| 35| 2| 57| 28| 33| 4| 7| 30| 11|
|
---|
1456 | +---+---+---+---+---+---+---+---+---+---+
|
---|
1457 | | 59|100| 73| 36| 41| 56| 39| 32| 9| 6|
|
---|
1458 | +---+---+---+---+---+---+---+---+---+---+
|
---|
1459 | | 74| 25| 60| 55| 72| 37| 42| 49| 12| 31|
|
---|
1460 | +---+---+---+---+---+---+---+---+---+---+
|
---|
1461 | | 61| 86| 99| 76| 63| 52| 47| 38| 43| 50|
|
---|
1462 | +---+---+---+---+---+---+---+---+---+---+
|
---|
1463 | | 24| 75| 62| 85| 54| 71| 64| 51| 48| 13|
|
---|
1464 | +---+---+---+---+---+---+---+---+---+---+
|
---|
1465 | | 87| 98| 89| 80| 77| 84| 53| 46| 65| 44|
|
---|
1466 | +---+---+---+---+---+---+---+---+---+---+
|
---|
1467 | | 90| 23| 92| 95| 70| 79| 68| 83| 14| 17|
|
---|
1468 | +---+---+---+---+---+---+---+---+---+---+
|
---|
1469 | | 97| 88| 21| 78| 81| 94| 19| 16| 45| 66|
|
---|
1470 | +---+---+---+---+---+---+---+---+---+---+
|
---|
1471 | | 22| 91| 96| 93| 20| 69| 82| 67| 18| 15|
|
---|
1472 | +---+---+---+---+---+---+---+---+---+---+
|
---|
1473 | """
|
---|
1474 |
|
---|
weakref_tests = """\
Generators are weakly referenceable:

>>> import weakref
>>> def gen():
...     yield 'foo!'
...
>>> wr = weakref.ref(gen)
>>> wr() is gen
True
>>> p = weakref.proxy(gen)

Generator-iterators are weakly referenceable as well:

>>> gi = gen()
>>> wr = weakref.ref(gi)
>>> wr() is gi
True
>>> p = weakref.proxy(gi)
>>> list(p)
['foo!']

"""

1499 | coroutine_tests = """\
|
---|
1500 | Sending a value into a started generator:
|
---|
1501 |
|
---|
1502 | >>> def f():
|
---|
1503 | ... print (yield 1)
|
---|
1504 | ... yield 2
|
---|
1505 | >>> g = f()
|
---|
1506 | >>> g.next()
|
---|
1507 | 1
|
---|
1508 | >>> g.send(42)
|
---|
1509 | 42
|
---|
1510 | 2
|
---|
1511 |
|
---|
1512 | Sending a value into a new generator produces a TypeError:
|
---|
1513 |
|
---|
1514 | >>> f().send("foo")
|
---|
1515 | Traceback (most recent call last):
|
---|
1516 | ...
|
---|
1517 | TypeError: can't send non-None value to a just-started generator
|
---|
1518 |
|
---|
1519 |
|
---|
1520 | Yield by itself yields None:
|
---|
1521 |
|
---|
1522 | >>> def f(): yield
|
---|
1523 | >>> list(f())
|
---|
1524 | [None]
|
---|
1525 |
|
---|
1526 |
|
---|
1527 |
|
---|
1528 | An obscene abuse of a yield expression within a generator expression:
|
---|
1529 |
|
---|
1530 | >>> list((yield 21) for i in range(4))
|
---|
1531 | [21, None, 21, None, 21, None, 21, None]
|
---|
1532 |
|
---|
1533 | And a more sane, but still weird usage:
|
---|
1534 |
|
---|
1535 | >>> def f(): list(i for i in [(yield 26)])
|
---|
1536 | >>> type(f())
|
---|
1537 | <type 'generator'>
|
---|
1538 |
|
---|
1539 |
|
---|
1540 | A yield expression with augmented assignment.
|
---|
1541 |
|
---|
1542 | >>> def coroutine(seq):
|
---|
1543 | ... count = 0
|
---|
1544 | ... while count < 200:
|
---|
1545 | ... count += yield
|
---|
1546 | ... seq.append(count)
|
---|
1547 | >>> seq = []
|
---|
1548 | >>> c = coroutine(seq)
|
---|
1549 | >>> c.next()
|
---|
1550 | >>> print seq
|
---|
1551 | []
|
---|
1552 | >>> c.send(10)
|
---|
1553 | >>> print seq
|
---|
1554 | [10]
|
---|
1555 | >>> c.send(10)
|
---|
1556 | >>> print seq
|
---|
1557 | [10, 20]
|
---|
1558 | >>> c.send(10)
|
---|
1559 | >>> print seq
|
---|
1560 | [10, 20, 30]
|
---|
1561 |
|
---|

Check some syntax errors for yield expressions:

>>> f=lambda: (yield 1),(yield 2)
Traceback (most recent call last):
  ...
  File "<doctest test.test_generators.__test__.coroutine[21]>", line 1
SyntaxError: 'yield' outside function

>>> def f(): return lambda x=(yield): 1
Traceback (most recent call last):
  ...
SyntaxError: 'return' with argument inside generator (<doctest test.test_generators.__test__.coroutine[22]>, line 1)

>>> def f(): x = yield = y
Traceback (most recent call last):
  ...
  File "<doctest test.test_generators.__test__.coroutine[23]>", line 1
SyntaxError: assignment to yield expression not possible

>>> def f(): (yield bar) = y
Traceback (most recent call last):
  ...
  File "<doctest test.test_generators.__test__.coroutine[24]>", line 1
SyntaxError: can't assign to yield expression

>>> def f(): (yield bar) += y
Traceback (most recent call last):
  ...
  File "<doctest test.test_generators.__test__.coroutine[25]>", line 1
SyntaxError: can't assign to yield expression


Now check some throw() conditions:

>>> def f():
...     while True:
...         try:
...             print (yield)
...         except ValueError, v:
...             print "caught ValueError (%s)" % (v),
>>> import sys
>>> g = f()
>>> g.next()

>>> g.throw(ValueError)   # type only
caught ValueError ()

>>> g.throw(ValueError("xyz"))   # value only
caught ValueError (xyz)

>>> g.throw(ValueError, ValueError(1))   # value+matching type
caught ValueError (1)

>>> g.throw(ValueError, TypeError(1))    # mismatched type, rewrapped
caught ValueError (1)

>>> g.throw(ValueError, ValueError(1), None)   # explicit None traceback
caught ValueError (1)

>>> g.throw(ValueError(1), "foo")   # bad args
Traceback (most recent call last):
  ...
TypeError: instance exception may not have a separate value

>>> g.throw(ValueError, "foo", 23)   # bad args
Traceback (most recent call last):
  ...
TypeError: throw() third argument must be a traceback object

>>> def throw(g, exc):
...     try:
...         raise exc
...     except:
...         g.throw(*sys.exc_info())
>>> throw(g, ValueError)   # do it with traceback included
caught ValueError ()

>>> g.send(1)
1

>>> throw(g, TypeError)   # terminate the generator
Traceback (most recent call last):
  ...
TypeError

>>> print g.gi_frame
None

>>> g.send(2)
Traceback (most recent call last):
  ...
StopIteration

>>> g.throw(ValueError, 6)   # throw on closed generator
Traceback (most recent call last):
  ...
ValueError: 6

>>> f().throw(ValueError, 7)   # throw on just-opened generator
Traceback (most recent call last):
  ...
ValueError: 7

>>> f().throw("abc")   # throw on just-opened generator
Traceback (most recent call last):
  ...
TypeError: exceptions must be classes, or instances, not str

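When the thrown exception is handled inside the generator, throw() resumes
it and returns the next value it yields; a minimal sketch:

>>> def f():
...     try:
...         yield 1
...     except ValueError:
...         yield "handled"
>>> g = f()
>>> g.next()
1
>>> g.throw(ValueError)
'handled'
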
Now let's try closing a generator:

>>> def f():
...     try: yield
...     except GeneratorExit:
...         print "exiting"

>>> g = f()
>>> g.next()
>>> g.close()
exiting
>>> g.close()   # should be no-op now

>>> f().close()   # close on just-opened generator should be fine

>>> def f(): yield   # an even simpler generator
>>> f().close()      # close before opening
>>> g = f()
>>> g.next()
>>> g.close()        # close normally

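Once closed, the generator is finished for good; resuming it just raises
StopIteration (a small follow-on check on the generator closed above):

>>> g.next()
Traceback (most recent call last):
  ...
StopIteration
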
And finalization:

>>> def f():
...     try: yield
...     finally:
...         print "exiting"

>>> g = f()
>>> g.next()
>>> del g
exiting

>>> class context(object):
...     def __enter__(self): pass
...     def __exit__(self, *args): print 'exiting'
>>> def f():
...     with context():
...         yield
>>> g = f()
>>> g.next()
>>> del g
exiting


GeneratorExit is not caught by except Exception:

>>> def f():
...     try: yield
...     except Exception: print 'except'
...     finally: print 'finally'

>>> g = f()
>>> g.next()
>>> del g
finally

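A bare except, on the other hand, does catch GeneratorExit; as long as the
generator then finishes, the close() triggered by the destructor is
satisfied.  A minimal sketch:

>>> def f():
...     try: yield
...     except: print 'except'
...     finally: print 'finally'
>>> g = f()
>>> g.next()
>>> del g
except
finally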

Now let's try some ill-behaved generators:

>>> def f():
...     try: yield
...     except GeneratorExit:
...         yield "foo!"
>>> g = f()
>>> g.next()
>>> g.close()
Traceback (most recent call last):
  ...
RuntimeError: generator ignored GeneratorExit
>>> g.close()


Our ill-behaved code should be invoked during GC:

>>> import sys, StringIO
>>> old, sys.stderr = sys.stderr, StringIO.StringIO()
>>> g = f()
>>> g.next()
>>> del g
>>> sys.stderr.getvalue().startswith(
...     "Exception RuntimeError: 'generator ignored GeneratorExit' in "
... )
True
>>> sys.stderr = old


And errors thrown during closing should propagate:

>>> def f():
...     try: yield
...     except GeneratorExit:
...         raise TypeError("fie!")
>>> g = f()
>>> g.next()
>>> g.close()
Traceback (most recent call last):
  ...
TypeError: fie!


Ensure that various yield expression constructs make their
enclosing function a generator:

>>> def f(): x += yield
>>> type(f())
<type 'generator'>

>>> def f(): x = yield
>>> type(f())
<type 'generator'>

>>> def f(): lambda x=(yield): 1
>>> type(f())
<type 'generator'>

>>> def f(): x=(i for i in (yield) if (yield))
>>> type(f())
<type 'generator'>

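Any appearance of a yield expression does this, e.g. inside a conditional
expression too (an extra illustrative case):

>>> def f(): x = 1 if (yield) else 2
>>> type(f())
<type 'generator'>
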
>>> def f(d): d[(yield "a")] = d[(yield "b")] = 27
>>> data = [1,2]
>>> g = f(data)
>>> type(g)
<type 'generator'>
>>> g.send(None)
'a'
>>> data
[1, 2]
>>> g.send(0)
'b'
>>> data
[27, 2]
>>> try: g.send(1)
... except StopIteration: pass
>>> data
[27, 27]

"""

refleaks_tests = """
Prior to adding cycle-GC support to itertools.tee, this code would leak
references.  We add it to the standard suite so that the routine refleak
tests will trigger if it ever becomes uncleanable again.

>>> import itertools
>>> def leak():
...     class gen:
...         def __iter__(self):
...             return self
...         def next(self):
...             return self.item
...     g = gen()
...     head, tail = itertools.tee(g)
...     g.item = head
...     return head
>>> it = leak()

Make sure to also test the involvement of the tee-internal teedataobject,
which stores returned items.

>>> item = it.next()



This test leaked at one point due to generator finalization/destruction.
It was copied from Lib/test/leakers/test_generator_cycle.py before the file
was removed.

>>> def leak():
...     def gen():
...         while True:
...             yield g
...     g = gen()

>>> leak()



This test isn't really generator related, but rather exception-in-cleanup
related.  The coroutine tests (above) just happen to cause an exception in
the generator's __del__ (tp_del) method.  We can also test for this
explicitly, without generators.  We do have to redirect stderr to avoid
printing warnings and to double-check that we actually tested what we
wanted to test.

>>> import sys, StringIO
>>> old = sys.stderr
>>> try:
...     sys.stderr = StringIO.StringIO()
...     class Leaker:
...         def __del__(self):
...             raise RuntimeError
...
...     l = Leaker()
...     del l
...     err = sys.stderr.getvalue().strip()
...     err.startswith(
...         "Exception RuntimeError: RuntimeError() in <"
...     )
...     err.endswith("> ignored")
...     len(err.splitlines())
... finally:
...     sys.stderr = old
True
True
1



These refleak tests should perhaps be in a testfile of their own;
test_generators just happened to be the test that drew these out.

"""

__test__ = {"tut":       tutorial_tests,
            "pep":       pep_tests,
            "email":     email_tests,
            "fun":       fun_tests,
            "syntax":    syntax_tests,
            "conjoin":   conjoin_tests,
            "weakref":   weakref_tests,
            "coroutine": coroutine_tests,
            "refleaks":  refleaks_tests,
            }

# Magic test name that regrtest.py invokes *after* importing this module.
# This worms around a bootstrap problem.
# Note that doctest and regrtest both look in sys.argv for a "-v" argument,
# so this works as expected in both ways of running regrtest.
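# For instance, either of these (hypothetical command lines; adjust the path
# to your checkout) runs the suite verbosely:
#   python Lib/test/test_generators.py -v
#   python -m test.regrtest -v test_generators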
def test_main(verbose=None):
    from test import test_support, test_generators
    test_support.run_doctest(test_generators, verbose)

# This part isn't needed for regrtest, but for running the test directly.
if __name__ == "__main__":
    test_main(1)