1import copy
2import gc
3import pickle
4import sys
5import unittest
6import warnings
7import weakref
8import inspect
9import types
10
11from test import support
12
13
class FinalizationTest(unittest.TestCase):
    """Finalization (GC) behaviour of generator objects.

    NOTE(review): these tests are sensitive to exact reference counts and
    to which local names exist — do not add/remove bindings casually.
    """

    def test_frame_resurrect(self):
        """A generator frame can be resurrected by a generator's finalization."""
        def gen():
            nonlocal frame
            try:
                yield
            finally:
                # Runs while the generator is being finalized: publishing
                # the frame to the enclosing scope keeps it alive even
                # though the generator object itself is collected.
                frame = sys._getframe()

        g = gen()
        wr = weakref.ref(g)
        next(g)
        del g
        support.gc_collect()
        # The generator object itself must be gone...
        self.assertIs(wr(), None)
        # ...but its frame was resurrected via the nonlocal assignment.
        self.assertTrue(frame)
        del frame
        support.gc_collect()

    def test_refcycle(self):
        """A generator caught in a refcycle gets finalized anyway."""
        old_garbage = gc.garbage[:]
        finalized = False
        def gen():
            nonlocal finalized
            try:
                # The generator receives itself below (g.send(g)), so its
                # frame refers back to the generator: a reference cycle.
                g = yield
                yield 1
            finally:
                finalized = True

        g = gen()
        next(g)
        g.send(g)
        # More than the two expected references (the local plus the
        # getrefcount argument) confirms the cycle exists.
        self.assertGreater(sys.getrefcount(g), 2)
        self.assertFalse(finalized)
        del g
        support.gc_collect()
        # The cyclic GC must still run the generator's finally block...
        self.assertTrue(finalized)
        # ...without leaving anything in gc.garbage.
        self.assertEqual(gc.garbage, old_garbage)

    def test_lambda_generator(self):
        """Issue #23192: a lambda returning a generator behaves like the
        equivalent def function — the sent value becomes the return value,
        carried on StopIteration.
        """
        f = lambda: (yield 1)
        def g(): return (yield 1)

        # test 'yield from'
        f2 = lambda: (yield from g())
        def g2(): return (yield from g())

        f3 = lambda: (yield from f())
        def g3(): return (yield from f())

        for gen_fun in (f, g, f2, g2, f3, g3):
            gen = gen_fun()
            self.assertEqual(next(gen), 1)
            with self.assertRaises(StopIteration) as cm:
                gen.send(2)
            self.assertEqual(cm.exception.value, 2)
76
77
class GeneratorTest(unittest.TestCase):

    def test_name(self):
        """__name__ / __qualname__ handling on generator objects."""
        def func():
            yield 1

        # A fresh generator inherits both names from its function.
        gen = func()
        self.assertEqual(gen.__name__, "func")
        self.assertEqual(gen.__qualname__,
                         "GeneratorTest.test_name.<locals>.func")

        # Both attributes are writable on the generator itself.
        gen.__name__ = "name"
        gen.__qualname__ = "qualname"
        self.assertEqual(gen.__name__, "name")
        self.assertEqual(gen.__qualname__, "qualname")

        # Only str values are accepted, and neither attribute can be
        # deleted.
        for attr in ('__name__', '__qualname__'):
            with self.assertRaises(TypeError):
                setattr(gen, attr, 123)
            with self.assertRaises(TypeError):
                delattr(gen, attr)

        # Renaming the function is reflected by generators created
        # afterwards.
        func.__qualname__ = "func_qualname"
        func.__name__ = "func_name"
        gen = func()
        self.assertEqual(gen.__name__, "func_name")
        self.assertEqual(gen.__qualname__, "func_qualname")

        # A generator expression gets the placeholder name <genexpr>.
        gen = (x for x in range(10))
        self.assertEqual(gen.__name__, "<genexpr>")
        self.assertEqual(gen.__qualname__,
                         "GeneratorTest.test_name.<locals>.<genexpr>")

    def test_copy(self):
        """Generator objects do not support copy.copy()."""
        def f():
            yield 1
        self.assertRaises(TypeError, copy.copy, f())

    def test_pickle(self):
        """Generator objects cannot be pickled under any protocol."""
        def f():
            yield 1
        g = f()
        for proto in range(pickle.HIGHEST_PROTOCOL + 1):
            self.assertRaises((TypeError, pickle.PicklingError),
                              pickle.dumps, g, proto)
130
131
class ExceptionTest(unittest.TestCase):
    # Tests for the issue #23353: check that the currently handled exception
    # is correctly saved/restored in PyEval_EvalFrameEx().

    def test_except_throw(self):
        """The generator's exception state survives across gen.throw()."""
        def store_raise_exc_generator():
            try:
                # Nothing is being handled before anything is thrown in.
                self.assertEqual(sys.exc_info()[0], None)
                yield
            except Exception as exc:
                # exception raised by gen.throw(exc)
                self.assertEqual(sys.exc_info()[0], ValueError)
                # Thrown in directly, so it must not have picked up a
                # __context__.
                self.assertIsNone(exc.__context__)
                yield

                # ensure that the exception is not lost
                self.assertEqual(sys.exc_info()[0], ValueError)
                yield

                # we should be able to raise back the ValueError
                raise

        make = store_raise_exc_generator()
        next(make)

        try:
            raise ValueError()
        except Exception as exc:
            try:
                make.throw(exc)
            except Exception:
                pass

        next(make)
        # Resuming once more reaches the bare ``raise``: the original
        # ValueError comes back out, still without a context.
        with self.assertRaises(ValueError) as cm:
            next(make)
        self.assertIsNone(cm.exception.__context__)

        # The caller's exception state must be clean again.
        self.assertEqual(sys.exc_info(), (None, None, None))

    def test_except_next(self):
        """A generator resumed via next() sees the caller's active exception."""
        def gen():
            self.assertEqual(sys.exc_info()[0], ValueError)
            yield "done"

        g = gen()
        try:
            raise ValueError
        except Exception:
            self.assertEqual(next(g), "done")
        # Leaving the except block restores a clean exception state.
        self.assertEqual(sys.exc_info(), (None, None, None))

    def test_except_gen_except(self):
        """Exception state is saved/restored across the generator's yields."""
        def gen():
            try:
                self.assertEqual(sys.exc_info()[0], None)
                yield
                # we are called from "except ValueError:", TypeError must
                # inherit ValueError in its context
                raise TypeError()
            except TypeError as exc:
                self.assertEqual(sys.exc_info()[0], TypeError)
                self.assertEqual(type(exc.__context__), ValueError)
            # here we are still called from the "except ValueError:"
            self.assertEqual(sys.exc_info()[0], ValueError)
            yield
            # Resumed from outside any except block: state is clean again.
            self.assertIsNone(sys.exc_info()[0])
            yield "done"

        g = gen()
        next(g)
        try:
            raise ValueError
        except Exception:
            next(g)

        self.assertEqual(next(g), "done")
        self.assertEqual(sys.exc_info(), (None, None, None))

    def test_except_throw_exception_context(self):
        """gen.throw() links the handled exception as __context__ of new ones."""
        def gen():
            try:
                try:
                    self.assertEqual(sys.exc_info()[0], None)
                    yield
                except ValueError:
                    # we are called from "except ValueError:"
                    self.assertEqual(sys.exc_info()[0], ValueError)
                    raise TypeError()
            except Exception as exc:
                self.assertEqual(sys.exc_info()[0], TypeError)
                self.assertEqual(type(exc.__context__), ValueError)
            # we are still called from "except ValueError:"
            self.assertEqual(sys.exc_info()[0], ValueError)
            yield
            self.assertIsNone(sys.exc_info()[0])
            yield "done"

        g = gen()
        next(g)
        try:
            raise ValueError
        except Exception as exc:
            g.throw(exc)

        self.assertEqual(next(g), "done")
        self.assertEqual(sys.exc_info(), (None, None, None))

    def test_stopiteration_warning(self):
        """PEP 479: StopIteration raised inside a generator body produces a
        DeprecationWarning (and still escapes as StopIteration)."""
        # See also PEP 479.

        def gen():
            raise StopIteration
            yield

        with self.assertRaises(StopIteration), \
             self.assertWarnsRegex(DeprecationWarning, "StopIteration"):

            next(gen())

        # With warnings turned into errors, the DeprecationWarning itself
        # propagates instead of the StopIteration.
        with self.assertRaisesRegex(DeprecationWarning,
                                    "generator .* raised StopIteration"), \
             warnings.catch_warnings():

            warnings.simplefilter('error')
            next(gen())


    def test_tutorial_stopiteration(self):
        # "Raise StopIteration" stops the generator too:

        def f():
            yield 1
            raise StopIteration
            yield 2 # never reached

        g = f()
        self.assertEqual(next(g), 1)

        with self.assertWarnsRegex(DeprecationWarning, "StopIteration"):
            with self.assertRaises(StopIteration):
                next(g)

        with self.assertRaises(StopIteration):
            # This time StopIteration isn't raised from the generator's body,
            # hence no warning.
            next(g)

    def test_return_tuple(self):
        """A returned tuple must not be mistaken for StopIteration's args."""
        def g():
            return (yield 1)

        gen = g()
        self.assertEqual(next(gen), 1)
        with self.assertRaises(StopIteration) as cm:
            gen.send((2,))
        # The whole tuple is the .value, not unpacked into arguments.
        self.assertEqual(cm.exception.value, (2,))

    def test_return_stopiteration(self):
        """Returning a StopIteration instance wraps it as .value."""
        def g():
            return (yield 1)

        gen = g()
        self.assertEqual(next(gen), 1)
        with self.assertRaises(StopIteration) as cm:
            gen.send(StopIteration(2))
        self.assertIsInstance(cm.exception.value, StopIteration)
        self.assertEqual(cm.exception.value.value, 2)
300
301
class YieldFromTests(unittest.TestCase):
    def test_generator_gi_yieldfrom(self):
        """gi_yieldfrom is exposed only while suspended in a yield-from."""
        def a():
            # While the delegating generator is executing, its state is
            # GEN_RUNNING and gi_yieldfrom reads as None, even though we
            # got here via ``yield from``.
            self.assertIsNone(delegator.gi_yieldfrom)
            self.assertEqual(inspect.getgeneratorstate(delegator),
                             inspect.GEN_RUNNING)
            yield
            self.assertIsNone(delegator.gi_yieldfrom)
            self.assertEqual(inspect.getgeneratorstate(delegator),
                             inspect.GEN_RUNNING)

        def b():
            self.assertIsNone(delegator.gi_yieldfrom)
            yield from a()
            self.assertIsNone(delegator.gi_yieldfrom)
            yield
            self.assertIsNone(delegator.gi_yieldfrom)

        delegator = b()
        # Freshly created: not started, no delegation in progress.
        self.assertIsNone(delegator.gi_yieldfrom)
        self.assertEqual(inspect.getgeneratorstate(delegator),
                         inspect.GEN_CREATED)

        # Suspended inside ``yield from a()``: the subgenerator is visible.
        delegator.send(None)
        self.assertEqual(delegator.gi_yieldfrom.gi_code.co_name, 'a')
        self.assertEqual(inspect.getgeneratorstate(delegator),
                         inspect.GEN_SUSPENDED)

        # Suspended at the plain ``yield``: delegation is over.
        delegator.send(None)
        self.assertIsNone(delegator.gi_yieldfrom)
        self.assertEqual(inspect.getgeneratorstate(delegator),
                         inspect.GEN_SUSPENDED)

        # Unpacking into [] both exhausts the generator and asserts that
        # it yields nothing further.
        [] = delegator
        self.assertIsNone(delegator.gi_yieldfrom)
        self.assertEqual(inspect.getgeneratorstate(delegator),
                         inspect.GEN_CLOSED)
333
334
# Doctest text: tutorial-style examples of basic generator behaviour
# (iteration, falling off the end, "return" vs StopIteration, nesting).
# NOTE(review): presumably collected into __test__ and run by doctest
# machinery later in the file — confirm. The string is runtime test data;
# do not edit its contents.
tutorial_tests = """
Let's try a simple generator:

    >>> def f():
    ...    yield 1
    ...    yield 2

    >>> for i in f():
    ...     print(i)
    1
    2
    >>> g = f()
    >>> next(g)
    1
    >>> next(g)
    2

"Falling off the end" stops the generator:

    >>> next(g)
    Traceback (most recent call last):
      File "<stdin>", line 1, in ?
      File "<stdin>", line 2, in g
    StopIteration

"return" also stops the generator:

    >>> def f():
    ...     yield 1
    ...     return
    ...     yield 2 # never reached
    ...
    >>> g = f()
    >>> next(g)
    1
    >>> next(g)
    Traceback (most recent call last):
      File "<stdin>", line 1, in ?
      File "<stdin>", line 3, in f
    StopIteration
    >>> next(g) # once stopped, can't be resumed
    Traceback (most recent call last):
      File "<stdin>", line 1, in ?
    StopIteration

However, "return" and StopIteration are not exactly equivalent:

    >>> def g1():
    ...     try:
    ...         return
    ...     except:
    ...         yield 1
    ...
    >>> list(g1())
    []

    >>> def g2():
    ...     try:
    ...         raise StopIteration
    ...     except:
    ...         yield 42
    >>> print(list(g2()))
    [42]

This may be surprising at first:

    >>> def g3():
    ...     try:
    ...         return
    ...     finally:
    ...         yield 1
    ...
    >>> list(g3())
    [1]

Let's create an alternate range() function implemented as a generator:

    >>> def yrange(n):
    ...     for i in range(n):
    ...         yield i
    ...
    >>> list(yrange(5))
    [0, 1, 2, 3, 4]

Generators always return to the most recent caller:

    >>> def creator():
    ...     r = yrange(5)
    ...     print("creator", next(r))
    ...     return r
    ...
    >>> def caller():
    ...     r = creator()
    ...     for i in r:
    ...             print("caller", i)
    ...
    >>> caller()
    creator 0
    caller 1
    caller 2
    caller 3
    caller 4

Generators can call other generators:

    >>> def zrange(n):
    ...     for i in yrange(n):
    ...         yield i
    ...
    >>> list(zrange(5))
    [0, 1, 2, 3, 4]

"""
448
# The examples from PEP 255 (Simple Generators): yield restrictions,
# return vs StopIteration, exception propagation, try/except/finally
# interaction, and Guido's binary-tree in-order traversal examples.
# Runtime doctest data — do not edit the string contents.

pep_tests = """

Specification:  Yield

    Restriction:  A generator cannot be resumed while it is actively
    running:

    >>> def g():
    ...     i = next(me)
    ...     yield i
    >>> me = g()
    >>> next(me)
    Traceback (most recent call last):
     ...
      File "<string>", line 2, in g
    ValueError: generator already executing

Specification: Return

    Note that return isn't always equivalent to raising StopIteration:  the
    difference lies in how enclosing try/except constructs are treated.
    For example,

        >>> def f1():
        ...     try:
        ...         return
        ...     except:
        ...        yield 1
        >>> print(list(f1()))
        []

    because, as in any function, return simply exits, but

        >>> def f2():
        ...     try:
        ...         raise StopIteration
        ...     except:
        ...         yield 42
        >>> print(list(f2()))
        [42]

    because StopIteration is captured by a bare "except", as is any
    exception.

Specification: Generators and Exception Propagation

    >>> def f():
    ...     return 1//0
    >>> def g():
    ...     yield f()  # the zero division exception propagates
    ...     yield 42   # and we'll never get here
    >>> k = g()
    >>> next(k)
    Traceback (most recent call last):
      File "<stdin>", line 1, in ?
      File "<stdin>", line 2, in g
      File "<stdin>", line 2, in f
    ZeroDivisionError: integer division or modulo by zero
    >>> next(k)  # and the generator cannot be resumed
    Traceback (most recent call last):
      File "<stdin>", line 1, in ?
    StopIteration
    >>>

Specification: Try/Except/Finally

    >>> def f():
    ...     try:
    ...         yield 1
    ...         try:
    ...             yield 2
    ...             1//0
    ...             yield 3  # never get here
    ...         except ZeroDivisionError:
    ...             yield 4
    ...             yield 5
    ...             raise
    ...         except:
    ...             yield 6
    ...         yield 7     # the "raise" above stops this
    ...     except:
    ...         yield 8
    ...     yield 9
    ...     try:
    ...         x = 12
    ...     finally:
    ...         yield 10
    ...     yield 11
    >>> print(list(f()))
    [1, 2, 4, 5, 8, 9, 10, 11]
    >>>

Guido's binary tree example.

    >>> # A binary tree class.
    >>> class Tree:
    ...
    ...     def __init__(self, label, left=None, right=None):
    ...         self.label = label
    ...         self.left = left
    ...         self.right = right
    ...
    ...     def __repr__(self, level=0, indent="    "):
    ...         s = level*indent + repr(self.label)
    ...         if self.left:
    ...             s = s + "\\n" + self.left.__repr__(level+1, indent)
    ...         if self.right:
    ...             s = s + "\\n" + self.right.__repr__(level+1, indent)
    ...         return s
    ...
    ...     def __iter__(self):
    ...         return inorder(self)

    >>> # Create a Tree from a list.
    >>> def tree(list):
    ...     n = len(list)
    ...     if n == 0:
    ...         return []
    ...     i = n // 2
    ...     return Tree(list[i], tree(list[:i]), tree(list[i+1:]))

    >>> # Show it off: create a tree.
    >>> t = tree("ABCDEFGHIJKLMNOPQRSTUVWXYZ")

    >>> # A recursive generator that generates Tree labels in in-order.
    >>> def inorder(t):
    ...     if t:
    ...         for x in inorder(t.left):
    ...             yield x
    ...         yield t.label
    ...         for x in inorder(t.right):
    ...             yield x

    >>> # Show it off: create a tree.
    >>> t = tree("ABCDEFGHIJKLMNOPQRSTUVWXYZ")
    >>> # Print the nodes of the tree in in-order.
    >>> for x in t:
    ...     print(' '+x, end='')
     A B C D E F G H I J K L M N O P Q R S T U V W X Y Z

    >>> # A non-recursive generator.
    >>> def inorder(node):
    ...     stack = []
    ...     while node:
    ...         while node.left:
    ...             stack.append(node)
    ...             node = node.left
    ...         yield node.label
    ...         while not node.right:
    ...             try:
    ...                 node = stack.pop()
    ...             except IndexError:
    ...                 return
    ...             yield node.label
    ...         node = node.right

    >>> # Exercise the non-recursive generator.
    >>> for x in t:
    ...     print(' '+x, end='')
     A B C D E F G H I J K L M N O P Q R S T U V W X Y Z

"""
613
# Examples from Iterator-List and Python-Dev and c.l.py: yielding vs
# returning None, StopIteration inside try/except, combinations,
# generator introspection attributes, and a union-find via generators.
# Runtime doctest data — do not edit the string contents.

email_tests = """

The difference between yielding None and returning it.

>>> def g():
...     for i in range(3):
...         yield None
...     yield None
...     return
>>> list(g())
[None, None, None, None]

Ensure that explicitly raising StopIteration acts like any other exception
in try/except, not like a return.

>>> def g():
...     yield 1
...     try:
...         raise StopIteration
...     except:
...         yield 2
...     yield 3
>>> list(g())
[1, 2, 3]

Next one was posted to c.l.py.

>>> def gcomb(x, k):
...     "Generate all combinations of k elements from list x."
...
...     if k > len(x):
...         return
...     if k == 0:
...         yield []
...     else:
...         first, rest = x[0], x[1:]
...         # A combination does or doesn't contain first.
...         # If it does, the remainder is a k-1 comb of rest.
...         for c in gcomb(rest, k-1):
...             c.insert(0, first)
...             yield c
...         # If it doesn't contain first, it's a k comb of rest.
...         for c in gcomb(rest, k):
...             yield c

>>> seq = list(range(1, 5))
>>> for k in range(len(seq) + 2):
...     print("%d-combs of %s:" % (k, seq))
...     for c in gcomb(seq, k):
...         print("   ", c)
0-combs of [1, 2, 3, 4]:
    []
1-combs of [1, 2, 3, 4]:
    [1]
    [2]
    [3]
    [4]
2-combs of [1, 2, 3, 4]:
    [1, 2]
    [1, 3]
    [1, 4]
    [2, 3]
    [2, 4]
    [3, 4]
3-combs of [1, 2, 3, 4]:
    [1, 2, 3]
    [1, 2, 4]
    [1, 3, 4]
    [2, 3, 4]
4-combs of [1, 2, 3, 4]:
    [1, 2, 3, 4]
5-combs of [1, 2, 3, 4]:

From the Iterators list, about the types of these things.

>>> def g():
...     yield 1
...
>>> type(g)
<class 'function'>
>>> i = g()
>>> type(i)
<class 'generator'>
>>> [s for s in dir(i) if not s.startswith('_')]
['close', 'gi_code', 'gi_frame', 'gi_running', 'gi_yieldfrom', 'send', 'throw']
>>> from test.support import HAVE_DOCSTRINGS
>>> print(i.__next__.__doc__ if HAVE_DOCSTRINGS else 'Implement next(self).')
Implement next(self).
>>> iter(i) is i
True
>>> import types
>>> isinstance(i, types.GeneratorType)
True

And more, added later.

>>> i.gi_running
0
>>> type(i.gi_frame)
<class 'frame'>
>>> i.gi_running = 42
Traceback (most recent call last):
  ...
AttributeError: readonly attribute
>>> def g():
...     yield me.gi_running
>>> me = g()
>>> me.gi_running
0
>>> next(me)
1
>>> me.gi_running
0

A clever union-find implementation from c.l.py, due to David Eppstein.
Sent: Friday, June 29, 2001 12:16 PM
To: python-list@python.org
Subject: Re: PEP 255: Simple Generators

>>> class disjointSet:
...     def __init__(self, name):
...         self.name = name
...         self.parent = None
...         self.generator = self.generate()
...
...     def generate(self):
...         while not self.parent:
...             yield self
...         for x in self.parent.generator:
...             yield x
...
...     def find(self):
...         return next(self.generator)
...
...     def union(self, parent):
...         if self.parent:
...             raise ValueError("Sorry, I'm not a root!")
...         self.parent = parent
...
...     def __str__(self):
...         return self.name

>>> names = "ABCDEFGHIJKLM"
>>> sets = [disjointSet(name) for name in names]
>>> roots = sets[:]

>>> import random
>>> gen = random.Random(42)
>>> while 1:
...     for s in sets:
...         print(" %s->%s" % (s, s.find()), end='')
...     print()
...     if len(roots) > 1:
...         s1 = gen.choice(roots)
...         roots.remove(s1)
...         s2 = gen.choice(roots)
...         s1.union(s2)
...         print("merged", s1, "into", s2)
...     else:
...         break
 A->A B->B C->C D->D E->E F->F G->G H->H I->I J->J K->K L->L M->M
merged K into B
 A->A B->B C->C D->D E->E F->F G->G H->H I->I J->J K->B L->L M->M
merged A into F
 A->F B->B C->C D->D E->E F->F G->G H->H I->I J->J K->B L->L M->M
merged E into F
 A->F B->B C->C D->D E->F F->F G->G H->H I->I J->J K->B L->L M->M
merged D into C
 A->F B->B C->C D->C E->F F->F G->G H->H I->I J->J K->B L->L M->M
merged M into C
 A->F B->B C->C D->C E->F F->F G->G H->H I->I J->J K->B L->L M->C
merged J into B
 A->F B->B C->C D->C E->F F->F G->G H->H I->I J->B K->B L->L M->C
merged B into C
 A->F B->C C->C D->C E->F F->F G->G H->H I->I J->C K->C L->L M->C
merged F into G
 A->G B->C C->C D->C E->G F->G G->G H->H I->I J->C K->C L->L M->C
merged L into C
 A->G B->C C->C D->C E->G F->G G->G H->H I->I J->C K->C L->C M->C
merged G into I
 A->I B->C C->C D->C E->I F->I G->I H->H I->I J->C K->C L->C M->C
merged I into H
 A->H B->C C->C D->C E->H F->H G->H H->H I->H J->C K->C L->C M->C
merged C into H
 A->H B->H C->H D->H E->H F->H G->H H->H I->H J->H K->H L->H M->H

"""
# Emacs turd '
804
# Fun tests (for sufficiently warped notions of "fun"): recursive sieve
# of Eratosthenes, Hamming numbers (2**i * 3**j * 5**k) and Fibonacci via
# plain recursion, LazyList memoization, and itertools.tee sharing.
# Runtime doctest data — do not edit the string contents.

fun_tests = """

Build up to a recursive Sieve of Eratosthenes generator.

>>> def firstn(g, n):
...     return [next(g) for i in range(n)]

>>> def intsfrom(i):
...     while 1:
...         yield i
...         i += 1

>>> firstn(intsfrom(5), 7)
[5, 6, 7, 8, 9, 10, 11]

>>> def exclude_multiples(n, ints):
...     for i in ints:
...         if i % n:
...             yield i

>>> firstn(exclude_multiples(3, intsfrom(1)), 6)
[1, 2, 4, 5, 7, 8]

>>> def sieve(ints):
...     prime = next(ints)
...     yield prime
...     not_divisible_by_prime = exclude_multiples(prime, ints)
...     for p in sieve(not_divisible_by_prime):
...         yield p

>>> primes = sieve(intsfrom(2))
>>> firstn(primes, 20)
[2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31, 37, 41, 43, 47, 53, 59, 61, 67, 71]


Another famous problem:  generate all integers of the form
    2**i * 3**j  * 5**k
in increasing order, where i,j,k >= 0.  Trickier than it may look at first!
Try writing it without generators, and correctly, and without generating
3 internal results for each result output.

>>> def times(n, g):
...     for i in g:
...         yield n * i
>>> firstn(times(10, intsfrom(1)), 10)
[10, 20, 30, 40, 50, 60, 70, 80, 90, 100]

>>> def merge(g, h):
...     ng = next(g)
...     nh = next(h)
...     while 1:
...         if ng < nh:
...             yield ng
...             ng = next(g)
...         elif ng > nh:
...             yield nh
...             nh = next(h)
...         else:
...             yield ng
...             ng = next(g)
...             nh = next(h)

The following works, but is doing a whale of a lot of redundant work --
it's not clear how to get the internal uses of m235 to share a single
generator.  Note that me_times2 (etc) each need to see every element in the
result sequence.  So this is an example where lazy lists are more natural
(you can look at the head of a lazy list any number of times).

>>> def m235():
...     yield 1
...     me_times2 = times(2, m235())
...     me_times3 = times(3, m235())
...     me_times5 = times(5, m235())
...     for i in merge(merge(me_times2,
...                          me_times3),
...                    me_times5):
...         yield i

Don't print "too many" of these -- the implementation above is extremely
inefficient:  each call of m235() leads to 3 recursive calls, and in
turn each of those 3 more, and so on, and so on, until we've descended
enough levels to satisfy the print stmts.  Very odd:  when I printed 5
lines of results below, this managed to screw up Win98's malloc in "the
usual" way, i.e. the heap grew over 4Mb so Win98 started fragmenting
address space, and it *looked* like a very slow leak.

>>> result = m235()
>>> for i in range(3):
...     print(firstn(result, 15))
[1, 2, 3, 4, 5, 6, 8, 9, 10, 12, 15, 16, 18, 20, 24]
[25, 27, 30, 32, 36, 40, 45, 48, 50, 54, 60, 64, 72, 75, 80]
[81, 90, 96, 100, 108, 120, 125, 128, 135, 144, 150, 160, 162, 180, 192]

Heh.  Here's one way to get a shared list, complete with an excruciating
namespace renaming trick.  The *pretty* part is that the times() and merge()
functions can be reused as-is, because they only assume their stream
arguments are iterable -- a LazyList is the same as a generator to times().

>>> class LazyList:
...     def __init__(self, g):
...         self.sofar = []
...         self.fetch = g.__next__
...
...     def __getitem__(self, i):
...         sofar, fetch = self.sofar, self.fetch
...         while i >= len(sofar):
...             sofar.append(fetch())
...         return sofar[i]

>>> def m235():
...     yield 1
...     # Gack:  m235 below actually refers to a LazyList.
...     me_times2 = times(2, m235)
...     me_times3 = times(3, m235)
...     me_times5 = times(5, m235)
...     for i in merge(merge(me_times2,
...                          me_times3),
...                    me_times5):
...         yield i

Print as many of these as you like -- *this* implementation is memory-
efficient.

>>> m235 = LazyList(m235())
>>> for i in range(5):
...     print([m235[j] for j in range(15*i, 15*(i+1))])
[1, 2, 3, 4, 5, 6, 8, 9, 10, 12, 15, 16, 18, 20, 24]
[25, 27, 30, 32, 36, 40, 45, 48, 50, 54, 60, 64, 72, 75, 80]
[81, 90, 96, 100, 108, 120, 125, 128, 135, 144, 150, 160, 162, 180, 192]
[200, 216, 225, 240, 243, 250, 256, 270, 288, 300, 320, 324, 360, 375, 384]
[400, 405, 432, 450, 480, 486, 500, 512, 540, 576, 600, 625, 640, 648, 675]

Ye olde Fibonacci generator, LazyList style.

>>> def fibgen(a, b):
...
...     def sum(g, h):
...         while 1:
...             yield next(g) + next(h)
...
...     def tail(g):
...         next(g)    # throw first away
...         for x in g:
...             yield x
...
...     yield a
...     yield b
...     for s in sum(iter(fib),
...                  tail(iter(fib))):
...         yield s

>>> fib = LazyList(fibgen(1, 2))
>>> firstn(iter(fib), 17)
[1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, 233, 377, 610, 987, 1597, 2584]


Running after your tail with itertools.tee (new in version 2.4)

The algorithms "m235" (Hamming) and Fibonacci presented above are both
examples of a whole family of FP (functional programming) algorithms
where a function produces and returns a list while the production algorithm
suppose the list as already produced by recursively calling itself.
For these algorithms to work, they must:

- produce at least a first element without presupposing the existence of
  the rest of the list
- produce their elements in a lazy manner

To work efficiently, the beginning of the list must not be recomputed over
and over again. This is ensured in most FP languages as a built-in feature.
In python, we have to explicitly maintain a list of already computed results
and abandon genuine recursivity.

This is what had been attempted above with the LazyList class. One problem
with that class is that it keeps a list of all of the generated results and
therefore continually grows. This partially defeats the goal of the generator
concept, viz. produce the results only as needed instead of producing them
all and thereby wasting memory.

Thanks to itertools.tee, it is now clear "how to get the internal uses of
m235 to share a single generator".

>>> from itertools import tee
>>> def m235():
...     def _m235():
...         yield 1
...         for n in merge(times(2, m2),
...                        merge(times(3, m3),
...                              times(5, m5))):
...             yield n
...     m1 = _m235()
...     m2, m3, m5, mRes = tee(m1, 4)
...     return mRes

>>> it = m235()
>>> for i in range(5):
...     print(firstn(it, 15))
[1, 2, 3, 4, 5, 6, 8, 9, 10, 12, 15, 16, 18, 20, 24]
[25, 27, 30, 32, 36, 40, 45, 48, 50, 54, 60, 64, 72, 75, 80]
[81, 90, 96, 100, 108, 120, 125, 128, 135, 144, 150, 160, 162, 180, 192]
[200, 216, 225, 240, 243, 250, 256, 270, 288, 300, 320, 324, 360, 375, 384]
[400, 405, 432, 450, 480, 486, 500, 512, 540, 576, 600, 625, 640, 648, 675]

The "tee" function does just what we want. It internally keeps a generated
result for as long as it has not been "consumed" from all of the duplicated
iterators, whereupon it is deleted. You can therefore print the hamming
sequence during hours without increasing memory usage, or very little.

The beauty of it is that recursive running-after-their-tail FP algorithms
are quite straightforwardly expressed with this Python idiom.

Ye olde Fibonacci generator, tee style.

>>> def fib():
...
...     def _isum(g, h):
...         while 1:
...             yield next(g) + next(h)
...
...     def _fib():
...         yield 1
...         yield 2
...         next(fibTail) # throw first away
...         for res in _isum(fibHead, fibTail):
...             yield res
...
...     realfib = _fib()
...     fibHead, fibTail, fibRes = tee(realfib, 3)
...     return fibRes

>>> firstn(fib(), 17)
[1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, 233, 377, 610, 987, 1597, 2584]

"""
1041
1042# syntax_tests mostly provokes SyntaxErrors.  Also fiddling with #if 0
1043# hackery.
1044
1045syntax_tests = """
1046
1047These are fine:
1048
1049>>> def f():
1050...     yield 1
1051...     return
1052
1053>>> def f():
1054...     try:
1055...         yield 1
1056...     finally:
1057...         pass
1058
1059>>> def f():
1060...     try:
1061...         try:
1062...             1//0
1063...         except ZeroDivisionError:
1064...             yield 666
1065...         except:
1066...             pass
1067...     finally:
1068...         pass
1069
1070>>> def f():
1071...     try:
1072...         try:
1073...             yield 12
1074...             1//0
1075...         except ZeroDivisionError:
1076...             yield 666
1077...         except:
1078...             try:
1079...                 x = 12
1080...             finally:
1081...                 yield 12
1082...     except:
1083...         return
1084>>> list(f())
1085[12, 666]
1086
1087>>> def f():
1088...    yield
1089>>> type(f())
1090<class 'generator'>
1091
1092
1093>>> def f():
1094...    if 0:
1095...        yield
1096>>> type(f())
1097<class 'generator'>
1098
1099
1100>>> def f():
1101...     if 0:
1102...         yield 1
1103>>> type(f())
1104<class 'generator'>
1105
1106>>> def f():
1107...    if "":
1108...        yield None
1109>>> type(f())
1110<class 'generator'>
1111
1112>>> def f():
1113...     return
1114...     try:
1115...         if x==4:
1116...             pass
1117...         elif 0:
1118...             try:
1119...                 1//0
1120...             except SyntaxError:
1121...                 pass
1122...             else:
1123...                 if 0:
1124...                     while 12:
1125...                         x += 1
1126...                         yield 2 # don't blink
1127...                         f(a, b, c, d, e)
1128...         else:
1129...             pass
1130...     except:
1131...         x = 1
1132...     return
1133>>> type(f())
1134<class 'generator'>
1135
1136>>> def f():
1137...     if 0:
1138...         def g():
1139...             yield 1
1140...
1141>>> type(f())
1142<class 'NoneType'>
1143
1144>>> def f():
1145...     if 0:
1146...         class C:
1147...             def __init__(self):
1148...                 yield 1
1149...             def f(self):
1150...                 yield 2
1151>>> type(f())
1152<class 'NoneType'>
1153
1154>>> def f():
1155...     if 0:
1156...         return
1157...     if 0:
1158...         yield 2
1159>>> type(f())
1160<class 'generator'>
1161
1162This one caused a crash (see SF bug 567538):
1163
1164>>> def f():
1165...     for i in range(3):
1166...         try:
1167...             continue
1168...         finally:
1169...             yield i
1170...
1171>>> g = f()
1172>>> print(next(g))
11730
1174>>> print(next(g))
11751
1176>>> print(next(g))
11772
1178>>> print(next(g))
1179Traceback (most recent call last):
1180StopIteration
1181
1182
1183Test the gi_code attribute
1184
1185>>> def f():
1186...     yield 5
1187...
1188>>> g = f()
1189>>> g.gi_code is f.__code__
1190True
1191>>> next(g)
11925
1193>>> next(g)
1194Traceback (most recent call last):
1195StopIteration
1196>>> g.gi_code is f.__code__
1197True
1198
1199
1200Test the __name__ attribute and the repr()
1201
1202>>> def f():
1203...    yield 5
1204...
1205>>> g = f()
1206>>> g.__name__
1207'f'
1208>>> repr(g)  # doctest: +ELLIPSIS
1209'<generator object f at ...>'
1210
1211Lambdas shouldn't have their usual return behavior.
1212
1213>>> x = lambda: (yield 1)
1214>>> list(x())
1215[1]
1216
1217>>> x = lambda: ((yield 1), (yield 2))
1218>>> list(x())
1219[1, 2]
1220"""
1221
1222# conjoin is a simple backtracking generator, named in honor of Icon's
1223# "conjunction" control structure.  Pass a list of no-argument functions
1224# that return iterable objects.  Easiest to explain by example:  assume the
1225# function list [x, y, z] is passed.  Then conjoin acts like:
1226#
1227# def g():
1228#     values = [None] * 3
1229#     for values[0] in x():
1230#         for values[1] in y():
1231#             for values[2] in z():
1232#                 yield values
1233#
1234# So some 3-lists of values *may* be generated, each time we successfully
1235# get into the innermost loop.  If an iterator fails (is exhausted) before
1236# then, it "backtracks" to get the next value from the nearest enclosing
1237# iterator (the one "to the left"), and starts all over again at the next
1238# slot (pumps a fresh iterator).  Of course this is most useful when the
1239# iterators have side-effects, so that which values *can* be generated at
1240# each slot depend on the values iterated at previous slots.
1241
def simple_conjoin(gs):
    """Straightforward recursive conjoin: yield a shared list holding
    one value per factory in gs, once for every combination their
    iterators produce.  The SAME list object is yielded each time, so
    copy it if you need to keep a result.
    """
    slots = [None] * len(gs)

    def descend(depth):
        if depth >= len(gs):
            # Every slot is filled -- hand the shared list out.
            yield slots
            return
        # Pump this level's iterator, recursing for the deeper levels.
        for slots[depth] in gs[depth]():
            for combo in descend(depth + 1):
                yield combo

    for combo in descend(0):
        yield combo
1256
1257# That works fine, but recursing a level and checking i against len(gs) for
1258# each item produced is inefficient.  By doing manual loop unrolling across
1259# generator boundaries, it's possible to eliminate most of that overhead.
1260# This isn't worth the bother *in general* for generators, but conjoin() is
1261# a core building block for some CPU-intensive generator applications.
1262
def conjoin(gs):
    """Unrolled backtracking cross-product generator.  Same contract as
    simple_conjoin -- one shared result list, re-yielded for every
    combination the factories in gs produce -- but peels three nesting
    levels per recursive call to cut per-item overhead.
    """
    total = len(gs)
    row = [None] * total

    # Handle loop nests one at a time until the number remaining is
    # divisible by 3, then hand off to the 3-at-a-time worker.
    def peel(k):
        if k >= total:
            yield row

        elif (total - k) % 3:
            nxt = k + 1
            for row[k] in gs[k]():
                for out in peel(nxt):
                    yield out

        else:
            for out in triple(k):
                yield out

    # Run exactly three nested loops, recursing only if at least three
    # more levels remain.  Internal helper: only peel() should call it.
    def triple(k):
        assert k < total and (total - k) % 3 == 0
        k1, k2, k3 = k + 1, k + 2, k + 3
        a, b, c = gs[k:k3]

        if k3 >= total:
            # Innermost three levels -- yield the shared row directly.
            for row[k] in a():
                for row[k1] in b():
                    for row[k2] in c():
                        yield row

        else:
            # Six or more levels remain; peel off three and recurse.
            for row[k] in a():
                for row[k1] in b():
                    for row[k2] in c():
                        for out in triple(k3):
                            yield out

    for out in peel(0):
        yield out
1312
1313# And one more approach:  For backtracking apps like the Knight's Tour
1314# solver below, the number of backtracking levels can be enormous (one
1315# level per square, for the Knight's Tour, so that e.g. a 100x100 board
1316# needs 10,000 levels).  In such cases Python is likely to run out of
1317# stack space due to recursion.  So here's a recursion-free version of
1318# conjoin too.
1319# NOTE WELL:  This allows large problems to be solved with only trivial
1320# demands on stack space.  Without explicitly resumable generators, this is
1321# much harder to achieve.  OTOH, this is much slower (up to a factor of 2)
1322# than the fancy unrolled recursive conjoin.
1323
def flat_conjoin(gs):  # rename to conjoin to run tests with this instead
    """Iterative conjoin: same contract as conjoin() (one shared result
    list, re-yielded per combination), but keeps an explicit stack of
    iterators instead of recursing, so the number of backtracking
    levels is limited only by memory -- not by the recursion limit.
    Up to ~2x slower than the unrolled recursive conjoin.
    """
    total = len(gs)
    row = [None] * total
    pumps = [None] * total
    _Stop = StopIteration  # local alias; this exception is caught a *lot*
    level = 0
    while 1:
        # Descend: start a fresh iterator at each remaining level.
        try:
            while level < total:
                pump = pumps[level] = gs[level]().__next__
                row[level] = pump()
                level += 1
        except _Stop:
            pass
        else:
            assert level == total
            yield row

        # Backtrack until some earlier iterator can be resumed.
        level -= 1
        while level >= 0:
            try:
                row[level] = pumps[level]()
                # Success!  Descend afresh from the next level.
                level += 1
                break
            except _Stop:
                level -= 1
        else:
            assert level < 0  # every level is exhausted -- done
            break
1357
1358# A conjoin-based N-Queens solver.
1359
class Queens:
    """Conjoin-based N-Queens solver: one backtracking generator per
    row, each yielding the columns that don't clash with the squares
    already claimed in the shared bit vector self.used."""

    def __init__(self, n):
        self.n = n
        cols = range(n)

        # Assign each column and diagonal a unique bit position:
        # columns take bits [0, n); NW-SE diagonals (i-j invariant,
        # shifted by n-1 to be 0-based) take the next 2n-1 bits;
        # NE-SW diagonals (i+j invariant, 0..2n-2) the 2n-1 after that.
        self.rowgenerators = []
        for i in cols:
            masks = [(1 << j) |                    # column bit
                     (1 << (n + i - j + n - 1)) |  # NW-SE diagonal bit
                     (1 << (n + 2*n - 1 + i + j))  # NE-SW diagonal bit
                     for j in cols]

            def one_row(masks=masks):
                # Yield each column of this row that is compatible with
                # self.used; claim its bits while deeper rows run, and
                # release them again when backtracking resumes us.
                for j in cols:
                    bits = masks[j]
                    if bits & self.used == 0:
                        self.used |= bits
                        yield j
                        self.used &= ~bits

            self.rowgenerators.append(one_row)

    def solve(self):
        """Generate solutions as row->column lists.  The same list
        object is reused for every solution; copy it to keep one."""
        self.used = 0
        for placement in conjoin(self.rowgenerators):
            yield placement

    def printsolution(self, row2col):
        """Pretty-print one solution as an ASCII chess board."""
        n = self.n
        assert n == len(row2col)
        rule = "+" + "-+" * n
        print(rule)
        for i in range(n):
            cells = [" "] * n
            cells[row2col[i]] = "Q"
            print("|" + "|".join(cells) + "|")
            print(rule)
1409
1410# A conjoin-based Knight's Tour solver.  This is pretty sophisticated
1411# (e.g., when used with flat_conjoin above, and passing hard=1 to the
1412# constructor, a 200x200 Knight's Tour was found quickly -- note that we're
1413# creating 10s of thousands of generators then!), and is lengthy.
1414
class Knights:
    """Conjoin-based Knight's Tour solver.

    Searches for closed tours (cycles) on an m x n board.  One
    backtracking generator is created per square, so large boards
    create tens of thousands of generators; use flat_conjoin for
    those.  Pass hard=1 to order moves with the stronger (but more
    expensive) advance_hard heuristic.
    """

    def __init__(self, m, n, hard=0):
        self.m, self.n = m, n

        # solve() will set up succs[i] to be a list of square #i's
        # successors.
        succs = self.succs = []

        # Remove i0 from each of its successor's successor lists, i.e.
        # successors can't go back to i0 again.  Return 0 if we can
        # detect this makes a solution impossible, else return 1.

        def remove_from_successors(i0, len=len):
            # If we remove all exits from a free square, we're dead:
            # even if we move to it next, we can't leave it again.
            # If we create a square with one exit, we must visit it next;
            # else somebody else will have to visit it, and since there's
            # only one adjacent, there won't be a way to leave it again.
            # Finally, if we create more than one free square with a
            # single exit, we can only move to one of them next, leaving
            # the other one a dead end.
            ne0 = ne1 = 0
            for i in succs[i0]:
                s = succs[i]
                s.remove(i0)
                e = len(s)
                if e == 0:
                    ne0 += 1
                elif e == 1:
                    ne1 += 1
            return ne0 == 0 and ne1 < 2

        # Put i0 back in each of its successor's successor lists.

        def add_to_successors(i0):
            for i in succs[i0]:
                succs[i].append(i0)

        # Generate the first move.
        def first():
            if m < 1 or n < 1:
                return

            # Since we're looking for a cycle, it doesn't matter where we
            # start.  Starting in a corner makes the 2nd move easy.
            corner = self.coords2index(0, 0)
            remove_from_successors(corner)
            self.lastij = corner
            yield corner
            add_to_successors(corner)

        # Generate the second moves.
        def second():
            corner = self.coords2index(0, 0)
            assert self.lastij == corner  # i.e., we started in the corner
            if m < 3 or n < 3:
                return
            assert len(succs[corner]) == 2
            assert self.coords2index(1, 2) in succs[corner]
            assert self.coords2index(2, 1) in succs[corner]
            # Only two choices.  Whichever we pick, the other must be the
            # square picked on move m*n, as it's the only way to get back
            # to (0, 0).  Save its index in self.final so that moves before
            # the last know it must be kept free.
            for i, j in (1, 2), (2, 1):
                this  = self.coords2index(i, j)
                final = self.coords2index(3-i, 3-j)
                self.final = final

                remove_from_successors(this)
                succs[final].append(corner)
                self.lastij = this
                yield this
                succs[final].remove(corner)
                add_to_successors(this)

        # Generate moves 3 thru m*n-1.
        def advance(len=len):
            # If some successor has only one exit, must take it.
            # Else favor successors with fewer exits.
            candidates = []
            for i in succs[self.lastij]:
                e = len(succs[i])
                assert e > 0, "else remove_from_successors() pruning flawed"
                if e == 1:
                    candidates = [(e, i)]
                    break
                candidates.append((e, i))
            else:
                # No forced single-exit move: try fewest exits first.
                candidates.sort()

            for e, i in candidates:
                if i != self.final:
                    if remove_from_successors(i):
                        self.lastij = i
                        yield i
                    add_to_successors(i)

        # Generate moves 3 thru m*n-1.  Alternative version using a
        # stronger (but more expensive) heuristic to order successors.
        # Since the # of backtracking levels is m*n, a poor move early on
        # can take eons to undo.  Smallest square board for which this
        # matters a lot is 52x52.
        def advance_hard(vmid=(m-1)/2.0, hmid=(n-1)/2.0, len=len):
            # If some successor has only one exit, must take it.
            # Else favor successors with fewer exits.
            # Break ties via max distance from board centerpoint (favor
            # corners and edges whenever possible).
            candidates = []
            for i in succs[self.lastij]:
                e = len(succs[i])
                assert e > 0, "else remove_from_successors() pruning flawed"
                if e == 1:
                    candidates = [(e, 0, i)]
                    break
                i1, j1 = self.index2coords(i)
                d = (i1 - vmid)**2 + (j1 - hmid)**2
                candidates.append((e, -d, i))
            else:
                candidates.sort()

            for e, d, i in candidates:
                if i != self.final:
                    if remove_from_successors(i):
                        self.lastij = i
                        yield i
                    add_to_successors(i)

        # Generate the last move.
        def last():
            assert self.final in succs[self.lastij]
            yield self.final

        if m*n < 4:
            self.squaregenerators = [first]
        else:
            self.squaregenerators = [first, second] + \
                [hard and advance_hard or advance] * (m*n - 3) + \
                [last]

    def coords2index(self, i, j):
        """Map board coordinates (i, j) to a flat square index."""
        assert 0 <= i < self.m
        assert 0 <= j < self.n
        return i * self.n + j

    def index2coords(self, index):
        """Inverse of coords2index(): map a flat index back to (i, j)."""
        assert 0 <= index < self.m * self.n
        return divmod(index, self.n)

    def _init_board(self):
        """(Re)build succs: for each square, the list of squares a
        knight can reach from it in one move."""
        succs = self.succs
        del succs[:]
        m, n = self.m, self.n
        c2i = self.coords2index

        offsets = [( 1,  2), ( 2,  1), ( 2, -1), ( 1, -2),
                   (-1, -2), (-2, -1), (-2,  1), (-1,  2)]
        rangen = range(n)
        for i in range(m):
            for j in rangen:
                s = [c2i(i+io, j+jo) for io, jo in offsets
                                     if 0 <= i+io < m and
                                        0 <= j+jo < n]
                succs.append(s)

    # Generate solutions.
    def solve(self):
        """Generate tours as lists of square indices in visit order.
        The same list object is reused across results; copy to keep."""
        self._init_board()
        for x in conjoin(self.squaregenerators):
            yield x

    def printsolution(self, x):
        """Print a tour as an ASCII board of 1-based visit numbers."""
        m, n = self.m, self.n
        assert len(x) == m*n
        w = len(str(m*n))
        format = "%" + str(w) + "d"

        squares = [[None] * n for i in range(m)]
        k = 1
        for i in x:
            i1, j1 = self.index2coords(i)
            squares[i1][j1] = format % k
            k += 1

        sep = "+" + ("-" * w + "+") * n
        print(sep)
        for i in range(m):
            row = squares[i]
            print("|" + "|".join(row) + "|")
            print(sep)
1605
1606conjoin_tests = """
1607
1608Generate the 3-bit binary numbers in order.  This illustrates dumbest-
1609possible use of conjoin, just to generate the full cross-product.
1610
1611>>> for c in conjoin([lambda: iter((0, 1))] * 3):
1612...     print(c)
1613[0, 0, 0]
1614[0, 0, 1]
1615[0, 1, 0]
1616[0, 1, 1]
1617[1, 0, 0]
1618[1, 0, 1]
1619[1, 1, 0]
1620[1, 1, 1]
1621
1622For efficiency in typical backtracking apps, conjoin() yields the same list
1623object each time.  So if you want to save away a full account of its
1624generated sequence, you need to copy its results.
1625
1626>>> def gencopy(iterator):
1627...     for x in iterator:
1628...         yield x[:]
1629
1630>>> for n in range(10):
1631...     all = list(gencopy(conjoin([lambda: iter((0, 1))] * n)))
1632...     print(n, len(all), all[0] == [0] * n, all[-1] == [1] * n)
16330 1 True True
16341 2 True True
16352 4 True True
16363 8 True True
16374 16 True True
16385 32 True True
16396 64 True True
16407 128 True True
16418 256 True True
16429 512 True True
1643
1644And run an 8-queens solver.
1645
1646>>> q = Queens(8)
1647>>> LIMIT = 2
1648>>> count = 0
1649>>> for row2col in q.solve():
1650...     count += 1
1651...     if count <= LIMIT:
1652...         print("Solution", count)
1653...         q.printsolution(row2col)
1654Solution 1
1655+-+-+-+-+-+-+-+-+
1656|Q| | | | | | | |
1657+-+-+-+-+-+-+-+-+
1658| | | | |Q| | | |
1659+-+-+-+-+-+-+-+-+
1660| | | | | | | |Q|
1661+-+-+-+-+-+-+-+-+
1662| | | | | |Q| | |
1663+-+-+-+-+-+-+-+-+
1664| | |Q| | | | | |
1665+-+-+-+-+-+-+-+-+
1666| | | | | | |Q| |
1667+-+-+-+-+-+-+-+-+
1668| |Q| | | | | | |
1669+-+-+-+-+-+-+-+-+
1670| | | |Q| | | | |
1671+-+-+-+-+-+-+-+-+
1672Solution 2
1673+-+-+-+-+-+-+-+-+
1674|Q| | | | | | | |
1675+-+-+-+-+-+-+-+-+
1676| | | | | |Q| | |
1677+-+-+-+-+-+-+-+-+
1678| | | | | | | |Q|
1679+-+-+-+-+-+-+-+-+
1680| | |Q| | | | | |
1681+-+-+-+-+-+-+-+-+
1682| | | | | | |Q| |
1683+-+-+-+-+-+-+-+-+
1684| | | |Q| | | | |
1685+-+-+-+-+-+-+-+-+
1686| |Q| | | | | | |
1687+-+-+-+-+-+-+-+-+
1688| | | | |Q| | | |
1689+-+-+-+-+-+-+-+-+
1690
1691>>> print(count, "solutions in all.")
169292 solutions in all.
1693
1694And run a Knight's Tour on a 10x10 board.  Note that there are about
169520,000 solutions even on a 6x6 board, so don't dare run this to exhaustion.
1696
1697>>> k = Knights(10, 10)
1698>>> LIMIT = 2
1699>>> count = 0
1700>>> for x in k.solve():
1701...     count += 1
1702...     if count <= LIMIT:
1703...         print("Solution", count)
1704...         k.printsolution(x)
1705...     else:
1706...         break
1707Solution 1
1708+---+---+---+---+---+---+---+---+---+---+
1709|  1| 58| 27| 34|  3| 40| 29| 10|  5|  8|
1710+---+---+---+---+---+---+---+---+---+---+
1711| 26| 35|  2| 57| 28| 33|  4|  7| 30| 11|
1712+---+---+---+---+---+---+---+---+---+---+
1713| 59|100| 73| 36| 41| 56| 39| 32|  9|  6|
1714+---+---+---+---+---+---+---+---+---+---+
1715| 74| 25| 60| 55| 72| 37| 42| 49| 12| 31|
1716+---+---+---+---+---+---+---+---+---+---+
1717| 61| 86| 99| 76| 63| 52| 47| 38| 43| 50|
1718+---+---+---+---+---+---+---+---+---+---+
1719| 24| 75| 62| 85| 54| 71| 64| 51| 48| 13|
1720+---+---+---+---+---+---+---+---+---+---+
1721| 87| 98| 91| 80| 77| 84| 53| 46| 65| 44|
1722+---+---+---+---+---+---+---+---+---+---+
1723| 90| 23| 88| 95| 70| 79| 68| 83| 14| 17|
1724+---+---+---+---+---+---+---+---+---+---+
1725| 97| 92| 21| 78| 81| 94| 19| 16| 45| 66|
1726+---+---+---+---+---+---+---+---+---+---+
1727| 22| 89| 96| 93| 20| 69| 82| 67| 18| 15|
1728+---+---+---+---+---+---+---+---+---+---+
1729Solution 2
1730+---+---+---+---+---+---+---+---+---+---+
1731|  1| 58| 27| 34|  3| 40| 29| 10|  5|  8|
1732+---+---+---+---+---+---+---+---+---+---+
1733| 26| 35|  2| 57| 28| 33|  4|  7| 30| 11|
1734+---+---+---+---+---+---+---+---+---+---+
1735| 59|100| 73| 36| 41| 56| 39| 32|  9|  6|
1736+---+---+---+---+---+---+---+---+---+---+
1737| 74| 25| 60| 55| 72| 37| 42| 49| 12| 31|
1738+---+---+---+---+---+---+---+---+---+---+
1739| 61| 86| 99| 76| 63| 52| 47| 38| 43| 50|
1740+---+---+---+---+---+---+---+---+---+---+
1741| 24| 75| 62| 85| 54| 71| 64| 51| 48| 13|
1742+---+---+---+---+---+---+---+---+---+---+
1743| 87| 98| 89| 80| 77| 84| 53| 46| 65| 44|
1744+---+---+---+---+---+---+---+---+---+---+
1745| 90| 23| 92| 95| 70| 79| 68| 83| 14| 17|
1746+---+---+---+---+---+---+---+---+---+---+
1747| 97| 88| 21| 78| 81| 94| 19| 16| 45| 66|
1748+---+---+---+---+---+---+---+---+---+---+
1749| 22| 91| 96| 93| 20| 69| 82| 67| 18| 15|
1750+---+---+---+---+---+---+---+---+---+---+
1751"""
1752
1753weakref_tests = """\
Generators are weakly referenceable:
1755
1756>>> import weakref
1757>>> def gen():
1758...     yield 'foo!'
1759...
1760>>> wr = weakref.ref(gen)
1761>>> wr() is gen
1762True
1763>>> p = weakref.proxy(gen)
1764
Generator-iterators are weakly referenceable as well:
1766
1767>>> gi = gen()
1768>>> wr = weakref.ref(gi)
1769>>> wr() is gi
1770True
1771>>> p = weakref.proxy(gi)
1772>>> list(p)
1773['foo!']
1774
1775"""
1776
1777coroutine_tests = """\
1778Sending a value into a started generator:
1779
1780>>> def f():
1781...     print((yield 1))
1782...     yield 2
1783>>> g = f()
1784>>> next(g)
17851
1786>>> g.send(42)
178742
17882
1789
1790Sending a value into a new generator produces a TypeError:
1791
1792>>> f().send("foo")
1793Traceback (most recent call last):
1794...
1795TypeError: can't send non-None value to a just-started generator
1796
1797
1798Yield by itself yields None:
1799
1800>>> def f(): yield
1801>>> list(f())
1802[None]
1803
1804
1805
1806An obscene abuse of a yield expression within a generator expression:
1807
1808>>> list((yield 21) for i in range(4))
1809[21, None, 21, None, 21, None, 21, None]
1810
1811And a more sane, but still weird usage:
1812
1813>>> def f(): list(i for i in [(yield 26)])
1814>>> type(f())
1815<class 'generator'>
1816
1817
1818A yield expression with augmented assignment.
1819
1820>>> def coroutine(seq):
1821...     count = 0
1822...     while count < 200:
1823...         count += yield
1824...         seq.append(count)
1825>>> seq = []
1826>>> c = coroutine(seq)
1827>>> next(c)
1828>>> print(seq)
1829[]
1830>>> c.send(10)
1831>>> print(seq)
1832[10]
1833>>> c.send(10)
1834>>> print(seq)
1835[10, 20]
1836>>> c.send(10)
1837>>> print(seq)
1838[10, 20, 30]
1839
1840
1841Check some syntax errors for yield expressions:
1842
1843>>> f=lambda: (yield 1),(yield 2)
1844Traceback (most recent call last):
1845  ...
1846SyntaxError: 'yield' outside function
1847
1848>>> def f(): x = yield = y
1849Traceback (most recent call last):
1850  ...
1851SyntaxError: assignment to yield expression not possible
1852
1853>>> def f(): (yield bar) = y
1854Traceback (most recent call last):
1855  ...
1856SyntaxError: can't assign to yield expression
1857
1858>>> def f(): (yield bar) += y
1859Traceback (most recent call last):
1860  ...
1861SyntaxError: can't assign to yield expression
1862
1863
1864Now check some throw() conditions:
1865
1866>>> def f():
1867...     while True:
1868...         try:
1869...             print((yield))
1870...         except ValueError as v:
1871...             print("caught ValueError (%s)" % (v))
1872>>> import sys
1873>>> g = f()
1874>>> next(g)
1875
1876>>> g.throw(ValueError) # type only
1877caught ValueError ()
1878
1879>>> g.throw(ValueError("xyz"))  # value only
1880caught ValueError (xyz)
1881
1882>>> g.throw(ValueError, ValueError(1))   # value+matching type
1883caught ValueError (1)
1884
1885>>> g.throw(ValueError, TypeError(1))  # mismatched type, rewrapped
1886caught ValueError (1)
1887
1888>>> g.throw(ValueError, ValueError(1), None)   # explicit None traceback
1889caught ValueError (1)
1890
1891>>> g.throw(ValueError(1), "foo")       # bad args
1892Traceback (most recent call last):
1893  ...
1894TypeError: instance exception may not have a separate value
1895
1896>>> g.throw(ValueError, "foo", 23)      # bad args
1897Traceback (most recent call last):
1898  ...
1899TypeError: throw() third argument must be a traceback object
1900
1901>>> g.throw("abc")
1902Traceback (most recent call last):
1903  ...
1904TypeError: exceptions must be classes or instances deriving from BaseException, not str
1905
1906>>> g.throw(0)
1907Traceback (most recent call last):
1908  ...
1909TypeError: exceptions must be classes or instances deriving from BaseException, not int
1910
1911>>> g.throw(list)
1912Traceback (most recent call last):
1913  ...
1914TypeError: exceptions must be classes or instances deriving from BaseException, not type
1915
1916>>> def throw(g,exc):
1917...     try:
1918...         raise exc
1919...     except:
1920...         g.throw(*sys.exc_info())
1921>>> throw(g,ValueError) # do it with traceback included
1922caught ValueError ()
1923
1924>>> g.send(1)
19251
1926
1927>>> throw(g,TypeError)  # terminate the generator
1928Traceback (most recent call last):
1929  ...
1930TypeError
1931
1932>>> print(g.gi_frame)
1933None
1934
1935>>> g.send(2)
1936Traceback (most recent call last):
1937  ...
1938StopIteration
1939
1940>>> g.throw(ValueError,6)       # throw on closed generator
1941Traceback (most recent call last):
1942  ...
1943ValueError: 6
1944
1945>>> f().throw(ValueError,7)     # throw on just-opened generator
1946Traceback (most recent call last):
1947  ...
1948ValueError: 7
1949
1950Plain "raise" inside a generator should preserve the traceback (#13188).
1951The traceback should have 3 levels:
1952- g.throw()
1953- f()
1954- 1/0
1955
1956>>> def f():
1957...     try:
1958...         yield
1959...     except:
1960...         raise
1961>>> g = f()
1962>>> try:
1963...     1/0
1964... except ZeroDivisionError as v:
1965...     try:
1966...         g.throw(v)
1967...     except Exception as w:
1968...         tb = w.__traceback__
1969>>> levels = 0
1970>>> while tb:
1971...     levels += 1
1972...     tb = tb.tb_next
1973>>> levels
19743
1975
1976Now let's try closing a generator:
1977
1978>>> def f():
1979...     try: yield
1980...     except GeneratorExit:
1981...         print("exiting")
1982
1983>>> g = f()
1984>>> next(g)
1985>>> g.close()
1986exiting
1987>>> g.close()  # should be no-op now
1988
1989>>> f().close()  # close on just-opened generator should be fine
1990
1991>>> def f(): yield      # an even simpler generator
1992>>> f().close()         # close before opening
1993>>> g = f()
1994>>> next(g)
1995>>> g.close()           # close normally
1996
1997And finalization:
1998
1999>>> def f():
2000...     try: yield
2001...     finally:
2002...         print("exiting")
2003
2004>>> g = f()
2005>>> next(g)
2006>>> del g
2007exiting
2008
2009
2010GeneratorExit is not caught by except Exception:
2011
2012>>> def f():
2013...     try: yield
2014...     except Exception:
2015...         print('except')
2016...     finally:
2017...         print('finally')
2018
2019>>> g = f()
2020>>> next(g)
2021>>> del g
2022finally
2023
2024
2025Now let's try some ill-behaved generators:
2026
2027>>> def f():
2028...     try: yield
2029...     except GeneratorExit:
2030...         yield "foo!"
2031>>> g = f()
2032>>> next(g)
2033>>> g.close()
2034Traceback (most recent call last):
2035  ...
2036RuntimeError: generator ignored GeneratorExit
2037>>> g.close()
2038
2039
2040Our ill-behaved code should be invoked during GC:
2041
2042>>> import sys, io
2043>>> old, sys.stderr = sys.stderr, io.StringIO()
2044>>> g = f()
2045>>> next(g)
2046>>> del g
2047>>> "RuntimeError: generator ignored GeneratorExit" in sys.stderr.getvalue()
2048True
2049>>> sys.stderr = old
2050
2051
2052And errors thrown during closing should propagate:
2053
2054>>> def f():
2055...     try: yield
2056...     except GeneratorExit:
2057...         raise TypeError("fie!")
2058>>> g = f()
2059>>> next(g)
2060>>> g.close()
2061Traceback (most recent call last):
2062  ...
2063TypeError: fie!
2064
2065
2066Ensure that various yield expression constructs make their
2067enclosing function a generator:
2068
2069>>> def f(): x += yield
2070>>> type(f())
2071<class 'generator'>
2072
2073>>> def f(): x = yield
2074>>> type(f())
2075<class 'generator'>
2076
2077>>> def f(): lambda x=(yield): 1
2078>>> type(f())
2079<class 'generator'>
2080
2081>>> def f(): x=(i for i in (yield) if (yield))
2082>>> type(f())
2083<class 'generator'>
2084
2085>>> def f(d): d[(yield "a")] = d[(yield "b")] = 27
2086>>> data = [1,2]
2087>>> g = f(data)
2088>>> type(g)
2089<class 'generator'>
2090>>> g.send(None)
2091'a'
2092>>> data
2093[1, 2]
2094>>> g.send(0)
2095'b'
2096>>> data
2097[27, 2]
2098>>> try: g.send(1)
2099... except StopIteration: pass
2100>>> data
2101[27, 27]
2102
2103"""
2104
# Doctest suite guarding against previously-fixed reference leaks; it is
# registered under the "refleaks" key in __test__ and executed by the
# doctest runner. NOTE: the string body below is doctest input/expected
# output, so it must not be edited cosmetically.
refleaks_tests = """
Prior to adding cycle-GC support to itertools.tee, this code would leak
references. We add it to the standard suite so the routine refleak-tests
would trigger if it starts being uncleanable again.

>>> import itertools
>>> def leak():
...     class gen:
...         def __iter__(self):
...             return self
...         def __next__(self):
...             return self.item
...     g = gen()
...     head, tail = itertools.tee(g)
...     g.item = head
...     return head
>>> it = leak()

Make sure to also test the involvement of the tee-internal teedataobject,
which stores returned items.

>>> item = next(it)



This test leaked at one point due to generator finalization/destruction.
It was copied from Lib/test/leakers/test_generator_cycle.py before the file
was removed.

>>> def leak():
...    def gen():
...        while True:
...            yield g
...    g = gen()

>>> leak()



This test isn't really generator related, but rather exception-in-cleanup
related. The coroutine tests (above) just happen to cause an exception in
the generator's __del__ (tp_del) method. We can also test for this
explicitly, without generators. We do have to redirect stderr to avoid
printing warnings and to doublecheck that we actually tested what we wanted
to test.

>>> import sys, io
>>> old = sys.stderr
>>> try:
...     sys.stderr = io.StringIO()
...     class Leaker:
...         def __del__(self):
...             def invoke(message):
...                 raise RuntimeError(message)
...             invoke("test")
...
...     l = Leaker()
...     del l
...     err = sys.stderr.getvalue().strip()
...     "Exception ignored in" in err
...     "RuntimeError: test" in err
...     "Traceback" in err
...     "in invoke" in err
... finally:
...     sys.stderr = old
True
True
True
True


These refleak tests should perhaps be in a testfile of their own,
test_generators just happened to be the test that drew these out.

"""
2180
# Registry of doctest suites for this module: doctest's standard __test__
# protocol maps a suite name to the string containing its doctests.
__test__ = dict(
    tut=tutorial_tests,
    pep=pep_tests,
    email=email_tests,
    fun=fun_tests,
    syntax=syntax_tests,
    conjoin=conjoin_tests,
    weakref=weakref_tests,
    coroutine=coroutine_tests,
    refleaks=refleaks_tests,
)
2191
# Magic test name that regrtest.py invokes *after* importing this module.
# This worms around a bootstrap problem.
# Note that doctest and regrtest both look in sys.argv for a "-v" argument,
# so this works as expected in both ways of running regrtest.
def test_main(verbose=None):
    """Run this module's unittest cases, then its doctest suites."""
    from test import support
    from test import test_generators
    support.run_unittest(__name__)
    support.run_doctest(test_generators, verbose)
2200
# This part isn't needed for regrtest, but for running the test directly.
if __name__ == "__main__":
    # Force verbose mode when executed as a script.
    test_main(verbose=1)
2204