[
  {
    "path": ".coveragerc",
    "content": "[run]\nomit =\n    codetransformer/_version.py\n"
  },
  {
    "path": ".gitattributes",
    "content": "codetransformer/_version.py export-subst\n"
  },
  {
    "path": ".gitignore",
    "content": ".bundle\ndb/*.sqlite3\nlog/*.log\n*.log\ntmp/**/*\ntmp/*\n*.swp\n*~\n#mac autosaving file\n.DS_Store\n*.py[co]\n\n# Installer logs\npip-log.txt\n\n# Unit test / coverage reports\n.coverage\n.tox\ntest.log\n.noseids\n*.xlsx\n\n# Compiled python files\n*.py[co]\n\n# Packages\n*.egg\n*.egg-info\ndist\nbuild\neggs\ncover\nparts\nbin\nvar\nsdist\ndevelop-eggs\n.installed.cfg\ncoverage.xml\nnosetests.xml\n\n# C Extensions\n*.o\n*.so\n*.out\n\n# Vim\n*.swp\n*.swo\n\n# Built documentation\ndocs/_build/*\n\n# database of vbench\nbenchmarks.db\n\n# Vagrant temp folder\n.vagrant\n\n# pypi\nMANIFEST\n\n# pytest\n.cache\n\nhtmlcov\n"
  },
  {
    "path": ".travis.yml",
    "content": "language: python\nsudo: false\npython:\n  - 3.4.3\n  - 3.4\n  - 3.5\n  - 3.6\n\ninstall:\n  - pip install -e .[dev]\n\nscript:\n  - py.test codetransformer\n  - flake8 codetransformer\n\nnotifications:\n  email: false\n"
  },
  {
    "path": "LICENSE",
    "content": "             GNU GENERAL PUBLIC LICENSE\n                Version 2, June 1991\n\n Copyright (C) 1989, 1991 Free Software Foundation, Inc.,\n 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA\n Everyone is permitted to copy and distribute verbatim copies\n of this license document, but changing it is not allowed.\n\n                     Preamble\n\n  The licenses for most software are designed to take away your\nfreedom to share and change it.  By contrast, the GNU General Public\nLicense is intended to guarantee your freedom to share and change free\nsoftware--to make sure the software is free for all its users.  This\nGeneral Public License applies to most of the Free Software\nFoundation's software and to any other program whose authors commit to\nusing it.  (Some other Free Software Foundation software is covered by\nthe GNU Lesser General Public License instead.)  You can apply it to\nyour programs, too.\n\n  When we speak of free software, we are referring to freedom, not\nprice.  Our General Public Licenses are designed to make sure that you\nhave the freedom to distribute copies of free software (and charge for\nthis service if you wish), that you receive source code or can get it\nif you want it, that you can change the software or use pieces of it\nin new free programs; and that you know you can do these things.\n\n  To protect your rights, we need to make restrictions that forbid\nanyone to deny you these rights or to ask you to surrender the rights.\nThese restrictions translate to certain responsibilities for you if you\ndistribute copies of the software, or if you modify it.\n\n  For example, if you distribute copies of such a program, whether\ngratis or for a fee, you must give the recipients all the rights that\nyou have.  You must make sure that they, too, receive or can get the\nsource code.  
And you must show them these terms so they know their\nrights.\n\n  We protect your rights with two steps: (1) copyright the software, and\n(2) offer you this license which gives you legal permission to copy,\ndistribute and/or modify the software.\n\n  Also, for each author's protection and ours, we want to make certain\nthat everyone understands that there is no warranty for this free\nsoftware.  If the software is modified by someone else and passed on, we\nwant its recipients to know that what they have is not the original, so\nthat any problems introduced by others will not reflect on the original\nauthors' reputations.\n\n  Finally, any free program is threatened constantly by software\npatents.  We wish to avoid the danger that redistributors of a free\nprogram will individually obtain patent licenses, in effect making the\nprogram proprietary.  To prevent this, we have made it clear that any\npatent must be licensed for everyone's free use or not licensed at all.\n\n  The precise terms and conditions for copying, distribution and\nmodification follow.\n\n             GNU GENERAL PUBLIC LICENSE\n   TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION\n\n  0. This License applies to any program or other work which contains\na notice placed by the copyright holder saying it may be distributed\nunder the terms of this General Public License.  The \"Program\", below,\nrefers to any such program or work, and a \"work based on the Program\"\nmeans either the Program or any derivative work under copyright law:\nthat is to say, a work containing the Program or a portion of it,\neither verbatim or with modifications and/or translated into another\nlanguage.  (Hereinafter, translation is included without limitation in\nthe term \"modification\".)  Each licensee is addressed as \"you\".\n\nActivities other than copying, distribution and modification are not\ncovered by this License; they are outside its scope.  
The act of\nrunning the Program is not restricted, and the output from the Program\nis covered only if its contents constitute a work based on the\nProgram (independent of having been made by running the Program).\nWhether that is true depends on what the Program does.\n\n  1. You may copy and distribute verbatim copies of the Program's\nsource code as you receive it, in any medium, provided that you\nconspicuously and appropriately publish on each copy an appropriate\ncopyright notice and disclaimer of warranty; keep intact all the\nnotices that refer to this License and to the absence of any warranty;\nand give any other recipients of the Program a copy of this License\nalong with the Program.\n\nYou may charge a fee for the physical act of transferring a copy, and\nyou may at your option offer warranty protection in exchange for a fee.\n\n  2. You may modify your copy or copies of the Program or any portion\nof it, thus forming a work based on the Program, and copy and\ndistribute such modifications or work under the terms of Section 1\nabove, provided that you also meet all of these conditions:\n\n    a) You must cause the modified files to carry prominent notices\n    stating that you changed the files and the date of any change.\n\n    b) You must cause any work that you distribute or publish, that in\n    whole or in part contains or is derived from the Program or any\n    part thereof, to be licensed as a whole at no charge to all third\n    parties under the terms of this License.\n\n    c) If the modified program normally reads commands interactively\n    when run, you must cause it, when started running for such\n    interactive use in the most ordinary way, to print or display an\n    announcement including an appropriate copyright notice and a\n    notice that there is no warranty (or else, saying that you provide\n    a warranty) and that users may redistribute the program under\n    these conditions, and telling the user how to view a copy of this\n  
  License.  (Exception: if the Program itself is interactive but\n    does not normally print such an announcement, your work based on\n    the Program is not required to print an announcement.)\n\nThese requirements apply to the modified work as a whole.  If\nidentifiable sections of that work are not derived from the Program,\nand can be reasonably considered independent and separate works in\nthemselves, then this License, and its terms, do not apply to those\nsections when you distribute them as separate works.  But when you\ndistribute the same sections as part of a whole which is a work based\non the Program, the distribution of the whole must be on the terms of\nthis License, whose permissions for other licensees extend to the\nentire whole, and thus to each and every part regardless of who wrote it.\n\nThus, it is not the intent of this section to claim rights or contest\nyour rights to work written entirely by you; rather, the intent is to\nexercise the right to control the distribution of derivative or\ncollective works based on the Program.\n\nIn addition, mere aggregation of another work not based on the Program\nwith the Program (or with a work based on the Program) on a volume of\na storage or distribution medium does not bring the other work under\nthe scope of this License.\n\n  3. 
You may copy and distribute the Program (or a work based on it,\nunder Section 2) in object code or executable form under the terms of\nSections 1 and 2 above provided that you also do one of the following:\n\n    a) Accompany it with the complete corresponding machine-readable\n    source code, which must be distributed under the terms of Sections\n    1 and 2 above on a medium customarily used for software interchange; or,\n\n    b) Accompany it with a written offer, valid for at least three\n    years, to give any third party, for a charge no more than your\n    cost of physically performing source distribution, a complete\n    machine-readable copy of the corresponding source code, to be\n    distributed under the terms of Sections 1 and 2 above on a medium\n    customarily used for software interchange; or,\n\n    c) Accompany it with the information you received as to the offer\n    to distribute corresponding source code.  (This alternative is\n    allowed only for noncommercial distribution and only if you\n    received the program in object code or executable form with such\n    an offer, in accord with Subsection b above.)\n\nThe source code for a work means the preferred form of the work for\nmaking modifications to it.  For an executable work, complete source\ncode means all the source code for all modules it contains, plus any\nassociated interface definition files, plus the scripts used to\ncontrol compilation and installation of the executable.  
However, as a\nspecial exception, the source code distributed need not include\nanything that is normally distributed (in either source or binary\nform) with the major components (compiler, kernel, and so on) of the\noperating system on which the executable runs, unless that component\nitself accompanies the executable.\n\nIf distribution of executable or object code is made by offering\naccess to copy from a designated place, then offering equivalent\naccess to copy the source code from the same place counts as\ndistribution of the source code, even though third parties are not\ncompelled to copy the source along with the object code.\n\n  4. You may not copy, modify, sublicense, or distribute the Program\nexcept as expressly provided under this License.  Any attempt\notherwise to copy, modify, sublicense or distribute the Program is\nvoid, and will automatically terminate your rights under this License.\nHowever, parties who have received copies, or rights, from you under\nthis License will not have their licenses terminated so long as such\nparties remain in full compliance.\n\n  5. You are not required to accept this License, since you have not\nsigned it.  However, nothing else grants you permission to modify or\ndistribute the Program or its derivative works.  These actions are\nprohibited by law if you do not accept this License.  Therefore, by\nmodifying or distributing the Program (or any work based on the\nProgram), you indicate your acceptance of this License to do so, and\nall its terms and conditions for copying, distributing or modifying\nthe Program or works based on it.\n\n  6. Each time you redistribute the Program (or any work based on the\nProgram), the recipient automatically receives a license from the\noriginal licensor to copy, distribute or modify the Program subject to\nthese terms and conditions.  
You may not impose any further\nrestrictions on the recipients' exercise of the rights granted herein.\nYou are not responsible for enforcing compliance by third parties to\nthis License.\n\n  7. If, as a consequence of a court judgment or allegation of patent\ninfringement or for any other reason (not limited to patent issues),\nconditions are imposed on you (whether by court order, agreement or\notherwise) that contradict the conditions of this License, they do not\nexcuse you from the conditions of this License.  If you cannot\ndistribute so as to satisfy simultaneously your obligations under this\nLicense and any other pertinent obligations, then as a consequence you\nmay not distribute the Program at all.  For example, if a patent\nlicense would not permit royalty-free redistribution of the Program by\nall those who receive copies directly or indirectly through you, then\nthe only way you could satisfy both it and this License would be to\nrefrain entirely from distribution of the Program.\n\nIf any portion of this section is held invalid or unenforceable under\nany particular circumstance, the balance of the section is intended to\napply and the section as a whole is intended to apply in other\ncircumstances.\n\nIt is not the purpose of this section to induce you to infringe any\npatents or other property right claims or to contest validity of any\nsuch claims; this section has the sole purpose of protecting the\nintegrity of the free software distribution system, which is\nimplemented by public license practices.  Many people have made\ngenerous contributions to the wide range of software distributed\nthrough that system in reliance on consistent application of that\nsystem; it is up to the author/donor to decide if he or she is willing\nto distribute software through any other system and a licensee cannot\nimpose that choice.\n\nThis section is intended to make thoroughly clear what is believed to\nbe a consequence of the rest of this License.\n\n  8. 
If the distribution and/or use of the Program is restricted in\ncertain countries either by patents or by copyrighted interfaces, the\noriginal copyright holder who places the Program under this License\nmay add an explicit geographical distribution limitation excluding\nthose countries, so that distribution is permitted only in or among\ncountries not thus excluded.  In such case, this License incorporates\nthe limitation as if written in the body of this License.\n\n  9. The Free Software Foundation may publish revised and/or new versions\nof the General Public License from time to time.  Such new versions will\nbe similar in spirit to the present version, but may differ in detail to\naddress new problems or concerns.\n\nEach version is given a distinguishing version number.  If the Program\nspecifies a version number of this License which applies to it and \"any\nlater version\", you have the option of following the terms and conditions\neither of that version or of any later version published by the Free\nSoftware Foundation.  If the Program does not specify a version number of\nthis License, you may choose any version ever published by the Free Software\nFoundation.\n\n  10. If you wish to incorporate parts of the Program into other free\nprograms whose distribution conditions are different, write to the author\nto ask for permission.  For software which is copyrighted by the Free\nSoftware Foundation, write to the Free Software Foundation; we sometimes\nmake exceptions for this.  Our decision will be guided by the two goals\nof preserving the free status of all derivatives of our free software and\nof promoting the sharing and reuse of software generally.\n\n                     NO WARRANTY\n\n  11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY\nFOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW.  
EXCEPT WHEN\nOTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES\nPROVIDE THE PROGRAM \"AS IS\" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED\nOR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF\nMERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.  THE ENTIRE RISK AS\nTO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU.  SHOULD THE\nPROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING,\nREPAIR OR CORRECTION.\n\n  12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING\nWILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR\nREDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES,\nINCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING\nOUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED\nTO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY\nYOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER\nPROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE\nPOSSIBILITY OF SUCH DAMAGES.\n\n              END OF TERMS AND CONDITIONS\n\n     How to Apply These Terms to Your New Programs\n\n  If you develop a new program, and you want it to be of the greatest\npossible use to the public, the best way to achieve this is to make it\nfree software which everyone can redistribute and change under these terms.\n\n  To do so, attach the following notices to the program.  
It is safest\nto attach them to the start of each source file to most effectively\nconvey the exclusion of warranty; and each file should have at least\nthe \"copyright\" line and a pointer to where the full notice is found.\n\n    <one line to give the program's name and a brief idea of what it does.>\n    Copyright (C) <year>  <name of author>\n\n    This program is free software; you can redistribute it and/or modify\n    it under the terms of the GNU General Public License as published by\n    the Free Software Foundation; either version 2 of the License, or\n    (at your option) any later version.\n\n    This program is distributed in the hope that it will be useful,\n    but WITHOUT ANY WARRANTY; without even the implied warranty of\n    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n    GNU General Public License for more details.\n\n    You should have received a copy of the GNU General Public License along\n    with this program; if not, write to the Free Software Foundation, Inc.,\n    51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.\n\nAlso add information on how to contact you by electronic and paper mail.\n\nIf the program is interactive, make it output a short notice like this\nwhen it starts in an interactive mode:\n\n    Gnomovision version 69, Copyright (C) year name of author\n    Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'.\n    This is free software, and you are welcome to redistribute it\n    under certain conditions; type `show c' for details.\n\nThe hypothetical commands `show w' and `show c' should show the appropriate\nparts of the General Public License.  Of course, the commands you use may\nbe called something other than `show w' and `show c'; they could even be\nmouse-clicks or menu items--whatever suits your program.\n\nYou should also get your employer (if you work as a programmer) or your\nschool, if any, to sign a \"copyright disclaimer\" for the program, if\nnecessary.  
Here is a sample; alter the names:\n\n  Yoyodyne, Inc., hereby disclaims all copyright interest in the program\n  `Gnomovision' (which makes passes at compilers) written by James Hacker.\n\n  <signature of Ty Coon>, 1 April 1989\n  Ty Coon, President of Vice\n\nThis General Public License does not permit incorporating your program into\nproprietary programs.  If your program is a subroutine library, you may\nconsider it more useful to permit linking proprietary applications with the\nlibrary.  If this is what you want to do, use the GNU Lesser General\nPublic License instead of this License.\n"
  },
  {
    "path": "MANIFEST.in",
    "content": "include versioneer.py\ninclude codetransformer/_version.py\n"
  },
  {
    "path": "README.rst",
    "content": "``codetransformer``\n===================\n\n|build status| |documentation|\n\nBytecode transformers for CPython inspired by the ``ast`` module's\n``NodeTransformer``.\n\nWhat is ``codetransformer``?\n----------------------------\n\n``codetransformer`` is a library that allows us to work with CPython's bytecode\nrepresentation at runtime. ``codetransformer`` provides a level of abstraction\nbetween the programmer and the raw bytes read by the eval loop so that we can\nmore easily inspect and modify bytecode.\n\n``codetransformer`` is motivated by the need to override parts of the python\nlanguage that are not already hooked into through data model methods. For example:\n\n* Override the ``is`` and ``not`` operators.\n* Custom data structure literals.\n* Syntax features that cannot be represented with valid python AST or source.\n* Run without a modified CPython interpreter.\n\n``codetransformer`` was originally developed as part of lazy_ to implement\nthe transformations needed to override the code objects at runtime.\n\nExample Uses\n------------\n\nOverloading Literals\n~~~~~~~~~~~~~~~~~~~~\n\nWhile this can be done as an AST transformation, we will often need to execute\nthe constructor for the literal multiple times. Also, we need to be sure that\nany additional names required to run our code are provided when we run. With\n``codetransformer``, we can pre compute our new literals and emit code that is\nas fast as loading our unmodified literals without requiring any additional\nnames be available implicitly.\n\nIn the following block we demonstrate overloading dictionary syntax to result in\n``collections.OrderedDict`` objects. ``OrderedDict`` is like a ``dict``;\nhowever, the order of the keys is preserved.\n\n.. code-block:: python\n\n   >>> from codetransformer.transformers.literals import ordereddict_literals\n   >>> @ordereddict_literals\n   ... def f():\n   ...     
return {'a': 1, 'b': 2, 'c': 3}\n   >>> f()\n   OrderedDict([('a', 1), ('b', 2), ('c', 3)])\n\nThis also supports dictionary comprehensions:\n\n.. code-block:: python\n\n   >>> @ordereddict_literals\n   ... def f():\n   ...     return {k: v for k, v in zip('abc', (1, 2, 3))}\n   >>> f()\n   OrderedDict([('a', 1), ('b', 2), ('c', 3)])\n\nThe next block overrides ``float`` literals with ``decimal.Decimal``\nobjects. These objects support arbitrary precision arithmetic.\n\n.. code-block:: python\n\n   >>> from codetransformer.transformers.literals import decimal_literals\n   >>> @decimal_literals\n   ... def f():\n   ...     return 1.5\n   >>> f()\n   Decimal('1.5')\n\nPattern Matched Exceptions\n~~~~~~~~~~~~~~~~~~~~~~~~~~\n\nPattern matched exceptions are a good example of a ``CodeTransformer`` that\nwould be very complicated to implement at the AST level. This transformation\nextends the ``try/except`` syntax to accept instances of ``BaseException`` as\nwell as subclasses of ``BaseException``. When excepting an instance, the ``args``\nof the exception will be compared for equality to determine which exception\nhandler should be invoked. For example:\n\n.. code-block:: python\n\n   >>> @pattern_matched_exceptions()\n   ... def foo():\n   ...     try:\n   ...         raise ValueError('bar')\n   ...     except ValueError('buzz'):\n   ...         return 'buzz'\n   ...     except ValueError('bar'):\n   ...         return 'bar'\n   >>> foo()\n   'bar'\n\nThis function raises an instance of ``ValueError`` and attempts to catch it. The\nfirst check looks for instances of ``ValueError`` that were constructed with an\nargument of ``'buzz'``. Because our custom exception is raised with ``'bar'``,\nthese are not equal and we do not enter this handler. The next handler looks for\n``ValueError('bar')`` which does match the exception we raised. We then enter\nthis block and normal python rules take over.\n\nWe may also pass our own exception matching function:\n\n.. 
code-block:: python\n\n    >>> def match_greater(match_expr, exc_type, exc_value, exc_traceback):\n    ...     return match_expr > exc_value.args[0]\n\n    >>> @pattern_matched_exceptions(match_greater)\n    ... def foo():\n    ...     try:\n    ...         raise ValueError(5)\n    ...     except 4:\n    ...         return 4\n    ...     except 5:\n    ...         return 5\n    ...     except 6:\n    ...         return 6\n    >>> foo()\n    6\n\nThis matches when the match expression is greater in value than the first\nargument of any exception type that is raised. This particular behavior would be\nvery hard to mimic through AST level transformations.\n\nCore Abstractions\n-----------------\n\nThe three core abstractions of ``codetransformer`` are:\n\n1. The ``Instruction`` object which represents an opcode_ which may be paired\n   with some argument.\n2. The ``Code`` object which represents a collection of ``Instruction``\\s.\n3. The ``CodeTransformer`` object which represents a set of rules for\n   manipulating ``Code`` objects.\n\nInstructions\n~~~~~~~~~~~~\n\nThe ``Instruction`` object represents an atomic operation that can be performed\nby the CPython virtual machine. These are things like ``LOAD_NAME`` which loads\na name onto the stack, or ``ROT_TWO`` which rotates the top two stack elements.\n\nSome instructions accept an argument, for example ``LOAD_NAME``, which modifies\nthe behavior of the instruction. This is much like a function call where some\nfunctions accept arguments. Because the bytecode is always packed as raw bytes,\nthe argument must be some integer (CPython stores all arguments in two bytes).\nThis means that things that need a more rich argument system (like ``LOAD_NAME``\nwhich needs the actual name to look up) must carry around the actual arguments\nin some table and use the integer as an offset into this array. 
One of the key\nabstractions of the ``Instruction`` object is that the argument is always some\npython object that represents the actual argument. Any lookup table management\nis handled for the user. This is helpful because some arguments share this table\nso we don't want to add extra entries or forget to add them at all.\n\nAnother annoyance is that the instructions that handle control flow use their\nargument to say what bytecode offset to jump to. Some jumps use the absolute\nindex, others use a relative index. This also makes it hard if you want to add\nor remove instructions because all of the offsets must be recomputed. In\n``codetransformer``, the jump instructions all accept another ``Instruction`` as\nthe argument so that the assembler can manage this for the user. We also provide\nan easy way for new instructions to \"steal\" jumps that targeted another\ninstruction so that we can manage altering the bytecode around jump targets.\n\nCode\n~~~~\n\n``Code`` objects are a nice abstraction over python's\n``types.CodeType``. Quoting the ``CodeType`` constructor docstring:\n\n::\n\n   code(argcount, kwonlyargcount, nlocals, stacksize, flags, codestring,\n         constants, names, varnames, filename, name, firstlineno,\n         lnotab[, freevars[, cellvars]])\n\n   Create a code object.  Not for the faint of heart.\n\nThe ``codetransformer`` abstraction is designed to make it easy to dynamically\nconstruct and inspect these objects. This allows us to easily set things like the\nargument names, and manipulate the line number mappings.\n\nThe ``Code`` object provides methods for converting to and from Python's code\nrepresentation:\n\n1. ``from_pycode``\n2. ``to_pycode``.\n\nThis allows us to take an existing function, parse the meaning from it, modify\nit, and then assemble this back into a new python code object.\n\n.. note::\n\n   ``Code`` objects are immutable. 
When we say \"modify\", we mean create a copy\n   with different values.\n\nCodeTransformers\n----------------\n\nThis is the set of rules that are used to actually modify the ``Code``\nobjects. These rules are defined as a set of ``patterns`` which are a DSL used\nto define a DFA for matching against sequences of ``Instruction`` objects. Once\nwe have matched a segment, we yield new instructions to replace what we have\nmatched. A simple codetransformer looks like:\n\n.. code-block:: python\n\n   from codetransformer import CodeTransformer, instructions\n\n   class FoldNames(CodeTransformer):\n       @pattern(\n           instructions.LOAD_GLOBAL,\n           instructions.LOAD_GLOBAL,\n           instructions.BINARY_ADD,\n       )\n       def _load_fast(self, a, b, add):\n           yield instructions.LOAD_FAST(a.arg + b.arg).steal(a)\n\nThis ``CodeTransformer`` uses the ``+`` operator to implement something like\n``CPP``\\s token pasting for local variables. We read this pattern as a sequence\nof two ``LOAD_GLOBAL`` (global name lookups) followed by a ``BINARY_ADD``\ninstruction (``+`` operator call). This will then call the function with the\nthree instructions passed positionally. This handler replaces this sequence with\na single instruction that emits a ``LOAD_FAST`` (local name lookup) that is the\nresult of adding the two names together. We then steal any jumps that used to\ntarget the first ``LOAD_GLOBAL``.\n\nWe can execute this transformer by calling an instance of it on a\nfunction object, or using it like a decorator. For example:\n\n.. code-block:: python\n\n   >>> @FoldNames()\n   ... def f():\n   ...     ab = 3\n   ...     return a + b\n   >>> f()\n   3\n\n\nLicense\n-------\n\n``codetransformer`` is free software, licensed under the GNU General Public\nLicense, version 2. For more information see the ``LICENSE`` file.\n\n\nSource\n------\n\nSource code is hosted on github at\nhttps://github.com/llllllllll/codetransformer.\n\n\n.. 
_lazy: https://github.com/llllllllll/lazy_python\n.. _opcode: https://docs.python.org/3.5/library/dis.html#opcode-NOP\n.. |build status| image:: https://travis-ci.org/llllllllll/codetransformer.svg?branch=master\n   :target: https://travis-ci.org/llllllllll/codetransformer\n.. |documentation| image:: https://readthedocs.org/projects/codetransformer/badge/?version=stable\n   :target: http://codetransformer.readthedocs.io/en/stable/?badge=stable\n   :alt: Documentation Status\n"
  },
  {
    "path": "codetransformer/__init__.py",
    "content": "from .code import Code, Flag\nfrom .core import CodeTransformer\nfrom . patterns import (\n    matchany,\n    not_,\n    option,\n    or_,\n    pattern,\n    plus,\n    seq,\n    var,\n)\nfrom . import instructions\nfrom . import transformers\nfrom .utils.pretty import a, d, display, pprint_ast, pformat_ast\nfrom ._version import get_versions\n\n\n__version__ = get_versions()['version']\ndel get_versions\n\n\ndef load_ipython_extension(ipython):  # pragma: no cover\n\n    def dis_magic(line, cell=None):\n        if cell is None:\n            return d(line)\n        return d(cell)\n    ipython.register_magic_function(dis_magic, 'line_cell', 'dis')\n\n    def ast_magic(line, cell=None):\n        if cell is None:\n            return a(line)\n        return a(cell)\n    ipython.register_magic_function(ast_magic, 'line_cell', 'ast')\n\n\n__all__ = [\n    'a',\n    'd',\n    'display',\n    'Code',\n    'CodeTransformer',\n    'Flag',\n    'instructions',\n    'matchany',\n    'not_',\n    'option',\n    'or_',\n    'pattern',\n    'plus',\n    'pformat_ast',\n    'pprint_ast',\n    'seq',\n    'var',\n    'transformers',\n]\n"
  },
  {
    "path": "codetransformer/_version.py",
    "content": "\n# This file helps to compute a version number in source trees obtained from\n# git-archive tarball (such as those provided by githubs download-from-tag\n# feature). Distribution tarballs (built by setup.py sdist) and build\n# directories (produced by setup.py build) will contain a much shorter file\n# that just contains the computed version number.\n\n# This file is released into the public domain. Generated by\n# versioneer-0.15 (https://github.com/warner/python-versioneer)\n\nimport errno\nimport os\nimport re\nimport subprocess\nimport sys\n\n\ndef get_keywords():\n    # these strings will be replaced by git during git-archive.\n    # setup.py/versioneer.py will grep for the variable names, so they must\n    # each be defined on a line of their own. _version.py will just call\n    # get_keywords().\n    git_refnames = \"$Format:%d$\"\n    git_full = \"$Format:%H$\"\n    keywords = {\"refnames\": git_refnames, \"full\": git_full}\n    return keywords\n\n\nclass VersioneerConfig:\n    pass\n\n\ndef get_config():\n    # these strings are filled in when 'setup.py versioneer' creates\n    # _version.py\n    cfg = VersioneerConfig()\n    cfg.VCS = \"git\"\n    cfg.style = \"pep440\"\n    cfg.tag_prefix = \"\"\n    cfg.parentdir_prefix = \"codetransformer-\"\n    cfg.versionfile_source = \"codetransformer/_version.py\"\n    cfg.verbose = False\n    return cfg\n\n\nclass NotThisMethod(Exception):\n    pass\n\n\nLONG_VERSION_PY = {}\nHANDLERS = {}\n\n\ndef register_vcs_handler(vcs, method):  # decorator\n    def decorate(f):\n        if vcs not in HANDLERS:\n            HANDLERS[vcs] = {}\n        HANDLERS[vcs][method] = f\n        return f\n    return decorate\n\n\ndef run_command(commands, args, cwd=None, verbose=False, hide_stderr=False):\n    assert isinstance(commands, list)\n    p = None\n    for c in commands:\n        try:\n            dispcmd = str([c] + args)\n            # remember shell=False, so use git.cmd on windows, not just git\n        
    p = subprocess.Popen([c] + args, cwd=cwd, stdout=subprocess.PIPE,\n                                 stderr=(subprocess.PIPE if hide_stderr\n                                         else None))\n            break\n        except EnvironmentError:\n            e = sys.exc_info()[1]\n            if e.errno == errno.ENOENT:\n                continue\n            if verbose:\n                print(\"unable to run %s\" % dispcmd)\n                print(e)\n            return None\n    else:\n        if verbose:\n            print(\"unable to find command, tried %s\" % (commands,))\n        return None\n    stdout = p.communicate()[0].strip()\n    if sys.version_info[0] >= 3:\n        stdout = stdout.decode()\n    if p.returncode != 0:\n        if verbose:\n            print(\"unable to run %s (error)\" % dispcmd)\n        return None\n    return stdout\n\n\ndef versions_from_parentdir(parentdir_prefix, root, verbose):\n    # Source tarballs conventionally unpack into a directory that includes\n    # both the project name and a version string.\n    dirname = os.path.basename(root)\n    if not dirname.startswith(parentdir_prefix):\n        if verbose:\n            print(\"guessing rootdir is '%s', but '%s' doesn't start with \"\n                  \"prefix '%s'\" % (root, dirname, parentdir_prefix))\n        raise NotThisMethod(\"rootdir doesn't start with parentdir_prefix\")\n    return {\"version\": dirname[len(parentdir_prefix):],\n            \"full-revisionid\": None,\n            \"dirty\": False, \"error\": None}\n\n\n@register_vcs_handler(\"git\", \"get_keywords\")\ndef git_get_keywords(versionfile_abs):\n    # the code embedded in _version.py can just fetch the value of these\n    # keywords. When used from setup.py, we don't want to import _version.py,\n    # so we do it with a regexp instead. 
This function is not used from\n    # _version.py.\n    keywords = {}\n    try:\n        f = open(versionfile_abs, \"r\")\n        for line in f.readlines():\n            if line.strip().startswith(\"git_refnames =\"):\n                mo = re.search(r'=\\s*\"(.*)\"', line)\n                if mo:\n                    keywords[\"refnames\"] = mo.group(1)\n            if line.strip().startswith(\"git_full =\"):\n                mo = re.search(r'=\\s*\"(.*)\"', line)\n                if mo:\n                    keywords[\"full\"] = mo.group(1)\n        f.close()\n    except EnvironmentError:\n        pass\n    return keywords\n\n\n@register_vcs_handler(\"git\", \"keywords\")\ndef git_versions_from_keywords(keywords, tag_prefix, verbose):\n    if not keywords:\n        raise NotThisMethod(\"no keywords at all, weird\")\n    refnames = keywords[\"refnames\"].strip()\n    if refnames.startswith(\"$Format\"):\n        if verbose:\n            print(\"keywords are unexpanded, not using\")\n        raise NotThisMethod(\"unexpanded keywords, not a git-archive tarball\")\n    refs = set([r.strip() for r in refnames.strip(\"()\").split(\",\")])\n    # starting in git-1.8.3, tags are listed as \"tag: foo-1.0\" instead of\n    # just \"foo-1.0\". If we see a \"tag: \" prefix, prefer those.\n    TAG = \"tag: \"\n    tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)])\n    if not tags:\n        # Either we're using git < 1.8.3, or there really are no tags. We use\n        # a heuristic: assume all version tags have a digit. The old git %d\n        # expansion behaves like git log --decorate=short and strips out the\n        # refs/heads/ and refs/tags/ prefixes that would let us distinguish\n        # between branches and tags. 
By ignoring refnames without digits, we\n        # filter out many common branch names like \"release\" and\n        # \"stabilization\", as well as \"HEAD\" and \"master\".\n        tags = set([r for r in refs if re.search(r'\\d', r)])\n        if verbose:\n            print(\"discarding '%s', no digits\" % \",\".join(refs-tags))\n    if verbose:\n        print(\"likely tags: %s\" % \",\".join(sorted(tags)))\n    for ref in sorted(tags):\n        # sorting will prefer e.g. \"2.0\" over \"2.0rc1\"\n        if ref.startswith(tag_prefix):\n            r = ref[len(tag_prefix):]\n            if verbose:\n                print(\"picking %s\" % r)\n            return {\"version\": r,\n                    \"full-revisionid\": keywords[\"full\"].strip(),\n                    \"dirty\": False, \"error\": None\n                    }\n    # no suitable tags, so version is \"0+unknown\", but full hex is still there\n    if verbose:\n        print(\"no suitable tags, using unknown + full revision id\")\n    return {\"version\": \"0+unknown\",\n            \"full-revisionid\": keywords[\"full\"].strip(),\n            \"dirty\": False, \"error\": \"no suitable tags\"}\n\n\n@register_vcs_handler(\"git\", \"pieces_from_vcs\")\ndef git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):\n    # this runs 'git' from the root of the source tree. 
This only gets called\n    # if the git-archive 'subst' keywords were *not* expanded, and\n    # _version.py hasn't already been rewritten with a short version string,\n    # meaning we're inside a checked out source tree.\n\n    if not os.path.exists(os.path.join(root, \".git\")):\n        if verbose:\n            print(\"no .git in %s\" % root)\n        raise NotThisMethod(\"no .git directory\")\n\n    GITS = [\"git\"]\n    if sys.platform == \"win32\":\n        GITS = [\"git.cmd\", \"git.exe\"]\n    # if there is a tag, this yields TAG-NUM-gHEX[-dirty]\n    # if there are no tags, this yields HEX[-dirty] (no NUM)\n    describe_out = run_command(GITS, [\"describe\", \"--tags\", \"--dirty\",\n                                      \"--always\", \"--long\"],\n                               cwd=root)\n    # --long was added in git-1.5.5\n    if describe_out is None:\n        raise NotThisMethod(\"'git describe' failed\")\n    describe_out = describe_out.strip()\n    full_out = run_command(GITS, [\"rev-parse\", \"HEAD\"], cwd=root)\n    if full_out is None:\n        raise NotThisMethod(\"'git rev-parse' failed\")\n    full_out = full_out.strip()\n\n    pieces = {}\n    pieces[\"long\"] = full_out\n    pieces[\"short\"] = full_out[:7]  # maybe improved later\n    pieces[\"error\"] = None\n\n    # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]\n    # TAG might have hyphens.\n    git_describe = describe_out\n\n    # look for -dirty suffix\n    dirty = git_describe.endswith(\"-dirty\")\n    pieces[\"dirty\"] = dirty\n    if dirty:\n        git_describe = git_describe[:git_describe.rindex(\"-dirty\")]\n\n    # now we have TAG-NUM-gHEX or HEX\n\n    if \"-\" in git_describe:\n        # TAG-NUM-gHEX\n        mo = re.search(r'^(.+)-(\\d+)-g([0-9a-f]+)$', git_describe)\n        if not mo:\n            # unparseable. 
Maybe git-describe is misbehaving?\n            pieces[\"error\"] = (\"unable to parse git-describe output: '%s'\"\n                               % describe_out)\n            return pieces\n\n        # tag\n        full_tag = mo.group(1)\n        if not full_tag.startswith(tag_prefix):\n            if verbose:\n                fmt = \"tag '%s' doesn't start with prefix '%s'\"\n                print(fmt % (full_tag, tag_prefix))\n            pieces[\"error\"] = (\"tag '%s' doesn't start with prefix '%s'\"\n                               % (full_tag, tag_prefix))\n            return pieces\n        pieces[\"closest-tag\"] = full_tag[len(tag_prefix):]\n\n        # distance: number of commits since tag\n        pieces[\"distance\"] = int(mo.group(2))\n\n        # commit: short hex revision ID\n        pieces[\"short\"] = mo.group(3)\n\n    else:\n        # HEX: no tags\n        pieces[\"closest-tag\"] = None\n        count_out = run_command(GITS, [\"rev-list\", \"HEAD\", \"--count\"],\n                                cwd=root)\n        pieces[\"distance\"] = int(count_out)  # total number of commits\n\n    return pieces\n\n\ndef plus_or_dot(pieces):\n    if \"+\" in pieces.get(\"closest-tag\", \"\"):\n        return \".\"\n    return \"+\"\n\n\ndef render_pep440(pieces):\n    # now build up version string, with post-release \"local version\n    # identifier\". Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you\n    # get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty\n\n    # exceptions:\n    # 1: no tags. git_describe was just HEX. 
0+untagged.DISTANCE.gHEX[.dirty]\n\n    if pieces[\"closest-tag\"]:\n        rendered = pieces[\"closest-tag\"]\n        if pieces[\"distance\"] or pieces[\"dirty\"]:\n            rendered += plus_or_dot(pieces)\n            rendered += \"%d.g%s\" % (pieces[\"distance\"], pieces[\"short\"])\n            if pieces[\"dirty\"]:\n                rendered += \".dirty\"\n    else:\n        # exception #1\n        rendered = \"0+untagged.%d.g%s\" % (pieces[\"distance\"],\n                                          pieces[\"short\"])\n        if pieces[\"dirty\"]:\n            rendered += \".dirty\"\n    return rendered\n\n\ndef render_pep440_pre(pieces):\n    # TAG[.post.devDISTANCE] . No -dirty\n\n    # exceptions:\n    # 1: no tags. 0.post.devDISTANCE\n\n    if pieces[\"closest-tag\"]:\n        rendered = pieces[\"closest-tag\"]\n        if pieces[\"distance\"]:\n            rendered += \".post.dev%d\" % pieces[\"distance\"]\n    else:\n        # exception #1\n        rendered = \"0.post.dev%d\" % pieces[\"distance\"]\n    return rendered\n\n\ndef render_pep440_post(pieces):\n    # TAG[.postDISTANCE[.dev0]+gHEX] . The \".dev0\" means dirty. Note that\n    # .dev0 sorts backwards (a dirty tree will appear \"older\" than the\n    # corresponding clean one), but you shouldn't be releasing software with\n    # -dirty anyways.\n\n    # exceptions:\n    # 1: no tags. 
0.postDISTANCE[.dev0]\n\n    if pieces[\"closest-tag\"]:\n        rendered = pieces[\"closest-tag\"]\n        if pieces[\"distance\"] or pieces[\"dirty\"]:\n            rendered += \".post%d\" % pieces[\"distance\"]\n            if pieces[\"dirty\"]:\n                rendered += \".dev0\"\n            rendered += plus_or_dot(pieces)\n            rendered += \"g%s\" % pieces[\"short\"]\n    else:\n        # exception #1\n        rendered = \"0.post%d\" % pieces[\"distance\"]\n        if pieces[\"dirty\"]:\n            rendered += \".dev0\"\n        rendered += \"+g%s\" % pieces[\"short\"]\n    return rendered\n\n\ndef render_pep440_old(pieces):\n    # TAG[.postDISTANCE[.dev0]] . The \".dev0\" means dirty.\n\n    # exceptions:\n    # 1: no tags. 0.postDISTANCE[.dev0]\n\n    if pieces[\"closest-tag\"]:\n        rendered = pieces[\"closest-tag\"]\n        if pieces[\"distance\"] or pieces[\"dirty\"]:\n            rendered += \".post%d\" % pieces[\"distance\"]\n            if pieces[\"dirty\"]:\n                rendered += \".dev0\"\n    else:\n        # exception #1\n        rendered = \"0.post%d\" % pieces[\"distance\"]\n        if pieces[\"dirty\"]:\n            rendered += \".dev0\"\n    return rendered\n\n\ndef render_git_describe(pieces):\n    # TAG[-DISTANCE-gHEX][-dirty], like 'git describe --tags --dirty\n    # --always'\n\n    # exceptions:\n    # 1: no tags. HEX[-dirty]  (note: no 'g' prefix)\n\n    if pieces[\"closest-tag\"]:\n        rendered = pieces[\"closest-tag\"]\n        if pieces[\"distance\"]:\n            rendered += \"-%d-g%s\" % (pieces[\"distance\"], pieces[\"short\"])\n    else:\n        # exception #1\n        rendered = pieces[\"short\"]\n    if pieces[\"dirty\"]:\n        rendered += \"-dirty\"\n    return rendered\n\n\ndef render_git_describe_long(pieces):\n    # TAG-DISTANCE-gHEX[-dirty], like 'git describe --tags --dirty\n    # --always -long'. The distance/hash is unconditional.\n\n    # exceptions:\n    # 1: no tags. 
HEX[-dirty]  (note: no 'g' prefix)\n\n    if pieces[\"closest-tag\"]:\n        rendered = pieces[\"closest-tag\"]\n        rendered += \"-%d-g%s\" % (pieces[\"distance\"], pieces[\"short\"])\n    else:\n        # exception #1\n        rendered = pieces[\"short\"]\n    if pieces[\"dirty\"]:\n        rendered += \"-dirty\"\n    return rendered\n\n\ndef render(pieces, style):\n    if pieces[\"error\"]:\n        return {\"version\": \"unknown\",\n                \"full-revisionid\": pieces.get(\"long\"),\n                \"dirty\": None,\n                \"error\": pieces[\"error\"]}\n\n    if not style or style == \"default\":\n        style = \"pep440\"  # the default\n\n    if style == \"pep440\":\n        rendered = render_pep440(pieces)\n    elif style == \"pep440-pre\":\n        rendered = render_pep440_pre(pieces)\n    elif style == \"pep440-post\":\n        rendered = render_pep440_post(pieces)\n    elif style == \"pep440-old\":\n        rendered = render_pep440_old(pieces)\n    elif style == \"git-describe\":\n        rendered = render_git_describe(pieces)\n    elif style == \"git-describe-long\":\n        rendered = render_git_describe_long(pieces)\n    else:\n        raise ValueError(\"unknown style '%s'\" % style)\n\n    return {\"version\": rendered, \"full-revisionid\": pieces[\"long\"],\n            \"dirty\": pieces[\"dirty\"], \"error\": None}\n\n\ndef get_versions():\n    # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have\n    # __file__, we can work backwards from there to the root. 
Some\n    # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which\n    # case we can only use expanded keywords.\n\n    cfg = get_config()\n    verbose = cfg.verbose\n\n    try:\n        return git_versions_from_keywords(get_keywords(), cfg.tag_prefix,\n                                          verbose)\n    except NotThisMethod:\n        pass\n\n    try:\n        root = os.path.realpath(__file__)\n        # versionfile_source is the relative path from the top of the source\n        # tree (where the .git directory might live) to this file. Invert\n        # this to find the root from __file__.\n        for i in cfg.versionfile_source.split('/'):\n            root = os.path.dirname(root)\n    except NameError:\n        return {\"version\": \"0+unknown\", \"full-revisionid\": None,\n                \"dirty\": None,\n                \"error\": \"unable to find root of source tree\"}\n\n    try:\n        pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose)\n        return render(pieces, cfg.style)\n    except NotThisMethod:\n        pass\n\n    try:\n        if cfg.parentdir_prefix:\n            return versions_from_parentdir(cfg.parentdir_prefix, root, verbose)\n    except NotThisMethod:\n        pass\n\n    return {\"version\": \"0+unknown\", \"full-revisionid\": None,\n            \"dirty\": None,\n            \"error\": \"unable to compute version\"}\n"
  },
  {
    "path": "codetransformer/code.py",
    "content": "from collections import OrderedDict\nfrom dis import Bytecode, dis, findlinestarts\nfrom enum import IntEnum, unique\nfrom functools import reduce\nfrom itertools import repeat\nimport operator as op\nimport sys\nfrom types import CodeType\n\nfrom .instructions import (\n    Instruction,\n    LOAD_CONST,\n    YIELD_FROM,\n    YIELD_VALUE,\n    _RawArg,\n)\nfrom .utils.functional import scanl, reverse_dict, ffill\nfrom .utils.immutable import lazyval\nfrom .utils.instance import instance\n\n\nWORDCODE = sys.version_info >= (3, 6)\nif WORDCODE:\n    argsize = 1\n    max_lnotab_increment = 127\n\n    def _sparse_args(instrs):\n        for instr in instrs:\n            yield instr\n            yield None\n\nelse:\n    argsize = 2\n    max_lnotab_increment = 255\n\n    def _sparse_args(instrs):\n        for instr in instrs:\n            yield instr\n            if instr.have_arg:\n                yield None\n                yield None\n\n\n_sparse_args.__doc__ = \"\"\"\\\nMakes the arguments sparse so that instructions live at the correct index for\nthe jump resolution step.\n\nThis pads the instruction set with None to mark the bytes occupied by\narguments.\n\nParameters\n----------\ninstrs : iterable of Instruction\n    The dense instruction set.\n\nYields\n------\nsparse : Instruction or None\n    Yields the instructions, with objects marking the bytes that are used for\n    arguments.\n\"\"\"\n\n\n@unique\nclass Flag(IntEnum):\n    \"\"\"\n    An enum describing the bitmask of flags that can be set on a code object.\n    \"\"\"\n    # These enum values and comments are taken from CPython.\n    CO_OPTIMIZED = 0x0001\n    CO_NEWLOCALS = 0x0002\n    CO_VARARGS = 0x0004\n    CO_VARKEYWORDS = 0x0008\n    CO_NESTED = 0x0010\n    CO_GENERATOR = 0x0020\n\n    # The CO_NOFREE flag is set if there are no free or cell variables.\n    # This information is redundant, but it allows a single flag test\n    # to determine whether there is any extra work to be done 
when the\n    # call frame it setup.\n    CO_NOFREE = 0x0040\n\n    # The CO_COROUTINE flag is set for coroutines creates with the\n    # types.coroutine decorator. This converts old-style coroutines into\n    # python3.5 style coroutines.\n    CO_COROUTINE = 0x0080\n    CO_ITERABLE_COROUTINE = 0x0100\n\n    # Old values:\n    CO_FUTURE_DIVISION = 0x2000\n    CO_FUTURE_ABSOLUTE_IMPORT = 0x4000  # Do absolute imports by default.\n    CO_FUTURE_WITH_STATEMENT = 0x8000\n    CO_FUTURE_PRINT_FUNCTION = 0x10000\n    CO_FUTURE_UNICODE_LITERALS = 0x20000\n\n    CO_FUTURE_BARRY_AS_BDFL = 0x40000\n    CO_FUTURE_GENERATOR_STOP = 0x80000\n\n    @instance\n    class max:\n        \"\"\"The largest bitmask that represents a valid flag.\n        \"\"\"\n        def __get__(self, instance, owner):\n            return owner.pack(**dict(zip(owner.__members__, repeat(True))))\n\n        def __set__(self, instance, value):\n            raise AttributeError(\"can't set 'max' attribute\")\n\n    @classmethod\n    def pack(cls,\n             *,\n             CO_OPTIMIZED,\n             CO_NEWLOCALS,\n             CO_VARARGS,\n             CO_VARKEYWORDS,\n             CO_NESTED,\n             CO_GENERATOR,\n             CO_NOFREE,\n             CO_COROUTINE,\n             CO_ITERABLE_COROUTINE,\n             CO_FUTURE_DIVISION,\n             CO_FUTURE_ABSOLUTE_IMPORT,\n             CO_FUTURE_WITH_STATEMENT,\n             CO_FUTURE_PRINT_FUNCTION,\n             CO_FUTURE_UNICODE_LITERALS,\n             CO_FUTURE_BARRY_AS_BDFL,\n             CO_FUTURE_GENERATOR_STOP):\n        \"\"\"Pack a flags into a bitmask.\n\n        I hope you like kwonly args.\n\n        Parameters\n        ----------\n        CO_OPTIMIZED : bool\n        CO_NEWLOCALS : bool\n        CO_VARARGS : bool\n        CO_VARKEYWORDS : bool\n        CO_NESTED : bool\n        CO_GENERATOR : bool\n        CO_NOFREE : bool\n        CO_COROUTINE : bool\n        CO_ITERABLE_COROUTINE : bool\n        CO_FUTURE_DIVISION : bool\n    
    CO_FUTURE_ABSOLUTE_IMPORT : bool\n        CO_FUTURE_WITH_STATEMENT : bool\n        CO_FUTURE_PRINT_FUNCTION : bool\n        CO_FUTURE_UNICODE_LITERALS : bool\n        CO_FUTURE_BARRY_AS_BDFL : bool\n        CO_FUTURE_GENERATOR_STOP : bool\n\n        Returns\n        -------\n        mask : int\n\n        See Also\n        --------\n        codetransformer.code.Flag.unpack\n        \"\"\"\n        ls = locals()\n        return reduce(\n            op.or_,\n            (v for k, v in cls.__members__.items() if ls[k]),\n            0,\n        )\n\n    @classmethod\n    def unpack(cls, mask):\n        \"\"\"Unpack a bitmask into a map of flag to bool.\n\n        Parameters\n        ----------\n        mask : int\n            A bitmask\n\n        Returns\n        -------\n        mapping : OrderedDict[str -> bool]\n            The mapping of flag name to flag status.\n\n        See Also\n        --------\n        codetransformer.code.Flag.pack\n        \"\"\"\n        if mask > cls.max:\n            raise ValueError('Invalid mask, too large: %d' % mask)\n\n        return OrderedDict(\n            (k, bool(mask & getattr(cls, k)))\n            for k, v in cls.__members__.items()\n        )\n\n\ndef _freevar_argname(arg, cellvars, freevars):\n    \"\"\"\n    Get the name of the variable manipulated by a 'uses_free' instruction.\n\n    Parameters\n    ----------\n    arg : int\n        The raw argument to a uses_free instruction that we want to resolve to\n        a name.\n    cellvars : list[str]\n        The co_cellvars of the function for which we want to resolve `arg`.\n    freevars : list[str]\n        The co_freevars of the function for which we want to resolve `arg`.\n\n    Notes\n    -----\n    From https://docs.python.org/3.5/library/dis.html#opcode-LOAD_CLOSURE:\n\n        The name of the variable is co_cellvars[i] if i is less than the length\n        of co_cellvars. 
Otherwise it is co_freevars[i - len(co_cellvars)]\n    \"\"\"\n    len_cellvars = len(cellvars)\n    if arg < len_cellvars:\n        return cellvars[arg]\n    return freevars[arg - len_cellvars]\n\n\ndef pycode(argcount,\n           kwonlyargcount,\n           nlocals,\n           stacksize,\n           flags,\n           codestring,\n           constants,\n           names,\n           varnames,\n           filename,\n           name,\n           firstlineno,\n           lnotab,\n           freevars=(),\n           cellvars=()):\n    \"\"\"types.CodeType constructor that accepts keyword arguments.\n\n    See Also\n    --------\n    types.CodeType\n    \"\"\"\n    return CodeType(\n        argcount,\n        kwonlyargcount,\n        nlocals,\n        stacksize,\n        flags,\n        codestring,\n        constants,\n        names,\n        varnames,\n        filename,\n        name,\n        firstlineno,\n        lnotab,\n        freevars,\n        cellvars,\n    )\n\n\nclass Code:\n    \"\"\"A higher abstraction over python's CodeType.\n\n    See Include/code.h for more information.\n\n    Parameters\n    ----------\n    instrs : iterable of Instruction\n        A sequence of codetransformer Instruction objects.\n    argnames : iterable of str, optional\n        The names of the arguments to the code object.\n    name : str, optional\n        The name of this code object.\n    filename : str, optional\n        The file that this code object came from.\n    firstlineno : int, optional\n        The first line number of the code in this code object.\n    lnotab : dict[Instruction -> int], optional\n        The mapping from instruction to the line that it starts.\n    flags : dict[str -> bool], optional\n        Any flags to set. 
This updates the default flag set.\n\n    Attributes\n    ----------\n    argcount\n    argnames\n    cellvars\n    constructs_new_locals\n    consts\n    filename\n    flags\n    freevars\n    instrs\n    is_coroutine\n    is_generator\n    is_iterable_coroutine\n    is_nested\n    kwonlyargcount\n    lnotab\n    name\n    names\n    py_lnotab\n    sparse_instrs\n    stacksize\n    varnames\n    \"\"\"\n    __slots__ = (\n        '_instrs',\n        '_argnames',\n        '_argcount',\n        '_kwonlyargcount',\n        '_cellvars',\n        '_freevars',\n        '_name',\n        '_filename',\n        '_firstlineno',\n        '_lnotab',\n        '_flags',\n        '__weakref__',\n    )\n\n    def __init__(self,\n                 instrs,\n                 argnames=(),\n                 *,\n                 cellvars=(),\n                 freevars=(),\n                 name='<code>',\n                 filename='<code>',\n                 firstlineno=1,\n                 lnotab=None,\n                 flags=None):\n\n        instrs = tuple(instrs)  # strictly evaluate any generators.\n\n        # The starting varnames (the names of the arguments to the function)\n        argcount = [0]\n        kwonlyargcount = [0]\n        argcounter = argcount  # Which set of args are we currently counting.\n        _argnames = []\n        append_argname = _argnames.append\n        varg = kwarg = None\n        for argname in argnames:\n            if argname.startswith('**'):\n                if kwarg is not None:\n                    raise ValueError('cannot specify **kwargs more than once')\n                kwarg = argname[2:]\n                continue\n            elif argname.startswith('*'):\n                if varg is not None:\n                    raise ValueError('cannot specify *args more than once')\n                varg = argname[1:]\n                argcounter = kwonlyargcount  # all following args are kwonly.\n                continue\n            argcounter[0] += 1\n  
          append_argname(argname)\n\n        if varg is not None:\n            append_argname(varg)\n        if kwarg is not None:\n            append_argname(kwarg)\n\n        cellvar_names = set(cellvars)\n        freevar_names = set(freevars)\n        for instr in filter(op.attrgetter('uses_free'), instrs):\n            if instr.arg in cellvar_names:\n                instr._vartype = 'cell'\n            elif instr.arg in freevar_names:\n                instr._vartype = 'free'\n            else:\n                raise ValueError(\n                    \"Argument to %r is not in cellvars or freevars.\" % instr\n                )\n\n        for instr in filter(op.attrgetter('is_jmp'), instrs):\n            instr.arg._target_of.add(instr)\n\n        self._instrs = instrs\n        self._argnames = tuple(_argnames)\n        self._argcount = argcount[0]\n        self._kwonlyargcount = kwonlyargcount[0]\n        self._cellvars = cellvars\n        self._freevars = freevars\n        self._name = name\n        self._filename = filename\n        self._firstlineno = firstlineno\n        self._lnotab = lnotab or {}\n        self._flags = Flag.pack(**dict(\n            dict(\n                CO_OPTIMIZED=True,\n                CO_NEWLOCALS=True,\n                CO_VARARGS=varg is not None,\n                CO_VARKEYWORDS=kwarg is not None,\n                CO_NESTED=False,\n                CO_GENERATOR=any(\n                    isinstance(instr, (YIELD_VALUE, YIELD_FROM))\n                    for instr in instrs\n                ),\n                CO_NOFREE=not any(map(op.attrgetter('uses_free'), instrs)),\n                CO_COROUTINE=False,\n                CO_ITERABLE_COROUTINE=False,\n                CO_FUTURE_DIVISION=False,\n                CO_FUTURE_ABSOLUTE_IMPORT=False,\n                CO_FUTURE_WITH_STATEMENT=False,\n                CO_FUTURE_PRINT_FUNCTION=False,\n                CO_FUTURE_UNICODE_LITERALS=False,\n                CO_FUTURE_BARRY_AS_BDFL=False,\n   
             CO_FUTURE_GENERATOR_STOP=False,\n            ),\n            **flags or {}\n        ))\n\n    @classmethod\n    def from_pyfunc(cls, f):\n        \"\"\"Create a Code object from a python function object.\n\n        Parameters\n        ----------\n        f : function\n            The function from which to construct a code object.\n\n        Returns\n        -------\n        code : Code\n            A Code object representing f.__code__.\n        \"\"\"\n        return cls.from_pycode(f.__code__)\n\n    @classmethod\n    def from_pycode(cls, co):\n        \"\"\"Create a Code object from a python code object.\n\n        Parameters\n        ----------\n        co : CodeType\n            The python code object.\n\n        Returns\n        -------\n        code : Code\n            The codetransformer Code object.\n        \"\"\"\n        # Make it sparse to instrs[n] is the instruction at bytecode[n]\n        sparse_instrs = tuple(\n            _sparse_args(\n                Instruction.from_opcode(\n                    b.opcode,\n                    Instruction._no_arg if b.arg is None else _RawArg(b.arg),\n                ) for b in Bytecode(co)\n            ),\n        )\n        for idx, instr in enumerate(sparse_instrs):\n            if instr is None:\n                # The sparse value\n                continue\n            if instr.absjmp:\n                instr.arg = sparse_instrs[instr.arg]\n            elif instr.reljmp:\n                instr.arg = sparse_instrs[instr.arg + idx + argsize + 1]\n            elif isinstance(instr, LOAD_CONST):\n                instr.arg = co.co_consts[instr.arg]\n            elif instr.uses_name:\n                instr.arg = co.co_names[instr.arg]\n            elif instr.uses_varname:\n                instr.arg = co.co_varnames[instr.arg]\n            elif instr.uses_free:\n                instr.arg = _freevar_argname(\n                    instr.arg,\n                    co.co_freevars,\n                    
co.co_cellvars,\n                )\n            elif instr.have_arg and isinstance(instr.arg, _RawArg):\n                instr.arg = int(instr.arg)\n\n        flags = Flag.unpack(co.co_flags)\n        has_vargs = flags['CO_VARARGS']\n        has_kwargs = flags['CO_VARKEYWORDS']\n\n        # Here we convert the varnames format into our argnames format.\n        paramnames = co.co_varnames[\n            :(co.co_argcount +\n              co.co_kwonlyargcount +\n              has_vargs +\n              has_kwargs)\n        ]\n        # We start with the positional arguments.\n        new_paramnames = list(paramnames[:co.co_argcount])\n        # Add *args next.\n        if has_vargs:\n            new_paramnames.append('*' + paramnames[-1 - has_kwargs])\n        # Add positional only arguments next.\n        new_paramnames.extend(paramnames[\n            co.co_argcount:co.co_argcount + co.co_kwonlyargcount\n        ])\n        # Add **kwargs last.\n        if has_kwargs:\n            new_paramnames.append('**' + paramnames[-1])\n\n        return cls(\n            filter(bool, sparse_instrs),\n            argnames=new_paramnames,\n            cellvars=co.co_cellvars,\n            freevars=co.co_freevars,\n            name=co.co_name,\n            filename=co.co_filename,\n            firstlineno=co.co_firstlineno,\n            lnotab={\n                lno: sparse_instrs[off] for off, lno in findlinestarts(co)\n            },\n            flags=flags,\n        )\n\n    def to_pycode(self):\n        \"\"\"Create a python code object from the more abstract\n        codetransfomer.Code object.\n\n        Returns\n        -------\n        co : CodeType\n            The python code object.\n        \"\"\"\n        consts = self.consts\n        names = self.names\n        varnames = self.varnames\n        freevars = self.freevars\n        cellvars = self.cellvars\n        bc = bytearray()\n        for instr in self.instrs:\n            bc.append(instr.opcode)  # Write the 
opcode byte.\n            if isinstance(instr, LOAD_CONST):\n                # Resolve the constant index.\n                bc.extend(consts.index(instr.arg).to_bytes(argsize, 'little'))\n            elif instr.uses_name:\n                # Resolve the name index.\n                bc.extend(names.index(instr.arg).to_bytes(argsize, 'little'))\n            elif instr.uses_varname:\n                # Resolve the local variable index.\n                bc.extend(\n                    varnames.index(instr.arg).to_bytes(argsize, 'little'),\n                )\n            elif instr.uses_free:\n                # uses_free is really \"uses freevars **or** cellvars\".\n                try:\n                    # look for the name in cellvars\n                    bc.extend(\n                        cellvars.index(instr.arg).to_bytes(argsize, 'little'),\n                    )\n                except ValueError:\n                    # fall back to freevars, incrementing the length of\n                    # cellvars.\n                    bc.extend(\n                        (freevars.index(instr.arg) + len(cellvars)).to_bytes(\n                            argsize,\n                            'little',\n                        )\n                    )\n            elif instr.absjmp:\n                # Resolve the absolute jump target.\n                bc.extend(\n                    self.bytecode_offset(instr.arg).to_bytes(\n                        argsize,\n                        'little',\n                    ),\n                )\n            elif instr.reljmp:\n                # Resolve the relative jump target.\n                # We do this by subtracting the curren't instructions's\n                # sparse index from the sparse index of the argument.\n                # We then subtract argsize - 1 to account for the bytes the\n                # current instruction takes up.\n                bytecode_offset = self.bytecode_offset\n                bc.extend((\n              
      bytecode_offset(instr.arg) -\n                    bytecode_offset(instr) -\n                    argsize -\n                    1\n                ).to_bytes(argsize, 'little',))\n            elif instr.have_arg:\n                # Write any other arg here.\n                bc.extend(instr.arg.to_bytes(argsize, 'little'))\n            elif WORDCODE:\n                # with wordcode, all instructions are padded to 2 bytes\n                bc.append(0)\n\n        return CodeType(\n            self.argcount,\n            self.kwonlyargcount,\n            len(varnames),\n            self.stacksize,\n            self.py_flags,\n            bytes(bc),\n            consts,\n            names,\n            varnames,\n            self.filename,\n            self.name,\n            self.firstlineno,\n            self.py_lnotab,\n            freevars,\n            cellvars,\n        )\n\n    @property\n    def instrs(self):\n        \"\"\"The instructions in this code object.\n        \"\"\"\n        return self._instrs\n\n    @property\n    def sparse_instrs(self):\n        \"\"\"The instructions where the index of an instruction\n        is the bytecode offset of that instruction.\n\n        None indicates that no instruction is at that offset.\n        \"\"\"\n        return tuple(_sparse_args(self.instrs))\n\n    @property\n    def argcount(self):\n        \"\"\"The number of arguments this code object accepts.\n\n        This does not include varargs (\\*args).\n        \"\"\"\n        return self._argcount\n\n    @property\n    def kwonlyargcount(self):\n        \"\"\"The number of keyword only arguments this code object accepts.\n\n        This does not include varkwargs (\\*\\*kwargs).\n        \"\"\"\n        return self._kwonlyargcount\n\n    @property\n    def consts(self):\n        \"\"\"The constants referenced in this code object.\n        \"\"\"\n        # We cannot use a set comprehension because consts do not need\n        # to be hashable.\n        
consts = []\n        append_const = consts.append\n        for instr in self.instrs:\n            if isinstance(instr, LOAD_CONST) and instr.arg not in consts:\n                append_const(instr.arg)\n        return tuple(consts)\n\n    @property\n    def names(self):\n        \"\"\"The names referenced in this code object.\n\n        Names come from instructions like LOAD_GLOBAL or STORE_ATTR\n        where the name of the global or attribute is needed at runtime.\n        \"\"\"\n        # We must sort to preserve the order between calls.\n        # The set comprehension is to drop the duplicates.\n        return tuple(sorted({\n            instr.arg for instr in self.instrs if instr.uses_name\n        }))\n\n    @property\n    def argnames(self):\n        \"\"\"The names of the arguments to this code object.\n\n        The format is: [args] [vararg] [kwonlyargs] [varkwarg]\n        where each group is optional.\n        \"\"\"\n        return self._argnames\n\n    @property\n    def varnames(self):\n        \"\"\"The names of all of the local variables in this code object.\n        \"\"\"\n        # We must sort to preserve the order between calls.\n        # The set comprehension is to drop the duplicates.\n        return self._argnames + tuple(sorted({\n            instr.arg\n            for instr in self.instrs\n            if instr.uses_varname and instr.arg not in self._argnames\n        }))\n\n    @property\n    def cellvars(self):\n        \"\"\"The names of the variables closed over by inner code objects.\n        \"\"\"\n        return self._cellvars\n\n    @property\n    def freevars(self):\n        \"\"\"The names of the variables this code object has closed over.\n        \"\"\"\n        return self._freevars\n\n    @property\n    def flags(self):\n        \"\"\"The flags of this code object represented as a mapping from flag\n        name to boolean status.\n\n        Notes\n        -----\n        This is a copy of the underlying flags. 
Mutations will not affect\n        the code object.\n        \"\"\"\n        return Flag.unpack(self._flags)\n\n    @property\n    def py_flags(self):\n        \"\"\"The flags of this code object represented as a bitmask.\n        \"\"\"\n        return self._flags\n\n    @property\n    def is_nested(self):\n        \"\"\"Is this a nested code object?\n        \"\"\"\n        return bool(self._flags & Flag.CO_NESTED)\n\n    @property\n    def is_generator(self):\n        \"\"\"Is this a generator?\n        \"\"\"\n        return bool(self._flags & Flag.CO_GENERATOR)\n\n    @property\n    def is_coroutine(self):\n        \"\"\"Is this a coroutine defined with async def?\n\n        This is 3.5 and greater.\n        \"\"\"\n        return bool(self._flags & Flag.CO_COROUTINE)\n\n    @property\n    def is_iterable_coroutine(self):\n        \"\"\"Is this an async generator defined with types.coroutine?\n\n        This is 3.5 and greater.\n        \"\"\"\n        return bool(self._flags & Flag.CO_ITERABLE_COROUTINE)\n\n    @property\n    def constructs_new_locals(self):\n        \"\"\"Does this code object construct new locals?\n\n        This is True for things like functions where executing the code\n        needs a new locals dict each time; however, something like a module\n        does not normally need new locals.\n        \"\"\"\n        return bool(self._flags & Flag.CO_NEWLOCALS)\n\n    @property\n    def filename(self):\n        \"\"\"The filename of this code object.\n        \"\"\"\n        return self._filename\n\n    @property\n    def name(self):\n        \"\"\"The name of this code object.\n        \"\"\"\n        return self._name\n\n    @property\n    def firstlineno(self):\n        \"\"\"The first source line from self.filename\n        that this code object represents.\n        \"\"\"\n        return self._firstlineno\n\n    @property\n    def lnotab(self):\n        \"\"\"The mapping of line number to the first instruction on that line.\n        
\"\"\"\n        return self._lnotab\n\n    @lazyval\n    def lno_of_instr(self):\n        instrs = self.instrs\n        lnos = [None] * len(instrs)\n        reverse_lnotab = reverse_dict(self.lnotab)\n        for n, instr in enumerate(instrs):\n            lnos[n] = reverse_lnotab.get(instr)\n        return dict(zip(instrs, ffill(lnos)))\n\n    @property\n    def py_lnotab(self):\n        \"\"\"The encoded lnotab that python uses to compute when lines start.\n\n        Note\n        ----\n        See Objects/lnotab_notes.txt in the cpython source for more details.\n        \"\"\"\n        reverse_lnotab = reverse_dict(self.lnotab)\n        py_lnotab = []\n        prev_instr = 0\n        prev_lno = self.firstlineno\n        for addr, instr in enumerate(_sparse_args(self.instrs)):\n            lno = reverse_lnotab.get(instr)\n            if lno is None:\n                continue\n\n            delta = lno - prev_lno\n            py_lnotab.append(addr - prev_instr)\n            py_lnotab.append(min(delta, max_lnotab_increment))\n            delta -= max_lnotab_increment\n            while delta > 0:\n                py_lnotab.append(0)\n                py_lnotab.append(min(delta, max_lnotab_increment))\n                delta -= max_lnotab_increment\n\n            prev_lno = lno\n            prev_instr = addr\n\n        return bytes(py_lnotab)\n\n    @property\n    def stacksize(self):\n        \"\"\"The maximum amount of stack space used by this code object.\n        \"\"\"\n        return max(scanl(\n            op.add,\n            0,\n            map(op.attrgetter('stack_effect'), self.instrs),\n        ))\n\n    def index(self, instr):\n        \"\"\"Returns the index of instr.\n\n        Parameters\n        ----------\n        instr : Instruction\n            The instruction the check the index of.\n\n        Returns\n        -------\n        idx : int\n            The index of instr in this code object.\n        \"\"\"\n        return 
self.instrs.index(instr)\n\n    def bytecode_offset(self, instr):\n        \"\"\"Returns the offset of instr in the bytecode representation.\n\n        Parameters\n        ----------\n        instr : Instruction\n            The instruction the check the index of.\n\n        Returns\n        -------\n        idx : int\n            The index of instr in this code object in the sparse instructions.\n        \"\"\"\n        return self.sparse_instrs.index(instr)\n\n    def __getitem__(self, key):\n        return self.instrs[key]\n\n    def __iter__(self):\n        return iter(self.instrs)\n\n    def __len__(self):\n        return len(self.instrs)\n\n    def __contains__(self, instr):\n        return instr in self.instrs\n\n    def dis(self, file=None):\n        \"\"\"\n        Print self via the stdlib ``dis`` module.\n\n        Parameters\n        ----------\n        file : file-like, optional\n            A file-like object into which we should print.\n            Defaults to sys.stdout.\n        \"\"\"\n        dis(self.to_pycode(), file=file)\n"
  },
  {
    "path": "codetransformer/core.py",
    "content": "from collections import OrderedDict\nfrom contextlib import contextmanager\nfrom ctypes import py_object, pythonapi\nfrom itertools import chain\nfrom types import CodeType, FunctionType\nfrom weakref import WeakKeyDictionary\n\ntry:\n    import threading\nexcept ImportError:\n    import dummy_threading as threading\n\nfrom .code import Code\nfrom .instructions import LOAD_CONST, STORE_FAST, LOAD_FAST\nfrom .patterns import (\n    boundpattern,\n    patterndispatcher,\n    DEFAULT_STARTCODE,\n)\nfrom .utils.instance import instance\n\n\n_cell_new = pythonapi.PyCell_New\n_cell_new.argtypes = (py_object,)\n_cell_new.restype = py_object\n\n\ndef _a_if_not_none(a, b):\n    return a if a is not None else b\n\n\ndef _new_lnotab(instrs, lnotab):\n    \"\"\"The updated lnotab after the instructions have been transformed.\n\n    Parameters\n    ----------\n    instrs : iterable[Instruction]\n        The new instructions.\n    lnotab : dict[Instruction -> int]\n        The lnotab for the old code object.\n\n    Returns\n    -------\n    new_lnotab : dict[Instruction -> int]\n        The post transform lnotab.\n    \"\"\"\n    return {\n        lno: _a_if_not_none(instr._stolen_by, instr)\n        for lno, instr in lnotab.items()\n    }\n\n\nclass NoContext(Exception):\n    \"\"\"Exception raised to indicate that the ``code` or ``startcode``\n    attribute was accessed outside of a code context.\n    \"\"\"\n    def __init__(self):\n        return super().__init__('no active transformation context')\n\n\nclass Context:\n    \"\"\"Empty object for holding the transformation context.\n    \"\"\"\n    def __init__(self, code):\n        self.code = code\n        self.startcode = DEFAULT_STARTCODE\n\n    def __repr__(self):  # pragma: no cover\n        return '<%s: %r>' % (type(self).__name__, self.__dict__)\n\n\nclass CodeTransformerMeta(type):\n    \"\"\"Meta class for CodeTransformer to collect all of the patterns\n    and ensure the class dict is ordered.\n\n   
 Patterns are created when a method is decorated with\n    ``codetransformer.pattern.pattern``\n    \"\"\"\n    def __new__(mcls, name, bases, dict_):\n        dict_['patterndispatcher'] = patterndispatcher(*chain(\n            (v for v in dict_.values() if isinstance(v, boundpattern)),\n            *(\n                d and d.patterns for d in (\n                    getattr(b, 'patterndispatcher', ()) for b in bases\n                )\n            )\n        ))\n        return super().__new__(mcls, name, bases, dict_)\n\n    def __prepare__(self, bases):\n        return OrderedDict()\n\n\nclass CodeTransformer(metaclass=CodeTransformerMeta):\n    \"\"\"A code object transformer, similar to the NodeTransformer\n    from the ast module.\n\n    Attributes\n    ----------\n    code\n    \"\"\"\n    __slots__ = '__weakref__',\n\n    def transform_consts(self, consts):\n        \"\"\"transformer for the co_consts field.\n\n        Override this method to transform the `co_consts` of the code object.\n\n        Parameters\n        ----------\n        consts : tuple\n            The co_consts\n\n        Returns\n        -------\n        new_consts : tuple\n            The new constants.\n        \"\"\"\n        return tuple(\n            self.transform(Code.from_pycode(const)).to_pycode()\n            if isinstance(const, CodeType) else\n            const\n            for const in consts\n        )\n\n    def _id(self, obj):\n        \"\"\"Identity function.\n\n        Parameters\n        ----------\n        obj : any\n            The object to return\n\n        Returns\n        -------\n        obj : any\n            The input unchanged\n        \"\"\"\n        return obj\n\n    transform_name = _id\n    transform_names = _id\n    transform_varnames = _id\n    transform_freevars = _id\n    transform_cellvars = _id\n    transform_defaults = _id\n\n    del _id\n\n    def transform(self, code, *, name=None, filename=None):\n        \"\"\"Transform a codetransformer.Code 
object applying the transforms.\n\n        Parameters\n        ----------\n        code : Code\n            The code object to transform.\n        name : str, optional\n            The new name for this code object.\n        filename : str, optional\n            The new filename for this code object.\n\n        Returns\n        -------\n        new_code : Code\n            The transformed code object.\n        \"\"\"\n        # reverse lookups from for constants and names.\n        reversed_consts = {}\n        reversed_names = {}\n        reversed_varnames = {}\n        for instr in code:\n            if isinstance(instr, LOAD_CONST):\n                reversed_consts[instr] = instr.arg\n            if instr.uses_name:\n                reversed_names[instr] = instr.arg\n            if isinstance(instr, (STORE_FAST, LOAD_FAST)):\n                reversed_varnames[instr] = instr.arg\n\n        instrs, consts = tuple(zip(*reversed_consts.items())) or ((), ())\n        for instr, const in zip(instrs, self.transform_consts(consts)):\n            instr.arg = const\n\n        instrs, names = tuple(zip(*reversed_names.items())) or ((), ())\n        for instr, name_ in zip(instrs, self.transform_names(names)):\n            instr.arg = name_\n\n        instrs, varnames = tuple(zip(*reversed_varnames.items())) or ((), ())\n        for instr, varname in zip(instrs, self.transform_varnames(varnames)):\n            instr.arg = varname\n\n        with self._new_context(code):\n            post_transform = self.patterndispatcher(code)\n\n            return Code(\n                post_transform,\n                code.argnames,\n                cellvars=self.transform_cellvars(code.cellvars),\n                freevars=self.transform_freevars(code.freevars),\n                name=name if name is not None else code.name,\n                filename=filename if filename is not None else code.filename,\n                firstlineno=code.firstlineno,\n                
lnotab=_new_lnotab(post_transform, code.lnotab),\n                flags=code.flags,\n            )\n\n    def __call__(self, f, *,\n                 globals_=None, name=None, defaults=None, closure=None):\n        # Callable so that we can use CodeTransformers as decorators.\n        if closure is not None:\n            closure = tuple(map(_cell_new, closure))\n        else:\n            closure = f.__closure__\n\n        return FunctionType(\n            self.transform(Code.from_pycode(f.__code__)).to_pycode(),\n            _a_if_not_none(globals_, f.__globals__),\n            _a_if_not_none(name, f.__name__),\n            _a_if_not_none(defaults, f.__defaults__),\n            closure,\n        )\n\n    @instance\n    class _context_stack(threading.local):\n        \"\"\"Thread safe transformation context stack.\n\n        Each thread will get it's own ``WeakKeyDictionary`` that maps\n        instances to a stack of ``Context`` objects. When this descriptor\n        is looked up we first try to get the weakkeydict off of the thread\n        local storage. If it doesn't exist we make a new map. Then we lookup\n        our instance in this map. 
If it doesn't exist yet create a new stack\n        (as an empty list).\n\n        This allows a single instance of ``CodeTransformer`` to be used\n        recursively to transform code objects in a thread safe way while\n        still being able to use a stateful context.\n        \"\"\"\n        def __get__(self, instance, owner):\n            try:\n                stacks = self._context_stacks\n            except AttributeError:\n                stacks = self._context_stacks = WeakKeyDictionary()\n\n            if instance is None:\n                # when looked up off the class return the current threads\n                # context stacks map\n                return stacks\n\n            return stacks.setdefault(instance, [])\n\n    @contextmanager\n    def _new_context(self, code):\n        self._context_stack.append(Context(code))\n        try:\n            yield\n        finally:\n            self._context_stack.pop()\n\n    @property\n    def context(self):\n        \"\"\"Lookup the current transformation context.\n\n        Raises\n        ------\n        NoContext\n            Raised when there is no active transformation context.\n        \"\"\"\n        try:\n            return self._context_stack[-1]\n        except IndexError:\n            raise NoContext()\n\n    @property\n    def code(self):\n        \"\"\"The code object we are currently manipulating.\n        \"\"\"\n        return self.context.code\n\n    @property\n    def startcode(self):\n        \"\"\"The startcode we are currently in.\n        \"\"\"\n        return self.context.startcode\n\n    def begin(self, startcode):\n        \"\"\"Begin a new startcode.\n\n        Parameters\n        ----------\n        startcode : any\n            The startcode to begin.\n        \"\"\"\n        self.context.startcode = startcode\n"
  },
  {
    "path": "codetransformer/decompiler/_343.py",
    "content": "import ast\nfrom collections import deque\nfrom functools import singledispatch\nfrom itertools import takewhile\nimport types\n\nfrom toolz import complement, compose, curry, sliding_window\nimport toolz.curried.operator as op\n\nfrom . import paramnames\nfrom ..code import Code\nfrom .. import instructions as instrs\nfrom ..utils.functional import not_a, is_a\nfrom ..utils.immutable import immutable\nfrom codetransformer import a as showa, d as showd  # noqa\n\n\n__all__ = [\n    'DecompilationContext',\n    'DecompilationError',\n    'decompile',\n    'pycode_to_body',\n]\n\n\nclass DecompilationError(Exception):\n    pass\n\n\nclass DecompilationContext(immutable,\n                           defaults={\n                               \"in_function_block\": False,\n                               \"in_lambda\": False,\n                               \"make_function_context\": None,\n                               \"top_of_loop\": None}):\n\n    \"\"\"\n    Value representing the context of the current decompilation run.\n    \"\"\"\n    __slots__ = (\n        'in_function_block',\n        'in_lambda',\n        'make_function_context',\n        'top_of_loop',\n    )\n\n\nclass MakeFunctionContext(immutable):\n    __slots__ = ('closure',)\n\n\ndef decompile(f):\n    \"\"\"\n    Decompile a function.\n\n    Parameters\n    ----------\n    f : function\n        The function to decompile.\n\n    Returns\n    -------\n    ast : ast.FunctionDef\n        A FunctionDef node that compiles to f.\n    \"\"\"\n    co = f.__code__\n    args, kwonly, varargs, varkwargs = paramnames(co)\n    annotations = f.__annotations__ or {}\n    defaults = list(f.__defaults__ or ())\n    kw_defaults = f.__kwdefaults__ or {}\n\n    if f.__name__ == '<lambda>':\n        node = ast.Lambda\n        body = pycode_to_body(co, DecompilationContext(in_lambda=True))[0]\n        extra_kwargs = {}\n    else:\n        node = ast.FunctionDef\n        body = pycode_to_body(co, 
DecompilationContext(in_function_block=True))\n        extra_kwargs = {\n            'decorator_list': [],\n            'returns': annotations.get('return')\n        }\n\n    return node(\n        name=f.__name__,\n        args=make_function_arguments(\n            args=args,\n            kwonly=kwonly,\n            varargs=varargs,\n            varkwargs=varkwargs,\n            defaults=defaults,\n            kw_defaults=kw_defaults,\n            annotations=annotations,\n        ),\n        body=body,\n        **extra_kwargs\n    )\n\n\ndef pycode_to_body(co, context):\n    \"\"\"\n    Convert a Python code object to a list of AST body elements.\n    \"\"\"\n    code = Code.from_pycode(co)\n\n    # On each instruction, temporarily store all the jumps to the **next**\n    # instruction.  This is used in _make_expr to determine when an expression\n    # is part of a short-circuiting expression.\n    for a, b in sliding_window(2, code.instrs):\n        a._next_target_of = b._target_of\n    b._next_target_of = set()\n\n    try:\n        body = instrs_to_body(deque(code.instrs), context)\n        if context.in_function_block:\n            return make_global_and_nonlocal_decls(code.instrs) + body\n        return body\n    finally:\n        # Clean up jump target data.\n        for i in code.instrs:\n            del i._next_target_of\n\n\ndef instrs_to_body(instrs, context):\n    \"\"\"\n    Convert a list of Instruction objects to a list of AST body nodes.\n    \"\"\"\n    stack = []\n    body = []\n    process_instrs(instrs, stack, body, context)\n\n    if stack:\n        raise DecompilationError(\n            \"Non-empty stack at the end of instrs_to_body(): %s.\" % stack\n        )\n    return body\n\n\ndef process_instrs(queue, stack, body, context):\n    \"\"\"\n    Process instructions from the instruction queue.\n    \"\"\"\n    next_instr = queue.popleft\n    while queue:\n        newcontext = _process_instr(next_instr(), queue, stack, body, context)\n        
if newcontext is not None:\n            context = newcontext\n\n\n@singledispatch\ndef _process_instr(instr, queue, stack, body, context):\n    raise AssertionError(\n        \"process_instr() passed a non-instruction argument %s\" % type(instr)\n    )\n\n\n@_process_instr.register(instrs.Instruction)\ndef _instr(instr, queue, stack, body, context):\n    raise DecompilationError(\n        \"Don't know how to decompile instructions of type %s\" % type(instr)\n    )\n\n\n@_process_instr.register(instrs.POP_JUMP_IF_TRUE)\n@_process_instr.register(instrs.POP_JUMP_IF_FALSE)\ndef _process_jump(instr, queue, stack, body, context):\n    stack_effect_until_target = sum(\n        map(\n            op.attrgetter('stack_effect'),\n            takewhile(op.is_not(instr.arg), queue)\n        )\n    )\n    if stack_effect_until_target == 0:\n        body.append(make_if_statement(instr, queue, stack, context))\n        return\n    else:\n        raise DecompilationError(\n            \"Don't know how to decompile `and`/`or`/`ternary` exprs.\"\n        )\n\n\ndef make_if_statement(instr, queue, stack, context):\n    \"\"\"\n    Make an ast.If block from a POP_JUMP_IF_TRUE or POP_JUMP_IF_FALSE.\n    \"\"\"\n    test_expr = make_expr(stack)\n    if isinstance(instr, instrs.POP_JUMP_IF_TRUE):\n        test_expr = ast.UnaryOp(op=ast.Not(), operand=test_expr)\n\n    first_block = popwhile(op.is_not(instr.arg), queue, side='left')\n    if isinstance(first_block[-1], instrs.RETURN_VALUE):\n        body = instrs_to_body(first_block, context)\n        return ast.If(test=test_expr, body=body, orelse=[])\n\n    jump_to_end = expect(\n        first_block.pop(), instrs.JUMP_FORWARD, \"at end of if-block\"\n    )\n\n    body = instrs_to_body(first_block, context)\n\n    # First instruction after the whole if-block.\n    end = jump_to_end.arg\n    if instr.arg is jump_to_end.arg:\n        orelse = []\n    else:\n        orelse = instrs_to_body(\n            popwhile(op.is_not(end), queue, 
side='left'),\n            context,\n        )\n\n    return ast.If(test=test_expr, body=body, orelse=orelse)\n\n\n@_process_instr.register(instrs.EXTENDED_ARG)\ndef _process_instr_extended_arg(instr, queue, stack, body, context):\n    \"\"\"We account for EXTENDED_ARG when constructing Code objects.\"\"\"\n    pass\n\n\n@_process_instr.register(instrs.UNPACK_SEQUENCE)\ndef _process_instr_unpack_sequence(instr, queue, stack, body, context):\n    body.append(make_assignment(instr, queue, stack))\n\n\n@_process_instr.register(instrs.IMPORT_NAME)\ndef _process_instr_import_name(instr, queue, stack, body, context):\n    \"\"\"\n    Process an IMPORT_NAME instruction.\n\n    Side Effects\n    ------------\n    Pops two instuctions from `stack`\n    Consumes instructions from `queue` to the end of the import statement.\n    Appends an ast.Import or ast.ImportFrom node to `body`.\n    \"\"\"\n    # If this is \"import module\", fromlist is None.\n    # If this this is \"from module import a, b fromlist will be ('a', 'b').\n    fromlist = stack.pop().arg\n\n    # level argument to __import__.  
Should be 0, 1, or 2.\n    level = stack.pop().arg\n\n    module = instr.arg\n    if fromlist is None:  # Regular import.\n        attr_loads = _pop_import_LOAD_ATTRs(module, queue)\n        store = queue.popleft()\n        # There are two cases where we should emit an alias:\n        # import a as <anything but a>\n        # import a.b.c as <anything (including a)>\n        if attr_loads or module.split('.')[0] != store.arg:\n            asname = store.arg\n        else:\n            asname = None\n        body.append(\n            ast.Import(\n                names=[\n                    ast.alias(\n                        name=module,\n                        asname=(asname),\n                    ),\n                ],\n                level=level,\n            ),\n        )\n        return\n    elif fromlist == ('*',):  # From module import *.\n        expect(queue.popleft(), instrs.IMPORT_STAR, \"after IMPORT_NAME\")\n        body.append(\n            ast.ImportFrom(\n                module=module,\n                names=[ast.alias(name='*', asname=None)],\n                level=level,\n            ),\n        )\n        return\n\n    # Consume a pair of IMPORT_FROM, STORE_NAME instructions for each entry in\n    # fromlist.\n    names = list(map(make_importfrom_alias(queue, body, context), fromlist))\n    body.append(ast.ImportFrom(module=module, names=names, level=level))\n\n    # Remove the final POP_TOP of the imported module.\n    expect(queue.popleft(), instrs.POP_TOP, \"after 'from import'\")\n\n\ndef _pop_import_LOAD_ATTRs(module_name, queue):\n    \"\"\"\n    Pop LOAD_ATTR instructions for an import of the form::\n\n        import a.b.c as d\n\n    which should generate bytecode like this::\n\n        1           0 LOAD_CONST               0 (0)\n                    3 LOAD_CONST               1 (None)\n                    6 IMPORT_NAME              0 (a.b.c.d)\n                    9 LOAD_ATTR                1 (b)\n                   12 LOAD_ATTR       
         2 (c)\n                   15 LOAD_ATTR                3 (d)\n                   18 STORE_NAME               3 (d)\n    \"\"\"\n    popped = popwhile(is_a(instrs.LOAD_ATTR), queue, side='left')\n    if popped:\n        expected = module_name.split('.', maxsplit=1)[1]\n        actual = '.'.join(map(op.attrgetter('arg'), popped))\n        if expected != actual:\n            raise DecompilationError(\n                \"Decompiling import of module %s, but LOAD_ATTRS imply %s\" % (\n                    expected, actual,\n                )\n            )\n    return popped\n\n\n@curry\ndef make_importfrom_alias(queue, body, context, name):\n    \"\"\"\n    Make an ast.alias node for the names list of an ast.ImportFrom.\n\n    Parameters\n    ----------\n    queue : deque\n        Instruction Queue\n    body : list\n        Current body.\n    context : DecompilationContext\n    name : str\n        Expected name of the IMPORT_FROM node to be popped.\n\n    Returns\n    -------\n    alias : ast.alias\n\n    Side Effects\n    ------------\n    Consumes IMPORT_FROM and STORE_NAME instructions from queue.\n    \"\"\"\n    import_from, store = queue.popleft(), queue.popleft()\n    expect(import_from, instrs.IMPORT_FROM, \"after IMPORT_NAME\")\n\n    if not import_from.arg == name:\n        raise DecompilationError(\n            \"IMPORT_FROM name mismatch. 
Expected %r, but got %s.\" % (\n                name, import_from,\n            )\n        )\n    return ast.alias(\n        name=name,\n        asname=store.arg if store.arg != name else None,\n    )\n\n\n@_process_instr.register(instrs.COMPARE_OP)\n@_process_instr.register(instrs.UNARY_NOT)\n@_process_instr.register(instrs.BINARY_SUBSCR)\n@_process_instr.register(instrs.LOAD_ATTR)\n@_process_instr.register(instrs.LOAD_GLOBAL)\n@_process_instr.register(instrs.LOAD_CONST)\n@_process_instr.register(instrs.LOAD_FAST)\n@_process_instr.register(instrs.LOAD_NAME)\n@_process_instr.register(instrs.LOAD_DEREF)\n@_process_instr.register(instrs.LOAD_CLOSURE)\n@_process_instr.register(instrs.BUILD_TUPLE)\n@_process_instr.register(instrs.BUILD_SET)\n@_process_instr.register(instrs.BUILD_LIST)\n@_process_instr.register(instrs.BUILD_MAP)\n@_process_instr.register(instrs.STORE_MAP)\n@_process_instr.register(instrs.CALL_FUNCTION)\n@_process_instr.register(instrs.CALL_FUNCTION_VAR)\n@_process_instr.register(instrs.CALL_FUNCTION_KW)\n@_process_instr.register(instrs.CALL_FUNCTION_VAR_KW)\n@_process_instr.register(instrs.BUILD_SLICE)\n@_process_instr.register(instrs.JUMP_IF_TRUE_OR_POP)\n@_process_instr.register(instrs.JUMP_IF_FALSE_OR_POP)\ndef _push(instr, queue, stack, body, context):\n    \"\"\"\n    Just push these instructions onto the stack for further processing\n    downstream.\n    \"\"\"\n    stack.append(instr)\n\n\n@_process_instr.register(instrs.MAKE_FUNCTION)\n@_process_instr.register(instrs.MAKE_CLOSURE)\ndef _make_function(instr, queue, stack, body, context):\n    \"\"\"\n    Set a make_function_context, then push onto the stack.\n    \"\"\"\n    assert stack, \"Empty stack before MAKE_FUNCTION.\"\n    prev = stack[-1]\n    expect(prev, instrs.LOAD_CONST, \"before MAKE_FUNCTION\")\n\n    stack.append(instr)\n\n    if is_lambda_name(prev.arg):\n        return\n\n    return context.update(\n        make_function_context=MakeFunctionContext(\n            
closure=isinstance(instr, instrs.MAKE_CLOSURE),\n        )\n    )\n\n\n@_process_instr.register(instrs.STORE_FAST)\n@_process_instr.register(instrs.STORE_NAME)\n@_process_instr.register(instrs.STORE_DEREF)\n@_process_instr.register(instrs.STORE_GLOBAL)\ndef _store(instr, queue, stack, body, context):\n    # This is set by MAKE_FUNCTION nodes to register that the next `STORE_NAME`\n    # should create a FunctionDef node.\n    if context.make_function_context is not None:\n        body.append(\n            make_function(\n                pop_arguments(instr, stack),\n                **context.make_function_context.to_dict()\n            ),\n        )\n        return context.update(make_function_context=None)\n\n    body.append(make_assignment(instr, queue, stack))\n\n\n@_process_instr.register(instrs.DUP_TOP)\ndef _dup_top(instr, queue, stack, body, context):\n    body.append(make_assignment(instr, queue, stack))\n\n\ndef make_assignment(instr, queue, stack):\n    \"\"\"\n    Make an ast.Assign node.\n    \"\"\"\n    value = make_expr(stack)\n\n    # Make assignment targets.\n    # If there are multiple assignments (e.g. 
'a = b = c'),\n    # each LHS expression except the last is preceded by a DUP_TOP instruction.\n    # Thus, we make targets until we don't see a DUP_TOP, and then make one\n    # more.\n    targets = []\n    while isinstance(instr, instrs.DUP_TOP):\n        targets.append(make_assign_target(queue.popleft(), queue, stack))\n        instr = queue.popleft()\n\n    targets.append(make_assign_target(instr, queue, stack))\n\n    return ast.Assign(targets=targets, value=value)\n\n\n@singledispatch\ndef make_assign_target(instr, queue, stack):\n    \"\"\"\n    Make an AST node for the LHS of an assignment beginning at `instr`.\n    \"\"\"\n    raise DecompilationError(\"Can't make assignment target for %s.\" % instr)\n\n\n@make_assign_target.register(instrs.STORE_FAST)\n@make_assign_target.register(instrs.STORE_NAME)\n@make_assign_target.register(instrs.STORE_DEREF)\n@make_assign_target.register(instrs.STORE_GLOBAL)\ndef make_assign_target_store(instr, queue, stack):\n    return ast.Name(id=instr.arg, ctx=ast.Store())\n\n\n@make_assign_target.register(instrs.STORE_ATTR)\ndef make_assign_target_setattr(instr, queue, stack):\n    return ast.Attribute(\n        value=make_expr(stack),\n        attr=instr.arg,\n        ctx=ast.Store(),\n    )\n\n\n@make_assign_target.register(instrs.STORE_SUBSCR)\ndef make_assign_target_setitem(instr, queue, stack):\n    slice_ = make_slice(stack)\n    collection = make_expr(stack)\n    return ast.Subscript(\n        value=collection,\n        slice=slice_,\n        ctx=ast.Store(),\n    )\n\n\n@make_assign_target.register(instrs.UNPACK_SEQUENCE)\ndef make_assign_target_unpack(instr, queue, stack):\n    return ast.Tuple(\n        elts=[\n            make_assign_target(queue.popleft(), queue, stack)\n            for _ in range(instr.arg)\n        ],\n        ctx=ast.Store(),\n    )\n\n\n@make_assign_target.register(instrs.LOAD_NAME)\n@make_assign_target.register(instrs.LOAD_ATTR)\n@make_assign_target.register(instrs.BINARY_SUBSCR)\ndef 
make_assign_target_load_name(instr, queue, stack):\n    # We hit this case when a setattr or setitem is nested in a more complex\n    # assignment.  Just push the load onto the stack to be processed by the\n    # upcoming STORE_ATTR or STORE_SUBSCR.\n    stack.append(instr)\n    return make_assign_target(queue.popleft(), queue, stack)\n\n\n@_process_instr.register(instrs.STORE_ATTR)\n@_process_instr.register(instrs.STORE_SUBSCR)\ndef _store_subscr(instr, queue, stack, body, context):\n    target = make_assign_target(instr, queue, stack)\n    rhs = make_expr(stack)\n    body.append(ast.Assign(targets=[target], value=rhs))\n\n\n@_process_instr.register(instrs.POP_TOP)\ndef _pop(instr, queue, stack, body, context):\n    body.append(ast.Expr(value=make_expr(stack)))\n\n\n@_process_instr.register(instrs.RETURN_VALUE)\ndef _return(instr, queue, stack, body, context):\n    if context.in_function_block:\n        body.append(ast.Return(value=make_expr(stack)))\n    elif context.in_lambda:\n        if body:\n            raise DecompilationError(\"Non-empty body in lambda: %s\" % body)\n        # Just append the raw expr.  
We'll extract the raw value in\n        # `make_lambda`.\n        body.append(make_expr(stack))\n    else:\n        _check_stack_for_module_return(stack)\n        # Pop dummy LOAD_CONST(None) at the end of a module.\n        stack.pop()\n        return\n\n\n@_process_instr.register(instrs.BREAK_LOOP)\ndef _jump_break_loop(instr, queue, stack, body, context):\n    if context.top_of_loop is None:\n        raise DecompilationError(\"BREAK_LOOP outside of loop.\")\n    body.append(ast.Break())\n\n\n@_process_instr.register(instrs.JUMP_ABSOLUTE)\ndef _jump_absolute(instr, queue, stack, body, context):\n    if instr.arg is context.top_of_loop:\n        body.append(ast.Continue())\n        return\n    raise DecompilationError(\"Don't know how to decompile %s.\" % instr)\n\n\n@_process_instr.register(instrs.SETUP_WITH)\ndef _process_instr_setup_with(instr, queue, stack, body, context):\n    items = [make_withitem(queue, stack)]\n    block_body = instrs_to_body(\n        pop_with_body_instrs(instr, queue),\n        context,\n    )\n\n    # Handle compound with statement (e.g. 
\"with a, b\").\n    if len(block_body) == 1 and isinstance(block_body[0], ast.With):\n        nested_with = block_body[0]\n        # Merge the inner block's items with our top-level items.\n        items += nested_with.items\n        # Use the inner block's body as the real body.\n        block_body = nested_with.body\n\n    return body.append(\n        ast.With(items=items, body=block_body)\n    )\n\n\ndef pop_with_body_instrs(setup_with_instr, queue):\n    \"\"\"\n    Pop instructions from `queue` that form the body of a with block.\n    \"\"\"\n    body_instrs = popwhile(op.is_not(setup_with_instr.arg), queue, side='left')\n\n    # Last two instructions should always be POP_BLOCK, LOAD_CONST(None).\n    # These don't correspond to anything in the AST, so remove them here.\n    load_none = body_instrs.pop()\n    expect(load_none, instrs.LOAD_CONST, \"at end of with-block\")\n    pop_block = body_instrs.pop()\n    expect(pop_block, instrs.POP_BLOCK, \"at end of with-block\")\n    if load_none.arg is not None:\n        raise DecompilationError(\n            \"Expected LOAD_CONST(None), but got \"\n            \"%r instead\" % (load_none)\n        )\n\n    # Target of the setup_with should be a WITH_CLEANUP instruction followed by\n    # an END_FINALLY.  
Neither of these correspond to anything in the AST.\n    with_cleanup = queue.popleft()\n    expect(with_cleanup, instrs.WITH_CLEANUP, \"at end of with-block\")\n    end_finally = queue.popleft()\n    expect(end_finally, instrs.END_FINALLY, \"at end of with-block\")\n\n    return body_instrs\n\n\ndef make_withitem(queue, stack):\n    \"\"\"\n    Make an ast.withitem node.\n    \"\"\"\n    context_expr = make_expr(stack)\n    # This is a POP_TOP for just \"with <expr>:\".\n    # This is a STORE_NAME(name) for \"with <expr> as <name>:\".\n    as_instr = queue.popleft()\n    if isinstance(as_instr, (instrs.STORE_FAST,\n                             instrs.STORE_NAME,\n                             instrs.STORE_DEREF,\n                             instrs.STORE_GLOBAL)):\n        return ast.withitem(\n            context_expr=context_expr,\n            optional_vars=make_assign_target(as_instr, queue, stack),\n        )\n    elif isinstance(as_instr, instrs.POP_TOP):\n        return ast.withitem(context_expr=context_expr, optional_vars=None)\n    else:\n        raise DecompilationError(\n            \"Don't know how to make withitem from %s\" % as_instr,\n        )\n\n\n@_process_instr.register(instrs.SETUP_LOOP)\ndef _loop(instr, queue, stack, body, context):\n    loop_type, loop_body, else_body = pop_loop_instrs(instr, queue)\n    assert loop_type in ('for', 'while'), \"Unknown loop type %r\" % loop_type\n    if loop_type == 'for':\n        body.append(make_for_loop(loop_body, else_body, context))\n    elif loop_type == 'while':\n        body.append(make_while_loop(loop_body, else_body, context))\n\n\ndef make_for_loop(loop_body_instrs, else_body_instrs, context):\n    \"\"\"\n    Make an ast.For node.\n    \"\"\"\n    # Instructions from start until GET_ITER are the builders for the iterator\n    # expression.\n    iterator_expr = make_expr(\n        popwhile(not_a(instrs.GET_ITER), loop_body_instrs, side='left')\n    )\n\n    # Next is the GET_ITER instruction, which 
we don't need.\n    loop_body_instrs.popleft()\n\n    # Next is FOR_ITER, which is the jump target for Continue nodes.\n    top_of_loop = loop_body_instrs.popleft()\n\n    # This can be a STORE_* or an UNPACK_SEQUENCE followed by some number of\n    # stores.\n    target = make_assign_target(\n        loop_body_instrs.popleft(),\n        loop_body_instrs,\n        stack=[],\n    )\n\n    body, orelse_body = make_loop_body_and_orelse(\n        top_of_loop, loop_body_instrs, else_body_instrs, context\n    )\n\n    return ast.For(\n        target=target,\n        iter=iterator_expr,\n        body=body,\n        orelse=orelse_body,\n    )\n\n\ndef make_loop_body_and_orelse(top_of_loop, body_instrs, else_instrs, context):\n    \"\"\"\n    Make body and orelse lists for a for/while loop whose first instruction is\n    `top_of_loop`.\n\n    Parameters\n    ----------\n    top_of_loop : Instruction\n        The first instruction of the loop.  For a for-loop, this should always\n        be a FOR_ITER.  For a while loop, it's the first instruction of the\n        stack builders for the loop test expression.\n    body_instrs : deque\n        Queue of Instructions that form the body of the loop.  The last two\n        elements of body_instrs should be a JUMP_ABSOLUTE to `top_of_loop` and\n        a POP_BLOCK.\n    else_instrs : deque\n        Queue of Instructions that form the else block of the loop.  
Should be\n        an empty deque if there is no else block.\n    context : DecompilationContext\n\n    Returns\n    -------\n    body : list[ast.AST]\n        List of ast nodes forming the loop body.\n    orelse_body : list[ast.AST]\n        List of ast nodes forming the else-block body.\n    \"\"\"\n    # Remove the JUMP_ABSOLUTE and POP_BLOCK instructions at the bottom of the\n    # loop.\n    body_instrs.pop()\n    body_instrs.pop()\n    body = instrs_to_body(body_instrs, context.update(top_of_loop=top_of_loop))\n\n    if else_instrs:\n        else_body = instrs_to_body(else_instrs, context)\n    else:\n        else_body = []\n\n    return body, else_body\n\n\ndef make_while_loop(test_and_body_instrs, else_body_instrs, context):\n    \"\"\"\n    Make an ast.While node.\n\n    Parameters\n    ----------\n    test_and_body_instrs : deque\n        Queue of instructions forming the loop test expression and body.\n    else_body_instrs : deque\n        Queue of instructions forming the else block of the loop.\n    context : DecompilationContext\n    \"\"\"\n    top_of_loop = test_and_body_instrs[0]\n\n    # The popped elements are the stack_builders for the loop test expression.\n    # The top of the loop_body_instrs is either a POP_JUMP_IF_TRUE or a\n    # POP_JUMP_IF_FALSE.\n    test, body_instrs = make_while_loop_test_expr(test_and_body_instrs)\n    body, orelse_body = make_loop_body_and_orelse(\n        top_of_loop, body_instrs, else_body_instrs, context,\n    )\n\n    # while-else blocks are not yet supported or handled.\n    return ast.While(test=test, body=body, orelse=orelse_body)\n\n\ndef make_while_loop_test_expr(loop_body_instrs):\n    \"\"\"\n    Make an expression in the context of a while-loop test.\n\n    Code of the form::\n\n        while <expr>:\n            <body>\n\n    generates a POP_JUMP_IF_FALSE for the loop test, while code of the form::\n\n        while not <expr>:\n            <body>\n\n    generates a POP_JUMP_IF_TRUE for the loop 
test.\n\n    Code of the form::\n\n        while True:\n            <body>\n\n    generates no jumps at all.\n    \"\"\"\n    bottom_of_loop = loop_body_instrs[-1]\n    is_jump_to_bottom = compose(op.is_(bottom_of_loop), op.attrgetter('arg'))\n\n    # Consume instructions until we find a jump to the bottom of the loop.\n    test_builders = deque(\n        popwhile(complement(is_jump_to_bottom), loop_body_instrs, side='left')\n    )\n    # If we consumed the entire loop body without finding a jump, assume this\n    # is a while True loop.  Return the rest of the instructions as the loop\n    # body.\n    if not loop_body_instrs:\n        return ast.NameConstant(value=True), test_builders\n\n    # Top of the body is either a POP_JUMP_IF_TRUE or POP_JUMP_IF_FALSE.\n    jump = loop_body_instrs.popleft()\n    expr = make_expr(test_builders)\n    if isinstance(jump, instrs.POP_JUMP_IF_TRUE):\n        return ast.UnaryOp(op=ast.Not(), operand=expr), loop_body_instrs\n    else:\n        return expr, loop_body_instrs\n\n\ndef pop_loop_instrs(setup_loop_instr, queue):\n    \"\"\"\n    Determine whether setup_loop_instr is setting up a for-loop or a\n    while-loop.  Then pop the loop instructions from queue.\n\n    The easiest way to tell the difference is to look at the target of the\n    JUMP_ABSOLUTE instruction at the end of the loop.  If it jumps to a\n    FOR_ITER, then this is a for-loop.  
Otherwise it's a while-loop.\n\n    The jump we want to inspect is the first JUMP_ABSOLUTE instruction prior to\n    the jump target of `setup_loop_instr`.\n\n    Parameters\n    ----------\n    setup_loop_instr : instructions.SETUP_LOOP\n        First instruction of the loop being parsed.\n    queue : collections.deque\n        Queue of unprocessed instructions.\n\n    Returns\n    -------\n    loop_type : str, {'for', 'while'}\n        The kind of loop being constructed.\n    loop_instrs : deque\n        The instructions forming body of the loop.\n    else_instrs : deque\n        The instructions forming the else-block of the loop.\n\n    Side Effects\n    ------------\n    Pops all returned instructions from `queue`.\n    \"\"\"\n    # Grab everything from left side of the queue until the jump target of\n    # SETUP_LOOP.\n    body = popwhile(op.is_not(setup_loop_instr.arg), queue, side='left')\n\n    # Anything after the last POP_BLOCK instruction is the else-block.\n    else_body = popwhile(not_a(instrs.POP_BLOCK), body, side='right')\n\n    jump_to_top, pop_block = body[-2], body[-1]\n    if not isinstance(jump_to_top, instrs.JUMP_ABSOLUTE):\n        raise DecompilationError(\n            \"Penultimate instruction of loop body is \"\n            \"%s, not JUMP_ABSOLUTE.\" % jump_to_top,\n        )\n\n    if not isinstance(pop_block, instrs.POP_BLOCK):\n        raise DecompilationError(\n            \"Last instruction of loop body is \"\n            \"%s, not pop_block.\" % pop_block,\n        )\n\n    loop_expr = jump_to_top.arg\n    if isinstance(loop_expr, instrs.FOR_ITER):\n        return 'for', body, else_body\n    return 'while', body, else_body\n\n\ndef make_expr(stack_builders):\n    \"\"\"\n    Convert a sequence of instructions into AST expressions.\n    \"\"\"\n    return _make_expr(stack_builders.pop(), stack_builders)\n\n\n_BOOLOP_JUMP_TO_AST_OP = {\n    instrs.JUMP_IF_TRUE_OR_POP: ast.Or,\n    instrs.JUMP_IF_FALSE_OR_POP: 
ast.And,\n}\n_BOOLOP_JUMP_TYPES = tuple(_BOOLOP_JUMP_TO_AST_OP)\n\n\ndef _make_expr(toplevel, stack_builders):\n    \"\"\"\n    Override the single-dispatched make_expr with wrapper logic for handling\n    short-circuiting expressions.\n    \"\"\"\n    base_expr = _make_expr_internal(toplevel, stack_builders)\n    if not toplevel._next_target_of:\n        return base_expr\n\n    subexprs = deque([base_expr])\n    ops = deque([])\n    while stack_builders and stack_builders[-1] in toplevel._next_target_of:\n        jump = stack_builders.pop()\n        if not isinstance(jump, _BOOLOP_JUMP_TYPES):\n            raise DecompilationError(\n                \"Don't know how to decompile %s inside expression.\" % jump,\n            )\n        subexprs.appendleft(make_expr(stack_builders))\n        ops.appendleft(_BOOLOP_JUMP_TO_AST_OP[type(jump)]())\n\n    if len(subexprs) <= 1:\n        raise DecompilationError(\n            \"Expected at least one JUMP instruction before expression.\"\n        )\n\n    return normalize_boolop(make_boolop(subexprs, ops))\n\n\ndef make_boolop(exprs, op_types):\n    \"\"\"\n    Parameters\n    ----------\n    exprs : deque\n    op_types : deque[{ast.And, ast.Or}]\n    \"\"\"\n    if len(op_types) > 1:\n        return ast.BoolOp(\n            op=op_types.popleft(),\n            values=[exprs.popleft(), make_boolop(exprs, op_types)],\n        )\n\n    assert len(exprs) == 2\n    return ast.BoolOp(op=op_types.popleft(), values=list(exprs))\n\n\ndef normalize_boolop(expr):\n    \"\"\"\n    Normalize a boolop by folding together nested And/Or exprs.\n    \"\"\"\n    optype = expr.op\n    newvalues = []\n    for subexpr in expr.values:\n        if not isinstance(subexpr, ast.BoolOp):\n            newvalues.append(subexpr)\n        elif type(subexpr.op) != type(optype):\n            newvalues.append(normalize_boolop(subexpr))\n        else:\n            # Normalize subexpression, then inline its values into the\n            # top-level subexpr.\n   
         newvalues.extend(normalize_boolop(subexpr).values)\n    return ast.BoolOp(op=optype, values=newvalues)\n\n\n@singledispatch\ndef _make_expr_internal(toplevel, stack_builders):\n    raise DecompilationError(\n        \"Don't know how to build expression for %s\" % toplevel\n    )\n\n\n@_make_expr_internal.register(instrs.MAKE_FUNCTION)\n@_make_expr_internal.register(instrs.MAKE_CLOSURE)\ndef _make_lambda(toplevel, stack_builders):\n    load_name = stack_builders.pop()\n    load_code = stack_builders.pop()\n    _check_make_function_instrs(\n        load_code,\n        load_name,\n        toplevel,\n        expect_lambda=True,\n    )\n\n    co = load_code.arg\n    args, kwonly, varargs, varkwargs = paramnames(co)\n    defaults, kw_defaults, annotations = make_defaults_and_annotations(\n        toplevel,\n        stack_builders,\n    )\n    if annotations:\n        raise DecompilationError(\n            \"Unexpected annotations while building lambda: %s\" % annotations\n        )\n\n    if isinstance(toplevel, instrs.MAKE_CLOSURE):\n        # There should be a tuple of closure cells still on the stack here.\n        # These don't appear in the AST, but we need to consume them to ensure\n        # correctness down the line.\n        _closure_cells = make_closure_cells(stack_builders)  # noqa\n\n    body = pycode_to_body(co, DecompilationContext(in_lambda=True))\n    if len(body) != 1:\n        raise DecompilationError(\n            \"Got multiple expresssions for lambda: %s\" % body,\n        )\n    body = body[0]\n\n    return ast.Lambda(\n        args=make_function_arguments(\n            args,\n            kwonly,\n            varargs,\n            varkwargs,\n            defaults,\n            kw_defaults,\n            annotations,\n        ),\n        body=body,\n    )\n\n\n@_make_expr_internal.register(instrs.UNARY_NOT)\ndef _make_expr_unary_not(toplevel, stack_builders):\n    return ast.UnaryOp(\n        op=ast.Not(),\n        
operand=make_expr(stack_builders),\n    )\n\n\n@_make_expr_internal.register(instrs.CALL_FUNCTION)\ndef _make_expr_call_function(toplevel, stack_builders):\n    keywords = make_call_keywords(stack_builders, toplevel.keyword)\n    positionals = make_call_positionals(stack_builders, toplevel.positional)\n    return ast.Call(\n        func=make_expr(stack_builders),\n        args=positionals,\n        keywords=keywords,\n        starargs=None,\n        kwargs=None,\n    )\n\n\n@_make_expr_internal.register(instrs.CALL_FUNCTION_VAR)\ndef _make_expr_call_function_var(toplevel, stack_builders):\n    starargs = make_expr(stack_builders)\n    keywords = make_call_keywords(stack_builders, toplevel.keyword)\n    positionals = make_call_positionals(stack_builders, toplevel.positional)\n    return ast.Call(\n        func=make_expr(stack_builders),\n        args=positionals,\n        keywords=keywords,\n        starargs=starargs,\n        kwargs=None,\n    )\n\n\n@_make_expr_internal.register(instrs.CALL_FUNCTION_KW)\ndef _make_expr_call_function_kw(toplevel, stack_builders):\n    kwargs = make_expr(stack_builders)\n    keywords = make_call_keywords(stack_builders, toplevel.keyword)\n    positionals = make_call_positionals(stack_builders, toplevel.positional)\n    return ast.Call(\n        func=make_expr(stack_builders),\n        args=positionals,\n        keywords=keywords,\n        starargs=None,\n        kwargs=kwargs,\n    )\n\n\n@_make_expr_internal.register(instrs.CALL_FUNCTION_VAR_KW)\ndef _make_expr_call_function_var_kw(toplevel, stack_builders):\n    kwargs = make_expr(stack_builders)\n    starargs = make_expr(stack_builders)\n    keywords = make_call_keywords(stack_builders, toplevel.keyword)\n    positionals = make_call_positionals(stack_builders, toplevel.positional)\n    return ast.Call(\n        func=make_expr(stack_builders),\n        args=positionals,\n        keywords=keywords,\n        starargs=starargs,\n        kwargs=kwargs,\n    )\n\n\ndef 
make_call_keywords(stack_builders, count):\n    \"\"\"\n    Make the keywords entry for an ast.Call node.\n    \"\"\"\n    out = []\n    for _ in range(count):\n        value = make_expr(stack_builders)\n        load_kwname = stack_builders.pop()\n        if not isinstance(load_kwname, instrs.LOAD_CONST):\n            raise DecompilationError(\n                \"Expected a LOAD_CONST, but got %r\" % load_kwname\n            )\n        if not isinstance(load_kwname.arg, str):\n            raise DecompilationError(\n                \"Expected LOAD_CONST of a str, but got %r.\" % load_kwname,\n            )\n        out.append(ast.keyword(arg=load_kwname.arg, value=value))\n    out.reverse()\n    return out\n\n\ndef make_call_positionals(stack_builders, count):\n    \"\"\"\n    Make the args entry for an ast.Call node.\n    \"\"\"\n    out = [make_expr(stack_builders) for _ in range(count)]\n    out.reverse()\n    return out\n\n\n@_make_expr_internal.register(instrs.BUILD_TUPLE)\ndef _make_expr_tuple(toplevel, stack_builders):\n    return ast.Tuple(\n        ctx=ast.Load(),\n        elts=make_exprs(stack_builders, toplevel.arg),\n    )\n\n\n@_make_expr_internal.register(instrs.BUILD_SET)\ndef _make_expr_set(toplevel, stack_builders):\n    return ast.Set(\n        ctx=ast.Load(),\n        elts=make_exprs(stack_builders, toplevel.arg),\n    )\n\n\n@_make_expr_internal.register(instrs.BUILD_LIST)\ndef _make_expr_list(toplevel, stack_builders):\n    return ast.List(\n        ctx=ast.Load(),\n        elts=make_exprs(stack_builders, toplevel.arg),\n    )\n\n\ndef make_exprs(stack_builders, count):\n    \"\"\"\n    Make elements of set/list/tuple literal.\n    \"\"\"\n    exprs = [make_expr(stack_builders) for _ in range(count)]\n    # Elements are on the stack from right to left, but we want them from left\n    # to right.\n    exprs.reverse()\n    return exprs\n\n\n@_make_expr_internal.register(instrs.BUILD_MAP)\ndef _make_expr_empty_dict(toplevel, stack_builders):\n    
\"\"\"\n    This should only be hit for empty dicts.  Anything else should hit the\n    STORE_MAP handler instead.\n    \"\"\"\n    if toplevel.arg:\n        raise DecompilationError(\n            \"make_expr() called with nonzero BUILD_MAP arg %d\" % toplevel.arg\n        )\n\n    if stack_builders:\n        raise DecompilationError(\n            \"Unexpected stack_builders for BUILD_MAP(0): %s\" % stack_builders\n        )\n    return ast.Dict(keys=[], values=[])\n\n\n@_make_expr_internal.register(instrs.STORE_MAP)\ndef _make_expr_dict(toplevel, stack_builders):\n\n    # Push toplevel back onto the stack so that it gets correctly consumed by\n    # `_make_dict_elems`.\n    stack_builders.append(toplevel)\n\n    build_map = find_build_map(stack_builders)\n    dict_builders = popwhile(\n        op.is_not(build_map), stack_builders, side='right'\n    )\n\n    # Consume the BUILD_MAP instruction.\n    _build_map = stack_builders.pop()\n    assert _build_map is build_map\n\n    keys, values = _make_dict_elems(build_map, dict_builders)\n    return ast.Dict(keys=keys, values=values)\n\n\ndef find_build_map(stack_builders):\n    \"\"\"\n    Find the BUILD_MAP instruction for which the last element of\n    ``stack_builders`` is a store.\n    \"\"\"\n    assert isinstance(stack_builders[-1], instrs.STORE_MAP)\n\n    to_consume = 0\n    for instr in reversed(stack_builders):\n        if isinstance(instr, instrs.STORE_MAP):\n            # NOTE: This branch should always be hit on the first iteration.\n            to_consume += 1\n        elif isinstance(instr, instrs.BUILD_MAP):\n            to_consume -= instr.arg\n            if to_consume <= 0:\n                return instr\n    else:\n        raise DecompilationError(\n            \"Couldn't find BUILD_MAP for last element of %s.\" % stack_builders\n        )\n\n\ndef _make_dict_elems(build_instr, builders):\n    \"\"\"\n    Return a list of keys and a list of values for the dictionary literal\n    generated by 
``build_instr``.\n    \"\"\"\n    keys = []\n    values = []\n    for _ in range(build_instr.arg):\n        popped = builders.pop()\n        if not isinstance(popped, instrs.STORE_MAP):\n            raise DecompilationError(\n                \"Expected a STORE_MAP but got %s\" % popped\n            )\n\n        keys.append(make_expr(builders))\n        values.append(make_expr(builders))\n\n    # Keys and values are emitted in reverse order of how they appear in the\n    # AST.\n    keys.reverse()\n    values.reverse()\n    return keys, values\n\n\n@_make_expr_internal.register(instrs.LOAD_DEREF)\n@_make_expr_internal.register(instrs.LOAD_NAME)\n@_make_expr_internal.register(instrs.LOAD_CLOSURE)\n@_make_expr_internal.register(instrs.LOAD_FAST)\n@_make_expr_internal.register(instrs.LOAD_GLOBAL)\ndef _make_expr_name(toplevel, stack_builders):\n    return ast.Name(id=toplevel.arg, ctx=ast.Load())\n\n\n@_make_expr_internal.register(instrs.LOAD_ATTR)\ndef _make_expr_attr(toplevel, stack_builders):\n    return ast.Attribute(\n        value=make_expr(stack_builders),\n        attr=toplevel.arg,\n        ctx=ast.Load(),\n    )\n\n\n@_make_expr_internal.register(instrs.BINARY_SUBSCR)\ndef _make_expr_getitem(toplevel, stack_builders):\n    slice_ = make_slice(stack_builders)\n    value = make_expr(stack_builders)\n    return ast.Subscript(slice=slice_, value=value, ctx=ast.Load())\n\n\ndef make_slice(stack_builders):\n    \"\"\"\n    Make an expression in the context of a slice.\n\n    This mostly delegates to _make_expr, but wraps nodes in `ast.Index` or\n    `ast.Slice` as appropriate.\n    \"\"\"\n    return _make_slice(stack_builders.pop(), stack_builders)\n\n\n@singledispatch\ndef _make_slice(toplevel, stack_builders):\n    return ast.Index(_make_expr(toplevel, stack_builders))\n\n\n@_make_slice.register(instrs.BUILD_SLICE)\ndef make_slice_build_slice(toplevel, stack_builders):\n    return _make_expr(toplevel, 
stack_builders)\n\n\n@_make_slice.register(instrs.BUILD_TUPLE)\ndef make_slice_tuple(toplevel, stack_builders):\n    slice_ = _make_expr(toplevel, stack_builders)\n    if isinstance(slice_, ast.Tuple):\n        # a = b[c, d] generates Index(value=Tuple(...))\n        # a = b[c:, d] generates ExtSlice(dims=[Slice(...), Index(...)])\n        slice_ = normalize_tuple_slice(slice_)\n    return slice_\n\n\ndef normalize_tuple_slice(node):\n    \"\"\"\n    Normalize an ast.Tuple node representing the internals of a slice.\n\n    Returns the node wrapped in an ast.Index.\n    Returns an ExtSlice node built from the tuple elements if there are any\n    slices.\n    \"\"\"\n    if not any(isinstance(elt, ast.Slice) for elt in node.elts):\n        return ast.Index(value=node)\n\n    return ast.ExtSlice(\n        [\n            # Wrap non-Slice nodes in Index nodes.\n            elt if isinstance(elt, ast.Slice) else ast.Index(value=elt)\n            for elt in node.elts\n        ]\n    )\n\n\n@_make_expr_internal.register(instrs.BUILD_SLICE)\ndef _make_expr_build_slice(toplevel, stack_builders):\n    # Arg is always either 2 or 3.  
If it's 3, then the first expression is the\n    # step value.\n    if toplevel.arg == 3:\n        step = make_expr(stack_builders)\n    else:\n        step = None\n\n    def normalize_empty_slice(node):\n        \"\"\"\n        Convert LOAD_CONST(None) to just None.\n\n        This normalizes slices of the form a[b:None] to just a[b:].\n        \"\"\"\n        if isinstance(node, ast.NameConstant) and node.value is None:\n            return None\n        return node\n\n    upper = normalize_empty_slice(make_expr(stack_builders))\n    lower = normalize_empty_slice(make_expr(stack_builders))\n\n    return ast.Slice(lower=lower, upper=upper, step=step)\n\n\n@_make_expr_internal.register(instrs.LOAD_CONST)\ndef _make_expr_const(toplevel, stack_builders):\n    return _make_const(toplevel.arg)\n\n\n@singledispatch\ndef _make_const(const):\n    raise DecompilationError(\n        \"Don't know how to make constant node for %r.\" % (const,)\n    )\n\n\n@_make_const.register(float)\n@_make_const.register(complex)\n@_make_const.register(int)\ndef _make_const_number(const):\n    return ast.Num(n=const)\n\n\n@_make_const.register(str)\ndef _make_const_str(const):\n    return ast.Str(s=const)\n\n\n@_make_const.register(bytes)\ndef _make_const_bytes(const):\n    return ast.Bytes(s=const)\n\n\n@_make_const.register(tuple)\ndef _make_const_tuple(const):\n    return ast.Tuple(elts=list(map(_make_const, const)), ctx=ast.Load())\n\n\n@_make_const.register(type(None))\ndef _make_const_none(none):\n    return ast.NameConstant(value=None)\n\n\nbinops = frozenset([\n    (instrs.BINARY_ADD, ast.Add),\n    (instrs.BINARY_SUBTRACT, ast.Sub),\n    (instrs.BINARY_MULTIPLY, ast.Mult),\n    (instrs.BINARY_POWER, ast.Pow),\n    (instrs.BINARY_TRUE_DIVIDE, ast.Div),\n    (instrs.BINARY_FLOOR_DIVIDE, ast.FloorDiv),\n    (instrs.BINARY_MODULO, ast.Mod),\n    (instrs.BINARY_LSHIFT, ast.LShift),\n    (instrs.BINARY_RSHIFT, ast.RShift),\n    (instrs.BINARY_AND, ast.BitAnd),\n    (instrs.BINARY_XOR, 
ast.BitXor),\n    (instrs.BINARY_OR, ast.BitOr),\n])\n\n\ndef _binop_handler(nodetype):\n    \"\"\"\n    Factory function for binary operator handlers.\n    \"\"\"\n    def _handler(toplevel, stack_builders):\n        right = make_expr(stack_builders)\n        left = make_expr(stack_builders)\n        return ast.BinOp(left=left, op=nodetype(), right=right)\n    return _handler\n\n\nfor instrtype, nodetype in binops:\n    _process_instr.register(instrtype)(_push)\n    _make_expr_internal.register(instrtype)(_binop_handler(nodetype))\n\n\ndef make_function(function_builders, *, closure):\n    \"\"\"\n    Construct a FunctionDef AST node from a sequence of the form:\n\n    LOAD_CLOSURE, N times (when handling MAKE_CLOSURE)\n    BUILD_TUPLE(N) (when handling MAKE_CLOSURE)\n    <decorator builders> (optional)\n    <default builders>, (optional)\n    <annotation builders> (optional)\n    LOAD_CONST(<tuple of annotated names>) (optional)\n    LOAD_CONST(code),\n    LOAD_CONST(name),\n    MAKE_FUNCTION | MAKE_CLOSURE\n    <decorator calls> (optional)\n    \"\"\"\n    decorator_calls = deque()\n    while isinstance(function_builders[-1], instrs.CALL_FUNCTION):\n        decorator_calls.appendleft(function_builders.pop())\n\n    *builders, load_code_instr, load_name_instr, make_function_instr = (\n        function_builders\n    )\n\n    _check_make_function_instrs(\n        load_code_instr, load_name_instr, make_function_instr,\n    )\n\n    co = load_code_instr.arg\n    name = load_name_instr.arg\n    args, kwonly, varargs, varkwargs = paramnames(co)\n\n    # Convert default and annotation builders to AST nodes.\n    defaults, kw_defaults, annotations = make_defaults_and_annotations(\n        make_function_instr,\n        builders,\n    )\n\n    # Convert decorator function builders.  
The stack is in reverse order.\n    decorators = [make_expr(builders) for _ in decorator_calls]\n    decorators.reverse()\n\n    if closure:\n        # There should be a tuple of closure cells still on the stack here.\n        # These don't appear in the AST, but we need to consume them to ensure\n        # correctness down the line.\n        closure_cells = make_closure_cells(builders)  # noqa\n\n    # We should have consumed all our builders by this point.\n    if builders:\n        raise DecompilationError(\n            \"Unexpected leftover builders for %s: %s.\" % (\n                make_function_instr, builders\n            )\n        )\n\n    return ast.FunctionDef(\n        body_code=co,\n        name=name.split('.')[-1],\n        args=make_function_arguments(\n            args,\n            kwonly,\n            varargs,\n            varkwargs,\n            defaults,\n            kw_defaults,\n            annotations,\n        ),\n        body=pycode_to_body(co, DecompilationContext(in_function_block=True)),\n        decorator_list=decorators,\n        returns=annotations.get('return'),\n    )\n\n\ndef make_function_arguments(args,\n                            kwonly,\n                            varargs,\n                            varkwargs,\n                            defaults,\n                            kw_defaults,\n                            annotations):\n    \"\"\"\n    Make an ast.arguments from the args parsed out of a code object.\n    \"\"\"\n    return ast.arguments(\n        args=[ast.arg(arg=a, annotation=annotations.get(a)) for a in args],\n        kwonlyargs=[\n            ast.arg(arg=a, annotation=annotations.get(a)) for a in kwonly\n        ],\n        defaults=defaults,\n        kw_defaults=list(map(kw_defaults.get, kwonly)),\n        vararg=None if varargs is None else ast.arg(\n            arg=varargs, annotation=annotations.get(varargs),\n        ),\n        kwarg=None if varkwargs is None else ast.arg(\n            
arg=varkwargs, annotation=annotations.get(varkwargs)\n        ),\n    )\n\n\ndef make_closure_cells(stack_builders):\n    cells = make_expr(stack_builders)\n    if not isinstance(cells, ast.Tuple):\n        raise DecompilationError(\n            \"Expected an ast.Tuple of closure cells, \"\n            \"but got %s\" % cells,\n        )\n    return cells\n\n\ndef make_global_and_nonlocal_decls(code_instrs):\n    \"\"\"\n    Find all STORE_GLOBAL and STORE_DEREF instructions in `instrs` and convert\n    them into a canonical list of `ast.Global` and `ast.Nonlocal` declarations.\n    \"\"\"\n    globals_ = sorted(set(\n        i.arg for i in code_instrs if isinstance(i, instrs.STORE_GLOBAL)\n    ))\n    nonlocals = sorted(set(\n        i.arg for i in code_instrs\n        if isinstance(i, instrs.STORE_DEREF) and i.vartype == 'free'\n    ))\n\n    out = []\n    if globals_:\n        out.append(ast.Global(names=globals_))\n    if nonlocals:\n        out.append(ast.Nonlocal(names=nonlocals))\n    return out\n\n\ndef make_defaults_and_annotations(make_function_instr, builders):\n    \"\"\"\n    Get the AST expressions corresponding to the defaults, kwonly defaults, and\n    annotations for a function created by `make_function_instr`.\n    \"\"\"\n    # Integer counts.\n    n_defaults, n_kwonlydefaults, n_annotations = unpack_make_function_arg(\n        make_function_instr.arg\n    )\n    if n_annotations:\n        # TOS should be a tuple of annotation names.\n        load_annotation_names = builders.pop()\n        annotations = dict(zip(\n            reversed(load_annotation_names.arg),\n            (make_expr(builders) for _ in range(n_annotations - 1))\n        ))\n    else:\n        annotations = {}\n\n    kwonlys = {}\n    while n_kwonlydefaults:\n        default_expr = make_expr(builders)\n        key_instr = builders.pop()\n        if not isinstance(key_instr, instrs.LOAD_CONST):\n            raise DecompilationError(\n                \"kwonlydefault key is not a 
LOAD_CONST: %s\" % key_instr\n            )\n        if not isinstance(key_instr.arg, str):\n            raise DecompilationError(\n                \"kwonlydefault key builder is not a \"\n                \"'LOAD_CONST of a string: %s\" % key_instr\n            )\n\n        kwonlys[key_instr.arg] = default_expr\n        n_kwonlydefaults -= 1\n\n    defaults = make_exprs(builders, n_defaults)\n    return defaults, kwonlys, annotations\n\n\ndef unpack_make_function_arg(arg):\n    \"\"\"\n    Unpack the argument to a MAKE_FUNCTION instruction.\n\n    Parameters\n    ----------\n    arg : int\n        The argument to a MAKE_FUNCTION instruction.\n\n    Returns\n    -------\n    num_defaults, num_kwonly_default_pairs, num_annotations\n\n    See Also\n    --------\n    https://docs.python.org/3/library/dis.html#opcode-MAKE_FUNCTION\n    \"\"\"\n    return arg & 0xFF, (arg >> 8) & 0xFF, (arg >> 16) & 0x7FFF\n\n\ndef _check_make_function_instrs(load_code_instr,\n                                load_name_instr,\n                                make_function_instr,\n                                *,\n                                expect_lambda=False):\n    \"\"\"\n    Validate the instructions passed to a make_function call.\n    \"\"\"\n\n    # Validate load_code_instr.\n    if not isinstance(load_code_instr, instrs.LOAD_CONST):\n        raise TypeError(\n            \"make_function expected 'load_code_instr` to be a \"\n            \"LOAD_CONST, but got %s\" % load_code_instr,\n        )\n    if not isinstance(load_code_instr.arg, types.CodeType):\n        raise TypeError(\n            \"make_function expected load_code_instr \"\n            \"to load a code object, but got %s\" % load_code_instr.arg,\n        )\n\n    # Validate load_name_instr\n    if not isinstance(load_name_instr, instrs.LOAD_CONST):\n        raise TypeError(\n            \"make_function expected 'load_name_instr` to be a \"\n            \"LOAD_CONST, but got %s\" % load_code_instr,\n        )\n\n   
 if not isinstance(load_name_instr.arg, str):\n        raise TypeError(\n            \"make_function expected load_name_instr \"\n            \"to load a string, but got %r instead\" % load_name_instr.arg\n        )\n\n    # This is an endswith rather than '==' because the arg is the\n    # fully-qualified name.\n    is_lambda = is_lambda_name(load_name_instr.arg)\n    if expect_lambda and not is_lambda:\n        raise ValueError(\n            \"Expected to make a function named <lambda>, but \"\n            \"got %r instead.\" % load_name_instr.arg\n        )\n    if not expect_lambda and is_lambda:\n        raise ValueError(\"Unexpectedly received lambda function.\")\n\n    # Validate make_function_instr\n    if not isinstance(make_function_instr, (instrs.MAKE_FUNCTION,\n                                            instrs.MAKE_CLOSURE)):\n        raise TypeError(\n            \"make_function expected a MAKE_FUNCTION or MAKE_CLOSURE\"\n            \"instruction, but got %s instead.\" % make_function_instr\n        )\n\n\ndef pop_arguments(instr, stack):\n    \"\"\"\n    Pop instructions off `stack` until we pop all instructions that will\n    produce values popped by `instr`.\n    \"\"\"\n    needed = instr.stack_effect\n    if needed >= 0:\n        raise DecompilationError(\n            \"%s is does not have a negative stack effect\" % instr\n        )\n\n    for popcount, to_pop in enumerate(reversed(stack), start=1):\n        needed += to_pop.stack_effect\n        if not needed:\n            break\n    else:\n        raise DecompilationError(\n            \"Reached end of stack without finding inputs to %s\" % instr,\n        )\n\n    popped = stack[-popcount:]\n    stack[:] = stack[:-popcount]\n\n    return popped\n\n\ndef _check_stack_for_module_return(stack):\n    \"\"\"\n    Verify that the stack is in the expected state before the dummy\n    RETURN_VALUE instruction of a module or class.\n    \"\"\"\n    fail = (\n        len(stack) != 1\n        or not 
isinstance(stack[0], instrs.LOAD_CONST)\n        or stack[0].arg is not None\n    )\n\n    if fail:\n        raise DecompilationError(\n            \"Reached end of non-function code \"\n            \"block with unexpected stack: %s.\" % stack\n        )\n\n\ndef expect(instr, expected, context):\n    \"\"\"\n    Check that an instruction is of the expected type.\n    \"\"\"\n    if not isinstance(instr, expected):\n        raise DecompilationError(\n            \"Expected a {expected} instruction {context}. Got {instr}.\".format(\n                instr=instr, expected=expected, context=context,\n            )\n        )\n    return instr\n\n\ndef is_lambda_name(name):\n    \"\"\"\n    Check if `name` is the name of lambda function.\n    \"\"\"\n    return name.endswith('<lambda>')\n\n\ndef popwhile(cond, queue, *, side):\n    \"\"\"\n    Pop elements off a queue while `cond(nextelem)` is True.\n\n    Parameters\n    ----------\n    cond : predicate\n    queue : deque\n    side : {'left', 'right'}\n\n    Returns\n    -------\n    popped : deque\n\n    Examples\n    --------\n    >>> from collections import deque\n    >>> d = deque([1, 2, 3, 2, 1])\n    >>> popwhile(lambda x: x < 3, d, side='left')\n    deque([1, 2])\n    >>> d\n    deque([3, 2, 1])\n    >>> popwhile(lambda x: x < 3, d, side='right')\n    deque([2, 1])\n    >>> d\n    deque([3])\n    \"\"\"\n    if side not in ('left', 'right'):\n        raise ValueError(\"`side` must be one of 'left' or 'right'\")\n\n    out = deque()\n\n    if side == 'left':\n        popnext = queue.popleft\n        pushnext = out.append\n        nextidx = 0\n    else:\n        popnext = queue.pop\n        pushnext = out.appendleft\n        nextidx = -1\n\n    while queue:\n        if not cond(queue[nextidx]):\n            break\n        pushnext(popnext())\n    return out\n\n\ndef _current_test():\n    \"\"\"\n    Get the string passed to the currently running call to\n    `test_decompiler.check.`\n\n    This is intended for use 
in debugging tests.  It should never be called in\n    real code.\n    \"\"\"\n    from codetransformer.tests.test_decompiler import _current_test as ct\n    return ct\n"
  },
  {
    "path": "codetransformer/decompiler/__init__.py",
    "content": "import sys\n\nfrom ..code import Flag\n\n\ndef paramnames(co):\n    \"\"\"\n    Get the parameter names from a pycode object.\n\n    Returns a 4-tuple of (args, kwonlyargs, varargs, varkwargs).\n    varargs and varkwargs will be None if the function doesn't take *args or\n    **kwargs, respectively.\n    \"\"\"\n    flags = co.co_flags\n    varnames = co.co_varnames\n\n    argcount, kwonlyargcount = co.co_argcount, co.co_kwonlyargcount\n    total = argcount + kwonlyargcount\n\n    args = varnames[:argcount]\n    kwonlyargs = varnames[argcount:total]\n    varargs, varkwargs = None, None\n    if flags & Flag.CO_VARARGS:\n        varargs = varnames[total]\n        total += 1\n    if flags & Flag.CO_VARKEYWORDS:\n        varkwargs = varnames[total]\n\n    return args, kwonlyargs, varargs, varkwargs\n\n\nif sys.version_info[:3] == (3, 4, 3):\n    from ._343 import *  # noqa\n"
  },
  {
    "path": "codetransformer/instructions.py",
    "content": "from abc import ABCMeta, abstractmethod\nfrom dis import opname, opmap, hasjabs, hasjrel, HAVE_ARGUMENT, stack_effect\nfrom enum import (\n    IntEnum,\n    unique,\n)\nfrom operator import attrgetter\nfrom re import escape\n\nfrom .patterns import matchable\nfrom .utils.immutable import immutableattr\nfrom .utils.no_default import no_default\n\n\n__all__ = ['Instruction'] + sorted(list(opmap))\n\n# The instructions that use the co_names tuple.\n_uses_name = frozenset({\n    'DELETE_ATTR',\n    'DELETE_GLOBAL',\n    'DELETE_NAME',\n    'IMPORT_FROM',\n    'IMPORT_NAME',\n    'LOAD_ATTR',\n    'LOAD_GLOBAL',\n    'LOAD_NAME',\n    'STORE_ATTR',\n    'STORE_GLOBAL',\n    'STORE_NAME',\n})\n# The instructions that use the co_varnames tuple.\n_uses_varname = frozenset({\n    'LOAD_FAST',\n    'STORE_FAST',\n    'DELETE_FAST',\n})\n# The instructions that use the co_freevars tuple.\n_uses_free = frozenset({\n    'DELETE_DEREF',\n    'LOAD_CLASSDEREF',\n    'LOAD_CLOSURE',\n    'LOAD_DEREF',\n    'STORE_DEREF',\n})\n\n\ndef _notimplemented(name):\n    @property\n    @abstractmethod\n    def _(self):\n        raise NotImplementedError(name)\n    return _\n\n\n@property\ndef _vartype(self):\n    try:\n        return self._vartype\n    except AttributeError:\n        raise AttributeError(\n            \"vartype is not available on instructions \"\n            \"constructed outside of a Code object.\"\n        )\n\n\nclass InstructionMeta(ABCMeta, matchable):\n    _marker = object()  # sentinel\n    _type_cache = {}\n\n    def __init__(self, *args, opcode=None):\n        return super().__init__(*args)\n\n    def __new__(mcls, name, bases, dict_, *, opcode=None):\n        try:\n            return mcls._type_cache[opcode]\n        except KeyError:\n            pass\n\n        if len(bases) != 1:\n            raise TypeError(\n                '{} does not support multiple inheritance'.format(\n                    mcls.__name__,\n                ),\n            
)\n\n        if bases[0] is mcls._marker:\n            dict_['_reprname'] = immutableattr(name)\n            for attr in ('absjmp', 'have_arg', 'opcode', 'opname', 'reljmp'):\n                dict_[attr] = _notimplemented(attr)\n            return super().__new__(mcls, name, (object,), dict_)\n\n        if opcode not in opmap.values():\n            raise TypeError('Invalid opcode: {}'.format(opcode))\n\n        opname_ = opname[opcode]\n        dict_['opname'] = dict_['_reprname'] = immutableattr(opname_)\n        dict_['opcode'] = immutableattr(opcode)\n\n        absjmp = opcode in hasjabs\n        reljmp = opcode in hasjrel\n        dict_['absjmp'] = immutableattr(absjmp)\n        dict_['reljmp'] = immutableattr(reljmp)\n        dict_['is_jmp'] = immutableattr(absjmp or reljmp)\n\n        dict_['uses_name'] = immutableattr(opname_ in _uses_name)\n        dict_['uses_varname'] = immutableattr(opname_ in _uses_varname)\n        dict_['uses_free'] = immutableattr(opname_ in _uses_free)\n        if opname_ in _uses_free:\n            dict_['vartype'] = _vartype\n\n        dict_['have_arg'] = immutableattr(opcode >= HAVE_ARGUMENT)\n\n        cls = mcls._type_cache[opcode] = super().__new__(\n            mcls, opname[opcode], bases, dict_,\n        )\n        return cls\n\n    def mcompile(self):\n        return escape(bytes((self.opcode,)))\n\n    def __repr__(self):\n        return self._reprname\n    __str__ = __repr__\n\n\nclass Instruction(InstructionMeta._marker, metaclass=InstructionMeta):\n    \"\"\"\n    Base class for all instruction types.\n\n    Parameters\n    ----------\n    arg : any, optional\n\n        The argument for the instruction. 
This should be the actual value of\n        the argument, for example, if this is a\n        :class:`~codetransformer.instructions.LOAD_CONST`, use the constant\n        value, not the index that would appear in the bytecode.\n    \"\"\"\n    _no_arg = no_default\n\n    def __init__(self, arg=_no_arg):\n        if self.have_arg and arg is self._no_arg:\n            raise TypeError(\n                \"{} missing 1 required argument: 'arg'\".format(self.opname),\n            )\n        self.arg = self._normalize_arg(arg)\n        self._target_of = set()\n        self._stolen_by = None  # used for lnotab recalculation\n\n    def __repr__(self):\n        arg = self.arg\n        return '{op}{arg}'.format(\n            op=self.opname,\n            arg='(%r)' % arg if self.arg is not self._no_arg else '',\n        )\n\n    @staticmethod\n    def _normalize_arg(arg):\n        return arg\n\n    def steal(self, instr):\n        \"\"\"Steal the jump index off of `instr`.\n\n        This makes anything that would have jumped to `instr` jump to\n        this Instruction instead.\n\n        Parameters\n        ----------\n        instr : Instruction\n            The instruction to steal the jump sources from.\n\n        Returns\n        -------\n        self : Instruction\n            The instruction that owns this method.\n\n        Notes\n        -----\n        This mutates self and ``instr`` inplace.\n        \"\"\"\n        instr._stolen_by = self\n        for jmp in instr._target_of:\n            jmp.arg = self\n        self._target_of = instr._target_of\n        instr._target_of = set()\n        return self\n\n    @classmethod\n    def from_opcode(cls, opcode, arg=_no_arg):\n        \"\"\"\n        Create an instruction from an opcode and raw argument.\n\n        Parameters\n        ----------\n        opcode : int\n            Opcode for the instruction to create.\n        arg : int, optional\n            The argument for the instruction.\n\n        Returns\n        
-------\n        intsr : Instruction\n            An instance of the instruction named by ``opcode``.\n        \"\"\"\n        return type(cls)(opname[opcode], (cls,), {}, opcode=opcode)(arg)\n\n    @property\n    def stack_effect(self):\n        \"\"\"\n        The net effect of executing this instruction on the interpreter stack.\n\n        Instructions that pop values off the stack have negative stack effect\n        equal to the number of popped values.\n\n        Instructions that push values onto the stack have positive stack effect\n        equal to the number of popped values.\n\n        Examples\n        --------\n        - LOAD_{FAST,NAME,GLOBAL,DEREF} push one value onto the stack.\n          They have a stack_effect of 1.\n        - POP_JUMP_IF_{TRUE,FALSE} always pop one value off the stack.\n          They have a stack effect of -1.\n        - BINARY_* instructions pop two instructions off the stack, apply a\n          binary operator, and push the resulting value onto the stack.\n          They have a stack effect of -1 (-2 values consumed + 1 value pushed).\n        \"\"\"\n        if self.opcode == NOP.opcode:  # noqa\n            # dis.stack_effect is broken here\n            return 0\n\n        return stack_effect(\n            self.opcode,\n            *((self.arg if isinstance(self.arg, int) else 0,)\n              if self.have_arg else ())\n        )\n\n    def equiv(self, instr):\n        \"\"\"Check equivalence of instructions. This checks against the types\n        and the arguments of the instructions\n\n        Parameters\n        ----------\n        instr : Instruction\n            The instruction to check against.\n\n        Returns\n        -------\n        is_equiv : bool\n            If the instructions are equivalent.\n\n        Notes\n        -----\n        This is a separate concept from instruction identity. 
Two separate\n        instructions can be equivalent without being the same exact instance.\n        This means that two equivalent instructions can be at different points\n        in the bytecode or be targeted by different jumps.\n        \"\"\"\n        return type(self) == type(instr) and self.arg == instr.arg\n\n\nclass _RawArg(int):\n    \"\"\"A class to hold arguments that are not yet initialized so that they\n    don't break subclass's type checking code.\n\n    This is used in the first pass of instruction creating in Code.from_pycode.\n    \"\"\"\n\n\ndef _mk_call_init(class_):\n    \"\"\"Create an __init__ function for a call type instruction.\n\n    Parameters\n    ----------\n    class_ : type\n        The type to bind the function to.\n\n    Returns\n    -------\n    __init__ : callable\n        The __init__ method for the class.\n    \"\"\"\n    def __init__(self, packed=no_default, *, positional=0, keyword=0):\n        if packed is no_default:\n            arg = int.from_bytes(bytes((positional, keyword)), 'little')\n        elif not positional and not keyword:\n            arg = packed\n        else:\n            raise TypeError('cannot specify packed and unpacked arguments')\n        self.positional, self.keyword = arg.to_bytes(2, 'little')\n        super(class_, self).__init__(arg)\n\n    return __init__\n\n\ndef _call_repr(self):\n    return '%s(positional=%d, keyword=%d)' % (\n        type(self).__name__,\n        self.positional,\n        self.keyword,\n    )\n\n\ndef _check_jmp_arg(self, arg):\n    if not isinstance(arg, (Instruction, _RawArg)):\n        raise TypeError(\n            'argument to %s must be an instruction, got: %r' % (\n                type(self).__name__, arg,\n            ),\n        )\n    if isinstance(arg, Instruction):\n        arg._target_of.add(self)\n    return arg\n\n\nclass CompareOpMeta(InstructionMeta):\n    \"\"\"\n    Special-case metaclass for the COMPARE_OP instruction type that provides\n    default 
constructors for the various kinds of comparisons.\n\n    These default constructors are implemented as descriptors so that we can\n    write::\n\n        new_compare = COMPARE_OP.LT\n\n    and have it be equivalent to::\n\n        new_compare = COMPARE_OP(COMPARE_OP.comparator.LT)\n    \"\"\"\n\n    @unique\n    class comparator(IntEnum):\n        LT = 0\n        LE = 1\n        EQ = 2\n        NE = 3\n        GT = 4\n        GE = 5\n        IN = 6\n        NOT_IN = 7\n        IS = 8\n        IS_NOT = 9\n        EXCEPTION_MATCH = 10\n\n        def __repr__(self):\n            return '<COMPARE_OP.%s.%s: %r>' % (\n                self.__class__.__name__, self._name_, self._value_,\n            )\n\n    class ComparatorDescr:\n        \"\"\"\n        A descriptor on the **metaclass** of COMPARE_OP that constructs new\n        instances of COMPARE_OP on attribute access.\n\n        Parameters\n        ----------\n        op : comparator\n            The element of the `comparator` enum that this descriptor will\n            forward to the COMPARE_OP constructor.\n        \"\"\"\n        def __init__(self, op):\n            self._op = op\n\n        def __get__(self, instance, owner):\n            # Since this descriptor is added to the current metaclass,\n            # ``instance`` here is the COMPARE_OP **class**.\n\n            if instance is None:\n                # If someone does `CompareOpMeta.LT`, give them back the\n                # descriptor object itself.\n                return self\n\n            # If someone does `COMPARE_OP.LT`, return a **new instance** of\n            # COMPARE_OP.\n            # We create new instances so that consumers can take ownership\n            # without worrying about other jumps targeting the new instruction.\n            return instance(self._op)\n\n    # Dynamically add an instance of ComparatorDescr for each comparator\n    # opcode.\n    # This is equivalent to doing:\n    # LT = ComparatorDescr(comparator.LT)\n    # GT 
= ComparatorDescr(comparator.GT)\n    # ...\n    for c in comparator:\n        locals()[c._name_] = ComparatorDescr(c)\n    del c\n    del ComparatorDescr\n\n\nmetamap = {\n    'COMPARE_OP': CompareOpMeta,\n}\n\n\nglobals_ = globals()\nfor name, opcode in opmap.items():\n    globals_[name] = class_ = metamap.get(name, InstructionMeta)(\n        opname[opcode],\n        (Instruction,), {\n            '__module__': __name__,\n            '__qualname__': '.'.join((__name__, name)),\n        },\n        opcode=opcode,\n    )\n    if name.startswith('CALL_FUNCTION'):\n        class_.__init__ = _mk_call_init(class_)\n        class_.__repr__ = _call_repr\n\n    if name == 'COMPARE_OP':\n        class_._normalize_arg = staticmethod(class_.comparator)\n\n    if class_.is_jmp:\n        class_._normalize_arg = _check_jmp_arg\n\n    class_.__doc__ = (\n        \"\"\"\n        See Also\n        --------\n        dis.{name}\n        \"\"\".format(name=name),\n    )\n\n    del class_\n\n\n# Clean up the namespace\ndel name\ndel globals_\ndel metamap\ndel _check_jmp_arg\ndel _call_repr\ndel _mk_call_init\n\n# The instructions that use the co_names tuple.\nuses_name = frozenset(\n    filter(attrgetter('uses_name'), Instruction.__subclasses__()),\n)\n# The instructions that use the co_varnames tuple.\nuses_varname = frozenset(\n    filter(attrgetter('uses_varname'), Instruction.__subclasses__()),\n)\n# The instructions that use the co_freevars tuple.\nuses_free = frozenset(\n    filter(attrgetter('uses_free'), Instruction.__subclasses__()),\n)\n"
  },
  {
    "path": "codetransformer/patterns.py",
    "content": "from operator import methodcaller, index, attrgetter\nimport re\nfrom types import MethodType\n\nfrom .utils.instance import instance\nfrom .utils.immutable import immutable\n\n\n#: The default startcode for patterns.\nDEFAULT_STARTCODE = 0\nmcompile = methodcaller('mcompile')\n\n\ndef _prepr(m):\n    if isinstance(m, or_):\n        return '(%r)' % m\n\n    return repr(m)\n\n\ndef coerce_ellipsis(p):\n    \"\"\"Convert ... into a matchany\n    \"\"\"\n    if p is ...:\n        return matchany\n\n    return p\n\n\nclass matchable:\n    \"\"\"Mixin for defining the operators on patterns.\n    \"\"\"\n    def __or__(self, other):\n        other = coerce_ellipsis(other)\n        if self is other:\n            return self\n\n        if not isinstance(other, matchable):\n            return NotImplemented\n\n        patterns = []\n        if isinstance(self, or_):\n            patterns.extend(self.matchables)\n        else:\n            patterns.append(self)\n        if isinstance(other, or_):\n            patterns.extend(other.matchables)\n        else:\n            patterns.append(other)\n\n        return or_(*patterns)\n\n    def __ror__(self, other):\n        # Flip the order on the or method\n        if not isinstance(other, matchable):\n            return NotImplemented\n\n        return type(self).__or__(coerce_ellipsis(other), self)\n\n    def __invert__(self):\n        return not_(self)\n\n    def __getitem__(self, key):\n        try:\n            n = index(key)\n        except TypeError:\n            pass\n        else:\n            return matchrange(self, n)\n\n        if isinstance(key, tuple) and len(key) in (1, 2):\n            return matchrange(self, *key)\n\n        if isinstance(key, modifier):\n            return postfix_modifier(self, key)\n\n        raise TypeError('invalid modifier: {0}'.format(key))\n\n\nclass postfix_modifier(immutable, matchable):\n    \"\"\"A pattern with a modifier paired with it.\n    \"\"\"\n    __slots__ = 
'matchable', 'modifier'\n\n    def mcompile(self):\n        return self.matchable.mcompile() + self.modifier.mcompile()\n\n    def __repr__(self):\n        return '%r[%r]' % (self.matchable, self.modifier)\n    __str__ = __repr__\n\n\nclass meta(matchable):\n    \"\"\"Class for meta patterns and pattern likes. for example: ``matchany``.\n    \"\"\"\n    def mcompile(self):\n        return self._token\n\n    def __repr__(self):\n        return self._token.decode('utf-8')\n    __str__ = __repr__\n\n\nclass modifier(meta):\n    \"\"\"Marker class for modifier types.\n    \"\"\"\n    pass\n\n\n@instance\nclass var(modifier):\n    \"\"\"Modifier that matches zero or more of a pattern.\n    \"\"\"\n    _token = b'*'\n\n\n@instance\nclass plus(modifier):\n    \"\"\"Modifier that matches one or more of a pattern.\n    \"\"\"\n    _token = b'+'\n\n\n@instance\nclass option(modifier):\n    \"\"\"Modifier that matches zero or one of a pattern.\n    \"\"\"\n    _token = b'?'\n\n\nclass matchrange(immutable, meta, defaults={'m': None}):\n    __slots__ = 'matchable', 'n', 'm'\n\n    def mcompile(self):\n        m = self.m\n        return (\n            self.matchable.mcompile() +\n            b'{' +\n            bytes(str(self.n), 'utf-8') +\n            b',' + (b'' if m is None else (b', ' + bytes(str(m), 'utf-8'))) +\n            b'}'\n        )\n\n    def __repr__(self):\n        return '{matchable}[{args}]'.format(\n            matchable=_prepr(self.matchable),\n            args=', '.join(map(str, filter(bool, (self.n, self.m)))),\n        )\n\n\n@instance\nclass matchany(meta):\n    \"\"\"Matchable that matches any instruction.\n    \"\"\"\n    _token = b'.'\n\n    def __repr__(self):\n        return '...'\n\n\nclass seq(immutable, matchable):\n    \"\"\"A sequence of matchables to match in order.\n\n    Parameters\n    ----------\n    \\*matchables : iterable of matchable\n        The matchables to match against.\n    \"\"\"\n    __slots__ = 'matchables',\n\n    def 
__new__(cls, *matchables):\n        if not matchables:\n            raise TypeError('cannot create an empty sequence')\n\n        if len(matchables) == 1:\n            return coerce_ellipsis(matchables[0])\n        return super().__new__(cls)\n\n    def __init__(self, *matchables):\n        self.matchables = tuple(map(coerce_ellipsis, matchables))\n\n    def mcompile(self):\n        return b''.join(map(mcompile, self.matchables))\n\n    def __repr__(self):\n        return '{cls}({args})'.format(\n            cls=type(self).__name__,\n            args=', '.join(map(_prepr, self.matchables))\n        )\n\n\nclass or_(immutable, matchable):\n    \"\"\"Logical or of multiple matchables.\n\n    Parameters\n    ----------\n    *matchables : iterable of matchable\n        The matchables to or together.\n    \"\"\"\n    __slots__ = '*matchables',\n\n    def mcompile(self):\n        return b'(' + b'|'.join(map(mcompile, self.matchables)) + b')'\n\n    def __repr__(self):\n        return ' | '.join(map(_prepr, self.matchables))\n\n\nclass not_(immutable, matchable):\n    \"\"\"Logical not of a matchable.\n    \"\"\"\n    __slots__ = 'matchable',\n\n    def mcompile(self):\n        matchable = self.matchable\n        if isinstance(matchable, (seq, or_, not_)):\n            return b'((?!(' + matchable.mcompile() + b')).)*'\n\n        return b'[^' + matchable.mcompile() + b']'\n\n    def __repr__(self):\n        return '~' + _prepr(self.matchable)\n\n\nclass pattern(immutable):\n    \"\"\"\n    A pattern of instructions that can be matched against.\n\n    This class is intended to be used as a decorator on methods of\n    CodeTransformer subclasses.  
It is used to mark that a given method should\n    be called on sequences of instructions that match the pattern described by\n    the inputs.\n\n    Parameters\n    ----------\n    \\*matchables : iterable of matchable\n        The type of instructions to match against.\n    startcodes : container of any\n        The startcodes where this pattern should be tried.\n\n    Examples\n    --------\n    Match a single BINARY_ADD instruction::\n\n        pattern(BINARY_ADD)\n\n    Match a single BINARY_ADD followed by a RETURN_VALUE::\n\n        pattern(BINARY_ADD, RETURN_VALUE)\n\n    Match a single BINARY_ADD followed by any other single instruction::\n\n        pattern(BINARY_ADD, matchany)\n\n    Match a single BINARY_ADD followed by any number of instructions::\n\n        pattern(BINARY_ADD, matchany[var])\n    \"\"\"\n    __slots__ = 'matchable', 'startcodes', '_compiled'\n\n    def __init__(self, *matchables, startcodes=(DEFAULT_STARTCODE,)):\n        if not matchables:\n            raise TypeError('expected at least one matchable')\n        self.matchable = matchable = seq(*matchables)\n        self.startcodes = startcodes\n        self._compiled = re.compile(matchable.mcompile())\n\n    def __call__(self, f):\n        return boundpattern(self._compiled, self.startcodes, f)\n\n    def __repr__(self):\n        return '{cls}(matchable={m!r}, startcodes={s})'.format(\n            cls=type(self).__name__,\n            m=self.matchable,\n            s=self.startcodes,\n        )\n\n\nclass boundpattern(immutable):\n    \"\"\"A pattern bound to a function.\n    \"\"\"\n    __slots__ = '_compiled', '_startcodes', '_f'\n\n    def __get__(self, instance, owner):\n        if instance is None:\n            return self\n\n        return type(self)(\n            self._compiled,\n            self._startcodes,\n            MethodType(self._f, instance)\n        )\n\n    def __call__(self, compiled_instrs, instrs, startcode):\n        if startcode not in self._startcodes:\n      
      raise NoMatch(compiled_instrs, startcode)\n\n        match = self._compiled.match(compiled_instrs)\n        if match is None or match.end is 0:\n            raise NoMatch(compiled_instrs, startcode)\n\n        mend = match.end()\n        return self._f(*instrs[:mend]), mend\n\n\nclass NoMatch(Exception):\n    \"\"\"Indicates that there was no match found in this dispatcher.\n    \"\"\"\n    pass\n\n\nclass patterndispatcher(immutable):\n    \"\"\"A set of patterns that can dispatch onto instrs.\n    \"\"\"\n    __slots__ = '*patterns',\n\n    def __get__(self, instance, owner):\n        if instance is None:\n            return self\n\n        return boundpatterndispatcher(\n            instance,\n            *map(\n                methodcaller('__get__', instance, owner),\n                self.patterns,\n            )\n        )\n\n\nclass boundpatterndispatcher(immutable):\n    \"\"\"A set of patterns bound to a transformer.\n    \"\"\"\n    __slots__ = 'transformer', '*patterns'\n\n    def _dispatch(self, compiled_instrs, instrs, startcode):\n        for p in self.patterns:\n            try:\n                return p(compiled_instrs, instrs, startcode)\n            except NoMatch:\n                pass\n\n        raise NoMatch(instrs, startcode)\n\n    def __call__(self, instrs):\n        opcodes = bytes(map(attrgetter('opcode'), instrs))\n        idx = 0  # The current index into the pre-transformed instrs.\n        post_transform = []  # The instrs that have been transformed.\n        transformer = self.transformer\n        while idx < len(instrs):\n            try:\n                processed, nconsumed = self._dispatch(\n                    opcodes[idx:],\n                    instrs[idx:],\n                    # NOTE: do not remove this attribute access\n                    # self._dispatch can mutate the value of the startcode\n                    transformer.startcode,\n                )\n            except NoMatch:\n                
post_transform.append(instrs[idx])\n                idx += 1\n            else:\n                post_transform.extend(processed)\n                idx += nconsumed\n        return tuple(post_transform)\n"
  },
  {
    "path": "codetransformer/tests/__init__.py",
    "content": ""
  },
  {
    "path": "codetransformer/tests/test_code.py",
    "content": "from dis import dis\nfrom io import StringIO\nfrom itertools import product, chain\nimport random\nimport sys\n\nimport pytest\n\nfrom codetransformer.code import Code, Flag, pycode\nfrom codetransformer.instructions import LOAD_CONST, LOAD_FAST, uses_free\n\n\n@pytest.fixture(scope='module')\ndef sample_flags(request):\n    random.seed(8025816322119661921)  # ayy lmao\n    nflags = len(Flag.__members__)\n    return tuple(\n        dict(zip(Flag.__members__.keys(), case)) for case in chain(\n            random.sample(list(product((True, False), repeat=nflags)), 1000),\n            [[True] * nflags],\n            [[False] * nflags],\n        )\n    )\n\n\ndef test_lnotab_roundtrip():\n    # DO NOT ADD EXTRA LINES HERE\n    def f():  # pragma: no cover\n        a = 1\n        b = 2\n        c = 3\n        d = 4\n        a, b, c, d\n\n    start_line = test_lnotab_roundtrip.__code__.co_firstlineno + 3\n    lines = [start_line + n for n in range(5)]\n    code = Code.from_pycode(f.__code__)\n    lnotab = code.lnotab\n    assert lnotab.keys() == set(lines)\n    assert isinstance(lnotab[lines[0]], LOAD_CONST)\n    assert lnotab[lines[0]].arg == 1\n    assert isinstance(lnotab[lines[1]], LOAD_CONST)\n    assert lnotab[lines[1]].arg == 2\n    assert isinstance(lnotab[lines[2]], LOAD_CONST)\n    assert lnotab[lines[2]].arg == 3\n    assert isinstance(lnotab[lines[3]], LOAD_CONST)\n    assert lnotab[lines[3]].arg == 4\n    assert isinstance(lnotab[lines[4]], LOAD_FAST)\n    assert lnotab[lines[4]].arg == 'a'\n    assert f.__code__.co_lnotab == code.py_lnotab == code.to_pycode().co_lnotab\n\n\ndef test_lnotab_really_dumb_whitespace():\n    ns = {}\n    exec('def f():\\n    lol = True' + '\\n' * 1024 + '    wut = True', ns)\n    f = ns['f']\n    code = Code.from_pycode(f.__code__)\n    lines = [2, 1026]\n    lnotab = code.lnotab\n    assert lnotab.keys() == set(lines)\n    assert isinstance(lnotab[lines[0]], LOAD_CONST)\n    assert lnotab[lines[0]].arg\n    
assert isinstance(lnotab[lines[1]], LOAD_CONST)\n    assert lnotab[lines[1]].arg\n    assert f.__code__.co_lnotab == code.py_lnotab == code.to_pycode().co_lnotab\n\n\ndef test_flag_packing(sample_flags):\n    for flags in sample_flags:\n        assert Flag.unpack(Flag.pack(**flags)) == flags\n\n\ndef test_flag_unpack_too_big():\n    assert all(Flag.unpack(Flag.max).values())\n    with pytest.raises(ValueError):\n        Flag.unpack(Flag.max + 1)\n\n\ndef test_flag_max():\n    assert Flag.pack(\n        CO_OPTIMIZED=True,\n        CO_NEWLOCALS=True,\n        CO_VARARGS=True,\n        CO_VARKEYWORDS=True,\n        CO_NESTED=True,\n        CO_GENERATOR=True,\n        CO_NOFREE=True,\n        CO_COROUTINE=True,\n        CO_ITERABLE_COROUTINE=True,\n        CO_FUTURE_DIVISION=True,\n        CO_FUTURE_ABSOLUTE_IMPORT=True,\n        CO_FUTURE_WITH_STATEMENT=True,\n        CO_FUTURE_PRINT_FUNCTION=True,\n        CO_FUTURE_UNICODE_LITERALS=True,\n        CO_FUTURE_BARRY_AS_BDFL=True,\n        CO_FUTURE_GENERATOR_STOP=True,\n    ) == Flag.max\n\n\ndef test_flag_max_immutable():\n    with pytest.raises(AttributeError):\n        Flag.CO_OPTIMIZED.max = None\n\n\ndef test_code_multiple_varargs():\n    with pytest.raises(ValueError) as e:\n        Code(\n            (), (\n                '*args',\n                '*other',\n            ),\n        )\n\n    assert str(e.value) == 'cannot specify *args more than once'\n\n\ndef test_code_multiple_kwargs():\n    with pytest.raises(ValueError) as e:\n        Code(\n            (), (\n                '**kwargs',\n                '**kwargs',\n            ),\n        )\n\n    assert str(e.value) == 'cannot specify **kwargs more than once'\n\n\n@pytest.mark.parametrize('cls', uses_free)\ndef test_dangling_var(cls):\n    instr = cls('dangling')\n    with pytest.raises(ValueError) as e:\n        Code((instr,))\n\n    assert (\n        str(e.value) ==\n        \"Argument to %r is not in cellvars or freevars.\" % instr\n    )\n\n\ndef 
test_code_flags(sample_flags):\n    attr_map = {\n        'CO_NESTED': 'is_nested',\n        'CO_GENERATOR': 'is_generator',\n        'CO_COROUTINE': 'is_coroutine',\n        'CO_ITERABLE_COROUTINE': 'is_iterable_coroutine',\n        'CO_NEWLOCALS': 'constructs_new_locals',\n    }\n    for flags in sample_flags:\n        if sys.version_info < (3, 6):\n            codestring = b'd\\x00\\x00S'  # return None\n        else:\n            codestring = b'd\\x00S'  # return None\n\n        code = Code.from_pycode(pycode(\n            argcount=0,\n            kwonlyargcount=0,\n            nlocals=2,\n            stacksize=0,\n            flags=Flag.pack(**flags),\n            codestring=codestring,\n            constants=(None,),\n            names=(),\n            varnames=('a', 'b'),\n            filename='',\n            name='',\n            firstlineno=0,\n            lnotab=b'',\n        ))\n        assert code.flags == flags\n        for flag, attr in attr_map.items():\n            if flags[flag]:\n                assert getattr(code, attr)\n\n\n@pytest.fixture\ndef abc_code():\n    a = LOAD_CONST('a')\n    b = LOAD_CONST('b')\n    c = LOAD_CONST('c')  # not in instrs\n    code = Code((a, b), argnames=())\n\n    return (a, b, c), code\n\n\ndef test_instr_index(abc_code):\n    (a, b, c), code = abc_code\n\n    assert code.index(a) == 0\n    assert code.index(b) == 1\n\n    with pytest.raises(ValueError):\n        code.index(c)\n\n\ndef test_code_contains(abc_code):\n    (a, b, c), code = abc_code\n\n    assert a in code\n    assert b in code\n    assert c not in code\n\n\ndef test_code_dis(capsys):\n    @Code.from_pyfunc\n    def code():  # pragma: no cover\n        a = 1\n        b = 2\n        return a, b\n\n    buf = StringIO()\n    dis(code.to_pycode(), file=buf)\n    expected = buf.getvalue()\n\n    code.dis()\n    out, err = capsys.readouterr()\n    assert not err\n    assert out == expected\n\n    buf = StringIO()\n    code.dis(file=buf)\n    assert 
buf.getvalue() == expected\n"
  },
  {
    "path": "codetransformer/tests/test_core.py",
    "content": "import pytest\nimport toolz.curried.operator as op\n\nfrom codetransformer import CodeTransformer, Code, pattern\nfrom codetransformer.core import Context, NoContext\nfrom codetransformer.instructions import Instruction\nfrom codetransformer.patterns import DEFAULT_STARTCODE\nfrom codetransformer.utils.instance import instance\n\n\ndef test_inherit_patterns():\n    class C(CodeTransformer):\n        matched = False\n\n        @pattern(...)\n        def _(self, instr):\n            self.matched = True\n            yield instr\n\n    class D(C):\n        pass\n\n    d = D()\n    assert not d.matched\n\n    @d\n    def f():\n        pass\n\n    assert d.matched\n\n\ndef test_override_patterns():\n    class C(CodeTransformer):\n        matched_super = False\n        matched_sub = False\n\n        @pattern(...)\n        def _(self, instr):\n            self.matched_super = True\n            yield instr\n\n    class D(C):\n        @pattern(...)\n        def _(self, instr):\n            self.matched_sub = True\n            yield instr\n\n    d = D()\n    assert not d.matched_super\n    assert not d.matched_sub\n\n    @d\n    def f():\n        pass\n\n    assert d.matched_sub\n    assert not d.matched_super\n\n\ndef test_updates_lnotab():\n    @instance\n    class c(CodeTransformer):\n        @pattern(...)\n        def _(self, instr):\n            yield type(instr)(instr.arg).steal(instr)\n\n    def f():  # pragma: no cover\n        # this function has irregular whitespace for testing the lnotab\n        a = 1\n        # intentional line\n        b = 2\n        # intentional line\n        c = 3\n        # intentional line\n        return a, b, c\n\n    original = Code.from_pyfunc(f)\n    post_transform = c.transform(original)\n\n    # check that something happened\n    assert original.lnotab != post_transform.lnotab\n    # check that we preserved the line numbers\n    assert (\n        original.lnotab.keys() ==\n        post_transform.lnotab.keys() ==\n     
   set(map(op.add(original.firstlineno), (2, 4, 6, 8)))\n    )\n\n    def sorted_instrs(lnotab):\n        order = sorted(lnotab.keys())\n        for idx in order:\n            yield lnotab[idx]\n\n    # check that the instrs are correct\n    assert all(map(\n        Instruction.equiv,\n        sorted_instrs(original.lnotab),\n        sorted_instrs(post_transform.lnotab),\n    ))\n\n    # sanity check that the function is correct\n    assert f() == c(f)()\n\n\ndef test_context():\n    def f():  # pragma: no cover\n        pass\n\n    code = Code.from_pyfunc(f)\n    c = Context(code)\n\n    # check default attributes\n    assert c.code is code\n    assert c.startcode == DEFAULT_STARTCODE\n\n    # check that the object acts like a namespace\n    c.attr = 'test'\n    assert c.attr == 'test'\n\n\ndef test_no_context():\n    @instance\n    class c(CodeTransformer):\n        pass\n\n    with pytest.raises(NoContext) as e:\n        c.context\n\n    assert str(e.value) == 'no active transformation context'\n"
  },
  {
    "path": "codetransformer/tests/test_decompiler.py",
    "content": "\"\"\"\nTests for decompiler.py\n\"\"\"\nfrom ast import AST, iter_fields, Module, parse\nfrom functools import partial\nfrom itertools import product, zip_longest, combinations_with_replacement\nimport sys\nfrom textwrap import dedent\n\nimport pytest\nfrom toolz.curried.operator import add\n\nfrom codetransformer import a as show  # noqa\n\n_343 = sys.version_info[:3] == (3, 4, 3)\npytestmark = pytest.mark.skipif(\n    not _343,\n    reason='decompiler only runs on 3.4',\n)\nif _343:\n    from ..decompiler import (\n        DecompilationContext,\n        decompile,\n        paramnames,\n        pycode_to_body,\n    )\n\n_current_test = None\n\n\ndef make_indented_body(body_str):\n    \"\"\"\n    Helper for generating an indented string to use as the body of a function.\n    \"\"\"\n    return '\\n'.join(\n        map(\n            add(\"    \"),\n            dedent(body_str).splitlines(),\n        )\n    )\n\n\ndef compare(computed, expected):\n    \"\"\"\n    Assert that two AST nodes are the same.\n    \"\"\"\n    assert type(computed) == type(expected)\n\n    if isinstance(computed, list):\n        for cv, ev in zip_longest(computed, expected):\n            compare(cv, ev)\n        return\n\n    if not isinstance(computed, AST):\n        assert computed == expected\n        return\n\n    for (cn, cv), (en, ev) in zip_longest(*map(iter_fields,\n                                               (computed, expected))):\n        assert cn == en\n        compare(cv, ev)\n\n\ndef check(text, ast_text=None):\n    \"\"\"\n    Check that compiling and disassembling `text` produces the same AST tree as\n    calling ast.parse on `ast_text`.  
If `ast_text` is not passed, use `text`\n    for both.\n    \"\"\"\n    global _current_test\n    _current_test = text\n\n    if ast_text is None:\n        ast_text = text\n\n    ast = parse(ast_text)\n\n    code = compile(text, '<test>', 'exec')\n\n    decompiled_ast = Module(\n        body=pycode_to_body(code, DecompilationContext()),\n    )\n\n    compare(decompiled_ast, ast)\n\n\ndef check_formatted(text, ast_text=None, **fmt_kwargs):\n    text = text.format(**fmt_kwargs)\n    if ast_text is not None:\n        ast_text = ast_text.format(**fmt_kwargs)\n    check(text, ast_text)\n\n\n# Bodies for for/while loops.\nLOOP_BODIES = tuple(map(\n    '\\n'.join,\n    combinations_with_replacement(\n        [\n            \"x = 1\",\n            \"break\",\n            \"continue\",\n            dedent(\n                \"\"\"\\\n                while u + v:\n                    w = z\n                \"\"\",\n            ),\n            dedent(\n                \"\"\"\\\n                for u in v:\n                    w = z\n                \"\"\",\n            ),\n        ],\n        3,\n    ),\n))\n# Bodies for for-else/while-else blocks.\nORELSE_BODIES = [\"\", \"x = 3\"]\n# LHS of assignment, or bindings in a for-loop.\nNAME_BINDS = [\n    \"a\",\n    \"(a, b)\",\n    \"(a,)\",\n    \"a, ((b, c, d), (e, f))\",\n]\n\n\ndef test_decompile():\n    def foo(a, b, *, c):\n        return a + b + c\n    decompiled = decompile(foo)\n\n    # NOTE: We can't reliably match the ast for defaults and annotations, since\n    # we can't tell how they were defined.\n    s = dedent(\n        \"\"\"\n        def foo(a, b, *, c):\n            return a + b + c\n        \"\"\"\n    )\n    compiled = parse(s)\n    compare(decompiled, compiled.body[0])\n\n\ndef test_trivial_expr():\n    check(\"a\")\n\n\n@pytest.mark.parametrize(\n    'lhs,rhs', product(NAME_BINDS, ['x', 'x.y() + z.w()']),\n)\ndef test_assign(lhs, rhs):\n    check(\"{lhs} = {rhs}\".format(lhs=lhs, rhs=rhs))\n\n\ndef 
test_unpack_to_attribute():\n    check(\"((a.b, c.d.e), f) = g\")\n    check(\"((a[b], c[d][e]), f) = g\")\n    check(\"((a[b].c, d.e[f]), g) = h\")\n\n\ndef test_chained_assign():\n    check(\"a = b = c = d\")\n    check(\"a.b = (c,) = d[e].f = g\")\n    check(\"a.b = (c, d[e].f) = g\")\n\n\ndef test_unary_not():\n    check(\"a = not b\")\n    check(\"a = not not b\")\n    check(\"a = not ((not a) + b)\")\n\n\n@pytest.mark.parametrize(\n    'op', [\n        '+',\n        '-',\n        '*',\n        '**',\n        '/',\n        '//',\n        '%',\n        '<<',\n        '>>',\n        '&',\n        '^',\n        '|',\n    ]\n)\ndef test_binary_ops(op):\n    check(\"a {op} b\".format(op=op))\n    check(\"a = b {op} c\".format(op=op))\n    check(\"a = (b {op} c) {op} d\".format(op=op))\n    check(\"a = b {op} (c {op} d)\".format(op=op))\n\n\ndef test_string_literal():\n    # A string literal as the first expression in a module generates a\n    # STORE_NAME to __doc__.  We can't tell the difference between this and an\n    # actual assignment to __doc__.\n    check(\"'a'\", \"__doc__ = 'a'\")\n    check(\"'abc'\", \"__doc__ = 'abc'\")\n\n    check(\"a = 'a'\")\n    check(\"a = u'a'\")\n\n\ndef test_bytes_literal():\n    check(\"b'a'\")\n    check(\"b'abc'\")\n    check(\"a = b'a'\")\n\n\ndef test_int_literal():\n    check(\"1\", \"\")  # This gets constant-folded out\n    check(\"a = 1\")\n    check(\"a = 1 + b\")\n    check(\"a = b + 1\")\n\n\ndef test_float_literal():\n    check('1.0', \"\")   # This gets constant-folded out\n    check(\"a = 1.0\")\n    check(\"a = 1.0 + b\")\n    check(\"a = b + 1.0\")\n\n\ndef test_complex_literal():\n    check('1.0j', \"\")  # This gets constant-folded out\n    check(\"a = 1.0j\")\n    check(\"a = 1.0j + b\")\n    check(\"a = b + 1.0j\")\n\n\ndef test_tuple_literals():\n    check(\"()\")\n    check(\"(1,)\")\n    check(\"(a,)\")\n    check(\"(1, a)\")\n    check(\"(1, 'a')\")\n    check(\"((1,), a)\")\n    check(\"((1,(b,)), 
a)\")\n\n\ndef test_set_literals():\n    check(\"{1}\")\n    check(\"{1, 'a'}\")\n    check(\"a = {1, 'a'}\")\n\n\ndef test_list_literals():\n    check(\"[]\")\n    check(\"[1]\")\n    check(\"[a]\")\n    check(\"[[], [a, 1]]\")\n\n\ndef test_dict_literals():\n    check(\"{}\")\n    check(\"{a: b}\")\n    check(\"{a + a: b + b}\")\n    check(\"{a: b, c: d}\")\n    check(\"{1: 2, c: d}\")\n    check(\"{a: {b: c}, d: e}\")\n\n    check(\"{a: {b: {c: d}, e: {f: g}}}\")\n\n    check(\"{a: {b: [c, d, e]}}\")\n    check(\"a + {b: c}\")\n\n\ndef test_function_call():\n    check(\"f()\")\n    check(\"f(a, b, c=1, d=2)\")\n\n    check(\"f(*args)\")\n    check(\"f(a, b=1, *args)\")\n\n    check(\"f(**kwargs)\")\n    check(\"f(a, b=1, **kwargs)\")\n\n    check(\"f(*args, **kwargs)\")\n    check(\"f(a, b=1, *args, **kwargs)\")\n\n    check(\"(a + b)()\")\n    check(\"a().b.c.d()\")\n\n\ndef test_paramnames():\n\n    def foo(a, b):\n        x = 1\n        return x\n\n    args, kwonlyargs, varargs, varkwargs = paramnames(foo.__code__)\n    assert args == ('a', 'b')\n    assert kwonlyargs == ()\n    assert varargs is None\n    assert varkwargs is None\n\n    def bar(a, *, b):\n        x = 1\n        return x\n\n    args, kwonlyargs, varargs, varkwargs = paramnames(bar.__code__)\n    assert args == ('a',)\n    assert kwonlyargs == ('b',)\n    assert varargs is None\n    assert varkwargs is None\n\n    def fizz(a, **kwargs):\n        x = 1\n        return x\n\n    args, kwonlyargs, varargs, varkwargs = paramnames(fizz.__code__)\n    assert args == ('a',)\n    assert kwonlyargs == ()\n    assert varargs is None\n    assert varkwargs == 'kwargs'\n\n    def buzz(a, b=1, *args, c, d=3, **kwargs):\n        x = 1\n        return x\n\n    args, kwonlyargs, varargs, varkwargs = paramnames(buzz.__code__)\n    assert args == ('a', 'b')\n    assert kwonlyargs == ('c', 'd')\n    assert varargs == 'args'\n    assert varkwargs == 'kwargs'\n\n\n@pytest.mark.parametrize(\n    \"signature,expr\",\n 
   product(\n        [\n            \"\",\n            \"a\",\n            \"a, b\",\n            \"*a, b\",\n            \"a, **b\",\n            \"*a, **b\",\n            \"a=1, b=2, c=3\",\n            \"a, *, b=1, c=2, d=3\",\n            \"a, b=1, c=2, *, d, e=3, f, g=4\",\n            \"a, b=1, *args, c, d=2, **kwargs\",\n            \"a, b=c + d, *, e=f + g\",\n        ],\n        [\n            \"a + b\",\n            \"None\",\n            \"lambda x: lambda y: lambda z: (x, y, z)\",\n            \"[lambda x: a + b, 1]\",\n            \"[(lambda y: a + b) + (lambda z: d + e), 1]\",\n        ],\n    ),\n)\ndef test_lambda(signature, expr):\n    check_formatted(\"lambda {sig}: {expr}\", sig=signature, expr=expr)\n    check_formatted(\"func = (lambda {sig}: {expr})\", sig=signature, expr=expr)\n    check_formatted(\n        dedent(\n            \"\"\"\n            def foo():\n                return (lambda {sig}: {expr})()\n            \"\"\"\n        ),\n        sig=signature,\n        expr=expr,\n    )\n\n\ndef test_simple_function():\n    check(\n        dedent(\n            \"\"\"\\\n            def foo(a, b):\n                return a + b\n            \"\"\"\n        )\n    )\n\n\ndef test_annotations():\n    check(\n        dedent(\n            \"\"\"\\\n            def foo(a: b, c: d):\n                return 3\n            \"\"\"\n        )\n    )\n    check(\n        dedent(\n            \"\"\"\\\n            def foo(a: b, c=1, *args: d, e:f, g:h=i, **kwargs: j):\n                return a + c\n            \"\"\"\n        )\n    )\n    check(\n        dedent(\n            \"\"\"\\\n            def foo(a: b * 3, c=1, *args: d, e:f, g:h=i, **kwargs: j) -> k:\n                return a + c\n            \"\"\"\n        )\n    )\n\n\n@pytest.mark.parametrize(\n    \"signature,body\",\n    product(\n        [\n            \"()\",\n            \"(a)\",\n            \"(a, b)\",\n            \"(*a, b)\",\n            \"(a, **b)\",\n            \"(*a, **b)\",\n  
          \"(a=1, b=2, c=3)\",\n            \"(a, *, b=1, c=2, d=3)\",\n            \"(a, b=1, c=2, *, d, e=3, f, g=4)\",\n            \"(a, b=1, *args, c, d=2, **kwargs)\",\n            \"(a, b=c + d, *, e=f + g)\",\n        ],\n        [\n            \"\"\"\\\n            return a + b\n            \"\"\",\n            \"\"\"\\\n            x = 1\n            y = 2\n            return x + y\n            \"\"\",\n            \"\"\"\\\n            x = 3\n            def bar(m, n):\n                global x\n                x = 4\n                return m + n + x\n            return None\n            \"\"\",\n            \"\"\"\\\n            def bar():\n                x = 3\n                def buzz():\n                    nonlocal x\n                    x = 4\n                    return x\n                return x\n            return None\n            \"\"\"\n        ],\n    ),\n)\ndef test_function_signatures(signature, body):\n    check(\n        dedent(\n            \"\"\"\\\n            def foo{signature}:\n            {body}\n            \"\"\"\n        ).format(signature=signature, body=make_indented_body(body))\n    )\n\n\ndef test_decorators():\n    check(\n        dedent(\n            \"\"\"\n            @decorator2\n            @decorator1()\n            @decorator0.attr.attr\n            def foo(a, b=1, *, c, d=2):\n                @decorator3\n                def bar(c, d):\n                    x = 1\n                    return None\n                return None\n            \"\"\"\n        )\n    )\n\n\ndef test_store_twice_to_global():\n    check(\n        dedent(\n            \"\"\"\\\n            x = 3\n            def foo():\n                global x\n                x = 4\n                x = 5\n                return None\n            \"\"\"\n        )\n    )\n\n\ndef test_store_twice_to_nonlocal():\n    check(\n        dedent(\n            \"\"\"\\\n            def foo():\n                x = 1\n                def bar():\n                    
nonlocal x\n                    x = 2\n                    x = 3\n                    return None\n                return None\n            \"\"\"\n        )\n    )\n\n\ndef test_getattr():\n    check(\"a.b\")\n    check(\"a.b.c\")\n    check(\"a.b.c + a.b.c\")\n\n    check(\"(1).real\")\n    check(\"1..real\")\n\n    check(\"(a + b).c\")\n\n    check(\"a = b.c\")\n\n\ndef test_setattr():\n    check(\"a.b = c\")\n    check(\"a.b.c = d\")\n    check(\"a.b.c = d.e.f\")\n    check(\"(a + b).c = (d + e).f\")\n\n\ndef test_getitem():\n    check(\"a = b[c]\")\n    check(\"a = b[c:]\")\n    check(\"a = b[:c]\")\n    check(\"a = b[c::]\")\n    check(\"a = b[c:d]\")\n    check(\"a = b[c:d:e]\")\n\n    check(\"a = b[c, d]\")\n    check(\"a = b[c:, d]\")\n    check(\"a = b[c:d:e, f:g:h, i:j:k]\")\n\n    check(\"a = b[c + d][e]\")\n\n\ndef test_setitem():\n    check(\"a[b] = c\")\n    check(\"b[c:] = a\")\n    check(\"b[:c] = a\")\n    check(\"b[c::] = a\")\n    check(\"b[c:d] = a\")\n    check(\"b[c:d:e] = a\")\n\n    check(\"b[c, d] = a\")\n    check(\"b[c:, d] = a\")\n    check(\"b[c:d:e, f:g:h, i:j:k] = a\")\n\n    check(\"b[c + d][e] = a\")\n\n\n@pytest.mark.parametrize(\n    \"loop,body,else_body\",\n    product(\n        [\n            \"for a in b:\",\n            \"for a in b.c.d:\",\n            \"for (a, (b, c), d) in e:\"\n        ],\n        LOOP_BODIES,\n        ORELSE_BODIES,\n    )\n)\ndef test_for(loop, body, else_body):\n    check(\n        dedent(\n            \"\"\"\\\n            {loop}\n            {body}\n            {else_}\n            {else_body}\n            x = 4\n            \"\"\"\n        ).format(\n            loop=loop,\n            body=make_indented_body(body),\n            else_=\"else:\" if else_body else \"\",\n            else_body=make_indented_body(else_body) if else_body else \"\",\n        )\n    )\n\n\n@pytest.mark.parametrize(\n    \"condition,body,else_body\",\n    product(\n        [\n            \"a\",\n            \"not a\",\n   
         \"not not a\",\n            \"a.b.c.d\",\n            \"not a.b.c.d\",\n            \"True\",\n        ],\n        LOOP_BODIES,\n        ORELSE_BODIES,\n    )\n)\ndef test_while(condition, body, else_body):\n    check(\n        dedent(\n            \"\"\"\\\n            while {condition}:\n            {body}\n            {else_}\n            {else_body}\n            x = 4\n            \"\"\"\n        ).format(\n            condition=condition,\n            body=make_indented_body(body),\n            else_=\"else:\" if else_body else \"\",\n            else_body=make_indented_body(else_body) if else_body else \"\",\n        )\n    )\n\n\ndef test_while_False():\n    # The peephole optimizer removes while <falsey constant> blocks entirely.\n    check(\n        dedent(\n            \"\"\"\\\n            while False:\n                x = 1\n                y = 2\n            \"\"\"\n        ),\n        \"\"\n    )\n\n\ndef test_import():\n    check(\"import a as b\")\n    check(\"import a.b as c\")\n    # These generate identical bytecode.\n    check(\n        \"import a, b\",\n        dedent(\n            \"\"\"\\\n            import a\n            import b\n            \"\"\"\n        )\n    )\n    check(\"import a.b.c\")\n    # These generate identical bytecode.\n    check(\n        \"import a.b.c as d, e.f.g as h\",\n        dedent(\n            \"\"\"\n            import a.b.c as d\n            import e.f.g as h\n            \"\"\"\n        )\n    )\n\n\ndef test_import_from():\n    check(\"from a import b\")\n    check(\"from a import b, c as d, d\")\n    check(\"from a.b import c, d as e, f as g\")\n\n\ndef test_import_star():\n    check(\"from a import *\")\n    check(\"from a.b.c import *\")\n\n\ndef test_import_attribute_aliasing_module():\n    check(\"import a.b as a\")\n\n\ndef test_import_in_function():\n    check(\n        dedent(\n            \"\"\"\\\n            def foo():\n                import a.b.c as d\n                from e.f import g\n 
               return None\n            \"\"\"\n        )\n    )\n    check(\n        dedent(\n            \"\"\"\\\n            def foo():\n                global d, g\n                import a.b.c as d\n                from e.f import g\n                return None\n            \"\"\"\n        )\n    )\n    check(\n        dedent(\n            \"\"\"\\\n            def foo():\n                d = None\n                g = None\n                def bar():\n                    nonlocal d, g\n                    import a.b.c as d\n                    from e.f import g\n                    return None\n                return None\n            \"\"\"\n        )\n    )\n\n\ndef test_with_block():\n    check(\n        dedent(\n            \"\"\"\n            with a.b.c:\n                c = d\n                e = f()\n            \"\"\"\n        )\n    )\n\n    # Tests for various kinds of stores from the with assignment.\n    check(\n        dedent(\n            \"\"\"\n            with a as b:\n                c = d\n            \"\"\"\n        )\n    )\n\n    check(\n        dedent(\n            \"\"\"\n            def foo():\n                with a as b:\n                    c = d\n                return None\n            \"\"\"\n        )\n    )\n\n    check(\n        dedent(\n            \"\"\"\n            def foo():\n                global b\n                with a as b:\n                    c = d\n                return None\n            \"\"\"\n        )\n    )\n\n    check(\n        dedent(\n            \"\"\"\n            def foo():\n                with a as b:\n                    def bar():\n                        nonlocal b\n                        b = None\n                        return c\n                return None\n            \"\"\"\n        )\n    )\n\n\ndef test_nested_with():\n    check(\n        dedent(\n            \"\"\"\n            with a:\n                with b:\n                    with c:\n                        x = 3\n                
    y = 4\n                z = 5\n            \"\"\"\n        )\n    )\n    # This is indistinguishable in bytecode from:\n    # with a:\n    #     with b:\n    #         with c as d:\n    #             e = f\n    # We normalize the former to the latter.\n    check(\n        dedent(\n            \"\"\"\n            with a, b, c as d:\n                e = f\n            \"\"\"\n        ),\n    )\n\n\ndef test_simple_if():\n    check(\n        dedent(\n            \"\"\"\n            if a:\n                b = c\n            x = \"end\"\n            \"\"\"\n        )\n    )\n\n\ndef test_if_return():\n    check(\n        dedent(\n            \"\"\"\n            def f():\n                if a:\n                    return b\n                return None\n            \"\"\"\n        )\n    )\n\n\ndef test_if_else():\n    check(\n        dedent(\n            \"\"\"\n            if a:\n                b = c\n            else:\n                b = d\n            x = \"end\"\n            \"\"\"\n        )\n    )\n\n\n@pytest.mark.parametrize(\n    'last_statement,prefix',\n    product(\n        (\"\", \"x = 'end'\"),\n        (\"not\", \"not not\"),\n    ),\n)\ndef test_if_elif(last_statement, prefix):\n    check(\n        dedent(\n            \"\"\"\\\n            if {prefix} a:\n                b = c\n            elif d:\n                e = f\n            elif {prefix} g:\n                h = i\n            else:\n                j = k\n            {last_statement}\n            \"\"\"\n        ).format(prefix=prefix, last_statement=last_statement)\n    )\n\n    check(\n        dedent(\n            \"\"\"\n            if a:\n                x = \"before_b\"\n                if {prefix} b:\n                    x = \"in_b\"\n                elif b:\n                    x = \"in_elif_b\"\n                else:\n                    x = \"else_b\"\n                w = \"after_b\"\n            elif c:\n                x = \"in_c\"\n            else:\n                x = 
\"in_else\"\n            {last_statement}\n            \"\"\"\n        ).format(prefix=prefix, last_statement=last_statement)\n    )\n\n\n@pytest.mark.parametrize(\n    'op', ['and', 'or'],\n)\ndef test_boolops(op):\n    check_ = partial(check_formatted, op=op)\n\n    check_(\"a {op} b\")\n    check_(\"a {op} b {op} c\")\n    check_(\"a + (b {op} c)\")\n    check_(\"(a {op} b) + c\")\n    check_(\"(a + b) {op} (c + d)\")\n    check_(\"a + (b {op} c) + d\")\n\n    check_(\"a {op} (1 + (b {op} c))\")\n\n\n@pytest.mark.parametrize(\n    'op', ['and', 'or'],\n)\ndef test_normalize_nested_boolops(op):\n    check_ = partial(check_formatted, op=op)\n\n    # These generate identical bytecode, but they're different at the AST\n    # level.  We normalize to minimally-nested form.\n    check_(\"a {op} (b {op} c)\", \"a {op} b {op} c\")\n    check_(\"(a {op} b) {op} c\", \"a {op} b {op} c\")\n\n    check_(\"a {op} (b {op} (c {op} d))\", \"a {op} b {op} c {op} d\")\n    check_(\"((a {op} b) {op} c) {op} d\", \"a {op} b {op} c {op} d\")\n\n    check_(\"(a {op} b) {op} (c {op} d)\", \"a {op} b {op} c {op} d\")\n    check_(\"a {op} (b {op} c) {op} d\", \"a {op} b {op} c {op} d\")\n\n\ndef test_mixed_boolops():\n    check(\"a or b and c and d\")\n"
  },
  {
    "path": "codetransformer/tests/test_instructions.py",
    "content": "from codetransformer.instructions import Instruction\n\n\ndef test_repr_types():\n    assert repr(Instruction) == 'Instruction'\n    for tp in Instruction.__subclasses__():\n        assert repr(tp) == tp.opname\n"
  },
  {
    "path": "codetransformer/transformers/__init__.py",
    "content": "from .constants import asconstants\nfrom .interpolated_strings import interpolated_strings\nfrom .pattern_matched_exceptions import pattern_matched_exceptions\nfrom .precomputed_slices import precomputed_slices\nfrom .literals import (\n    bytearray_literals,\n    decimal_literals,\n    haskell_strs,\n    islice_literals,\n    overloaded_complexes,\n    overloaded_floats,\n    overloaded_ints,\n    overloaded_lists,\n    overloaded_sets,\n    overloaded_slices,\n    overloaded_strs,\n    overloaded_tuples,\n)\n\n\n__all__ = [\n    'asconstants',\n    'bytearray_literals',\n    'decimal_literals',\n    'haskell_strs',\n    'interpolated_strings',\n    'islice_literals',\n    'overloaded_complexes',\n    'overloaded_floats',\n    'overloaded_ints',\n    'overloaded_lists',\n    'overloaded_sets',\n    'overloaded_slices',\n    'overloaded_strs',\n    'overloaded_tuples',\n    'pattern_matched_exceptions',\n    'precomputed_slices',\n]\n"
  },
  {
    "path": "codetransformer/transformers/add2mul.py",
    "content": "\"\"\"\nadd2mul\n--------\n\nA transformer that replaces BINARY_ADD instructions with BINARY_MULTIPLY\ninstructions.\n\nThis isn't useful, but it's good introductory example/tutorial material.\n\"\"\"\nfrom codetransformer import CodeTransformer, pattern\nfrom codetransformer.instructions import BINARY_ADD, BINARY_MULTIPLY\n\n\nclass add2mul(CodeTransformer):\n    @pattern(BINARY_ADD)\n    def _add2mul(self, add_instr):\n        yield BINARY_MULTIPLY().steal(add_instr)\n"
  },
  {
    "path": "codetransformer/transformers/constants.py",
    "content": "import builtins\n\nfrom ..core import CodeTransformer\nfrom ..instructions import (\n    DELETE_DEREF,\n    DELETE_FAST,\n    DELETE_GLOBAL,\n    DELETE_NAME,\n    LOAD_CLASSDEREF,\n    LOAD_CONST,\n    LOAD_DEREF,\n    LOAD_GLOBAL,\n    LOAD_NAME,\n    STORE_DEREF,\n    STORE_FAST,\n    STORE_GLOBAL,\n    STORE_NAME,\n)\nfrom ..patterns import pattern\n\n\ndef _assign_or_del(type_):\n    assert type_ in ('assign to', 'delete')\n\n    def handler(self, instr):\n        name = instr.arg\n        if name not in self._constnames:\n            yield instr\n            return\n\n        code = self.code\n        filename = code.filename\n        lno = code.lno_of_instr[instr]\n        try:\n            with open(filename) as f:\n                line = f.readlines()[lno - 1]\n        except IOError:\n            line = '???'\n\n        raise SyntaxError(\n            \"can't %s constant name %r\" % (type_, name),\n            (filename, lno, len(line), line),\n        )\n\n    return handler\n\n\nclass asconstants(CodeTransformer):\n    \"\"\"\n    A code transformer that inlines names as constants.\n\n    - Positional arguments are interpreted as names of builtins (e.g. ``len``,\n      ``print``) to freeze as constants in the decorated function's namespace.\n\n    - Keyword arguments provide additional custom names to freeze as constants.\n\n    - If invoked with no positional or keyword arguments, ``asconstants``\n      inlines all names in ``builtins``.\n\n    Parameters\n    ----------\n    \\*builtin_names\n        Names of builtins to freeze as constants.\n    \\*\\*kwargs\n        Additional key-value pairs to bind as constants.\n\n    Examples\n    --------\n    Freezing Builtins:\n\n    >>> from codetransformer.transformers import asconstants\n    >>>\n    >>> @asconstants('len')\n    ... def with_asconstants(x):\n    ...     return len(x) * 2\n    ...\n    >>> def without_asconstants(x):\n    ...     
return len(x) * 2\n    ...\n    >>> len = lambda x: 0\n    >>> with_asconstants([1, 2, 3])\n    6\n    >>> without_asconstants([1, 2, 3])\n    0\n\n    Adding Custom Constants:\n\n    >>> @asconstants(a=1)\n    ... def f():\n    ...     return a\n    ...\n    >>> f()\n    1\n    >>> a = 5\n    >>> f()\n    1\n    \"\"\"\n    def __init__(self, *builtin_names, **kwargs):\n        super().__init__()\n        bltins = vars(builtins)\n        if not (builtin_names or kwargs):\n            self._constnames = bltins.copy()\n        else:\n            self._constnames = constnames = {}\n            for arg in builtin_names:\n                constnames[arg] = bltins[arg]\n            overlap = constnames.keys() & kwargs.keys()\n            if overlap:\n                raise TypeError('Duplicate keys: {!r}'.format(overlap))\n            constnames.update(kwargs)\n\n    def transform(self, code, **kwargs):\n        overlap = self._constnames.keys() & set(code.argnames)\n        if overlap:\n            raise SyntaxError(\n                'argument names overlap with constant names: %r' % overlap,\n            )\n        return super().transform(code, **kwargs)\n\n    @pattern(LOAD_NAME | LOAD_GLOBAL | LOAD_DEREF | LOAD_CLASSDEREF)\n    def _load_name(self, instr):\n        name = instr.arg\n        if name not in self._constnames:\n            yield instr\n            return\n\n        yield LOAD_CONST(self._constnames[name]).steal(instr)\n\n    _store = pattern(\n        STORE_NAME | STORE_GLOBAL | STORE_DEREF | STORE_FAST,\n    )(_assign_or_del('assign to'))\n    _delete = pattern(\n        DELETE_NAME | DELETE_GLOBAL | DELETE_DEREF | DELETE_FAST,\n    )(_assign_or_del('delete'))\n"
  },
  {
    "path": "codetransformer/transformers/interpolated_strings.py",
    "content": "\"\"\"\nA transformer implementing ruby-style interpolated strings.\n\"\"\"\nimport sys\n\nfrom codetransformer import pattern, CodeTransformer\nfrom codetransformer.instructions import (\n    BUILD_TUPLE,\n    LOAD_CONST,\n    LOAD_ATTR,\n    CALL_FUNCTION,\n    CALL_FUNCTION_KW,\n    ROT_TWO,\n)\nfrom codetransformer.utils.functional import flatten, is_a\n\n\nclass interpolated_strings(CodeTransformer):\n    \"\"\"\n    A transformer that interpolates local variables into string literals.\n\n    Parameters\n    ----------\n    transform_bytes : bool, optional\n        Whether to transform bytes literals to interpolated unicode strings.\n        Default is True.\n    transform_str : bool, optional\n        Whether to interpolate values into unicode strings.\n        Default is False.\n\n    Example\n    -------\n    >>> @interpolated_strings()  # doctest: +SKIP\n    ... def foo(a, b):\n    ...     c = a + b\n    ...     return b\"{a} + {b} = {c}\"\n    ...\n    >>> foo(1, 2)  # doctest: +SKIP\n    '1 + 2 = 3'\n    \"\"\"\n\n    if sys.version_info >= (3, 6):\n        def __init__(self, *, transform_bytes=True, transform_str=False):\n            raise NotImplementedError(\n                '%s is not supported on 3.6 or newer, just use f-strings' %\n                type(self).__name__,\n            )\n    else:\n        def __init__(self, *, transform_bytes=True, transform_str=False):\n            super().__init__()\n            self._transform_bytes = transform_bytes\n            self._transform_str = transform_str\n\n    @property\n    def types(self):\n        \"\"\"\n        Tuple containing types transformed by this transformer.\n        \"\"\"\n        out = []\n        if self._transform_bytes:\n            out.append(bytes)\n        if self._transform_str:\n            out.append(str)\n        return tuple(out)\n\n    @pattern(LOAD_CONST)\n    def _load_const(self, instr):\n        const = instr.arg\n\n        if isinstance(const, (tuple, 
frozenset)):\n            yield from self._transform_constant_sequence(const)\n            return\n\n        if isinstance(const, bytes) and self._transform_bytes:\n            yield from self.transform_stringlike(const)\n        elif isinstance(const, str) and self._transform_str:\n            yield from self.transform_stringlike(const)\n        else:\n            yield instr\n\n    def _transform_constant_sequence(self, seq):\n        \"\"\"\n        Transform a frozenset or tuple.\n        \"\"\"\n        should_transform = is_a(self.types)\n\n        if not any(filter(should_transform, flatten(seq))):\n            # Tuple doesn't contain any transformable strings. Ignore.\n            yield LOAD_CONST(seq)\n            return\n\n        for const in seq:\n            if should_transform(const):\n                yield from self.transform_stringlike(const)\n            elif isinstance(const, (tuple, frozenset)):\n                yield from self._transform_constant_sequence(const)\n            else:\n                yield LOAD_CONST(const)\n\n        if isinstance(seq, tuple):\n            yield BUILD_TUPLE(len(seq))\n        else:\n            assert isinstance(seq, frozenset)\n            yield BUILD_TUPLE(len(seq))\n            yield LOAD_CONST(frozenset)\n            yield ROT_TWO()\n            yield CALL_FUNCTION(1)\n\n    def transform_stringlike(self, const):\n        \"\"\"\n        Yield instructions to process a str or bytes constant.\n        \"\"\"\n        yield LOAD_CONST(const)\n        if isinstance(const, bytes):\n            yield from self.bytes_instrs\n        elif isinstance(const, str):\n            yield from self.str_instrs\n\n    @property\n    def bytes_instrs(self):\n        \"\"\"\n        Yield instructions to call TOS.decode('utf-8').format(**locals()).\n        \"\"\"\n        yield LOAD_ATTR('decode')\n        yield LOAD_CONST('utf-8')\n        yield CALL_FUNCTION(1)\n        yield from self.str_instrs\n\n    @property\n    def 
str_instrs(self):\n        \"\"\"\n        Yield instructions to call TOS.format(**locals()).\n        \"\"\"\n        yield LOAD_ATTR('format')\n        yield LOAD_CONST(locals)\n        yield CALL_FUNCTION(0)\n        yield CALL_FUNCTION_KW()\n"
  },
  {
    "path": "codetransformer/transformers/literals.py",
    "content": "from collections import OrderedDict\nfrom decimal import Decimal\nfrom itertools import islice\nimport sys\nfrom textwrap import dedent\n\nfrom .. import instructions\nfrom ..core import CodeTransformer\nfrom ..patterns import pattern,  matchany, var\nfrom ..utils.instance import instance\n\n\nIN_COMPREHENSION = 'in_comprehension'\n\n\nclass overloaded_dicts(CodeTransformer):\n    \"\"\"Transformer that allows us to overload dictionary literals.\n\n    This acts by creating an empty map and then inserting every\n    key value pair in order.\n\n    The code that is generated will turn something like::\n\n        {k_0: v_0, k_1: v_1, ..., k_n: v_n}\n\n    into::\n\n        _tmp = astype()\n        _tmp[k_0] = v_0\n        _tmp[k_1] = v_1\n        ...\n        _tmp[k_n] = v_n\n        _tmp  # leaves the map on the stack.\n\n    Parameters\n    ----------\n    astype : callable\n        The constructor for the type to create.\n\n    Examples\n    --------\n    >>> from collections import OrderedDict\n    >>> ordereddict_literals = overloaded_dicts(OrderedDict)\n    >>> @ordereddict_literals\n    ... def f():\n    ...     
return {'a': 1, 'b': 2, 'c': 3}\n    ...\n    >>> f()\n    OrderedDict([('a', 1), ('b', 2), ('c', 3)])\n    \"\"\"\n    def __init__(self, astype):\n        super().__init__()\n        self.astype = astype\n\n    @pattern(instructions.BUILD_MAP, matchany[var], instructions.MAP_ADD)\n    def _start_comprehension(self, instr, *instrs):\n        yield instructions.LOAD_CONST(self.astype).steal(instr)\n        # TOS  = self.astype\n\n        yield instructions.CALL_FUNCTION(0)\n        # TOS  = m = self.astype()\n\n        yield instructions.STORE_FAST('__map__')\n\n        *body, map_add = instrs\n        yield from self.patterndispatcher(body)\n        # TOS  = k\n        # TOS1 = v\n\n        yield instructions.LOAD_FAST('__map__').steal(map_add)\n        # TOS  = __map__\n        # TOS1 = k\n        # TOS2 = v\n\n        yield instructions.ROT_TWO()\n        # TOS  = k\n        # TOS1 = __map__\n        # TOS2 = v\n\n        yield instructions.STORE_SUBSCR()\n        self.begin(IN_COMPREHENSION)\n\n    @pattern(instructions.RETURN_VALUE, startcodes=(IN_COMPREHENSION,))\n    def _return_value(self, instr):\n        yield instructions.LOAD_FAST('__map__').steal(instr)\n        # TOS  = __map__\n\n        yield instr\n\n    if sys.version_info[:2] <= (3, 4):\n        # Python 3.4\n\n        @pattern(instructions.BUILD_MAP)\n        def _build_map(self, instr):\n            yield instructions.LOAD_CONST(self.astype).steal(instr)\n            # TOS  = self.astype\n\n            yield instructions.CALL_FUNCTION(0)\n            # TOS  = m = self.astype()\n\n            yield from (instructions.DUP_TOP(),) * instr.arg\n            # TOS  = m\n            # ...\n            # TOS[instr.arg] = m\n\n        @pattern(instructions.STORE_MAP)\n        def _store_map(self, instr):\n            # TOS  = k\n            # TOS1 = v\n            # TOS2 = m\n            # TOS3 = m\n\n            yield instructions.ROT_THREE().steal(instr)\n            # TOS  = v\n            # TOS1 = 
m\n            # TOS2 = k\n            # TOS3 = m\n\n            yield instructions.ROT_THREE()\n            # TOS  = m\n            # TOS1 = k\n            # TOS2 = v\n            # TOS3 = m\n\n            yield instructions.ROT_TWO()\n            # TOS  = k\n            # TOS1 = m\n            # TOS2 = v\n            # TOS3 = m\n\n            yield instructions.STORE_SUBSCR()\n            # TOS  = m\n\n    else:\n        # Python 3.5 and beyond!\n\n        def _construct_map(self, key_value_pairs):\n            mapping = self.astype()\n            for key, value in zip(key_value_pairs[::2], key_value_pairs[1::2]):\n                mapping[key] = value\n            return mapping\n\n        @pattern(instructions.BUILD_MAP)\n        def _build_map(self, instr):\n            # TOS      = vn\n            # TOS1     = kn\n            # ...\n            # TOSN     = v0\n            # TOSN + 1 = k0\n            # Construct a tuple of (k0, v0, k1, v1, ..., kn, vn) for\n            # each of the key: value pairs in the dictionary.\n            yield instructions.BUILD_TUPLE(instr.arg * 2).steal(instr)\n            # TOS  = (k0, v0, k1, v1, ..., kn, vn)\n\n            yield instructions.LOAD_CONST(self._construct_map)\n            # TOS  = self._construct_map\n            # TOS1 = (k0, v0, k1, v1, ..., kn, vn)\n\n            yield instructions.ROT_TWO()\n            # TOS  = (k0, v0, k1, v1, ..., kn, vn)\n            # TOS1 = self._construct_map\n\n            yield instructions.CALL_FUNCTION(1)\n\n    if sys.version_info >= (3, 6):\n        def _construct_const_map(self, values, keys):\n            mapping = self.astype()\n            for key, value in zip(keys, values):\n                mapping[key] = value\n            return mapping\n\n        @pattern(instructions.LOAD_CONST, instructions.BUILD_CONST_KEY_MAP)\n        def _build_const_map(self, keys, instr):\n            yield instructions.BUILD_TUPLE(len(keys.arg)).steal(keys)\n            # TOS  = (v0, v1, ..., 
vn)\n\n            yield keys\n            # TOS  = (k0, k1, ..., kn)\n            # TOS1 = (v0, v1, ..., vn)\n\n            yield instructions.LOAD_CONST(self._construct_const_map)\n            # TOS  = self._construct_const_map\n            # TOS1 = (k0, k1, ..., kn)\n            # TOS2 = (v0, v1, ..., vn)\n\n            yield instructions.ROT_THREE()\n            # TOS  = (k0, k1, ..., kn)\n            # TOS1 = (v0, v1, ..., vn)\n            # TOS2 = self._construct_const_map\n\n            yield instructions.CALL_FUNCTION(2)\n\n\nordereddict_literals = overloaded_dicts(OrderedDict)\n\n\ndef _format_constant_docstring(type_):\n    return dedent(\n        \"\"\"\n        Transformer that applies a callable to each {type_} constant in the\n        transformed code object.\n\n        Parameters\n        ----------\n        xform : callable\n            A callable to be applied to {type_} literals.\n\n        See Also\n        --------\n        codetransformer.transformers.literals.overloaded_strs\n        \"\"\"\n    ).format(type_=type_.__name__)\n\n\nclass _ConstantTransformerBase(CodeTransformer):\n\n    def __init__(self, xform):\n        super().__init__()\n        self.xform = xform\n\n    def transform_consts(self, consts):\n        # This is all one expression.\n        return super().transform_consts(\n            tuple(\n                frozenset(self.transform_consts(tuple(const)))\n                if isinstance(const, frozenset)\n                else self.transform_consts(const)\n                if isinstance(const, tuple)\n                else self.xform(const)\n                if isinstance(const, self._type)\n                else const\n                for const in consts\n            )\n        )\n\n\ndef overloaded_constants(type_, __doc__=None):\n    \"\"\"A factory for transformers that apply functions to literals.\n\n    Parameters\n    ----------\n    type_ : type\n        The type to overload.\n    __doc__ : str, optional\n        Docstring 
for the generated transformer.\n\n    Returns\n    -------\n    transformer : subclass of CodeTransformer\n        A new code transformer class that will overload the provided\n        literal types.\n    \"\"\"\n    typename = type_.__name__\n    if typename.endswith('x'):\n        typename += 'es'\n    elif not typename.endswith('s'):\n        typename += 's'\n\n    if __doc__ is None:\n        __doc__ = _format_constant_docstring(type_)\n\n    return type(\n        \"overloaded_\" + typename,\n        (_ConstantTransformerBase,), {\n            '_type': type_,\n            '__doc__': __doc__,\n        },\n    )\n\n\noverloaded_strs = overloaded_constants(\n    str,\n    __doc__=dedent(\n        \"\"\"\n        A transformer that overloads string literals.\n\n        Rewrites all constants of the form::\n\n            \"some string\"\n\n        as::\n\n            xform(\"some string\")\n\n        Parameters\n        ----------\n        xform : callable\n            Function to call on all string literals in the transformer target.\n\n        Examples\n        --------\n        >>> @overloaded_strs(lambda x: \"ayy lmao \")\n        ... def prepend_foo(s):\n        ...     
return \"foo\" + s\n        ...\n        >>> prepend_foo(\"bar\")\n        'ayy lmao bar'\n        \"\"\"\n    )\n)\noverloaded_bytes = overloaded_constants(bytes)\noverloaded_floats = overloaded_constants(float)\noverloaded_ints = overloaded_constants(int)\noverloaded_complexes = overloaded_constants(complex)\n\nhaskell_strs = overloaded_strs(tuple)\nbytearray_literals = overloaded_bytes(bytearray)\ndecimal_literals = overloaded_floats(Decimal)\n\n\ndef _start_comprehension(self, *instrs):\n    self.begin(IN_COMPREHENSION)\n    yield from self.patterndispatcher(instrs)\n\n\ndef _return_value(self, instr):\n    # TOS  = collection\n\n    yield instructions.LOAD_CONST(self.xform).steal(instr)\n    # TOS  = self.xform\n    # TOS1 = collection\n\n    yield instructions.ROT_TWO()\n    # TOS  = collection\n    # TOS1 = self.xform\n\n    yield instructions.CALL_FUNCTION(1)\n    # TOS  = self.xform(collection)\n\n    yield instr\n\n\n# Added as a method for overloaded_build\ndef _build(self, instr):\n    yield instr\n    # TOS  = new_list\n\n    yield instructions.LOAD_CONST(self.xform)\n    # TOS  = astype\n    # TOS1 = new_list\n\n    yield instructions.ROT_TWO()\n    # TOS  = new_list\n    # TOS1 = astype\n\n    yield instructions.CALL_FUNCTION(1)\n    # TOS  = astype(new_list)\n\n\ndef overloaded_build(type_, add_name=None):\n    \"\"\"Factory for constant transformers that apply to a given\n    build instruction.\n\n    Parameters\n    ----------\n    type_ : type\n        The object type to overload the construction of. 
This must be one of\n        \"buildable\" types, or types with a \"BUILD_*\" instruction.\n    add_name : str, optional\n        The suffix of the instruction that adds elements to the collection.\n        For example: 'add' or 'append'\n\n    Returns\n    -------\n    transformer : subclass of CodeTransformer\n        A new code transformer class that will overload the provided\n        literal types.\n    \"\"\"\n    typename = type_.__name__\n    instrname = 'BUILD_' + typename.upper()\n    dict_ = OrderedDict(\n        __doc__=dedent(\n            \"\"\"\n            A CodeTransformer for overloading {name} instructions.\n            \"\"\".format(name=instrname)\n        )\n    )\n\n    try:\n        build_instr = getattr(instructions, instrname)\n    except AttributeError:\n        raise TypeError(\"type %s is not buildable\" % typename)\n\n    if add_name is not None:\n        try:\n            add_instr = getattr(\n                instructions,\n                '_'.join((typename, add_name)).upper(),\n            )\n        except AttributeError:\n            raise TypeError(\"type %s is not addable\" % typename)\n\n        dict_['_start_comprehension'] = pattern(\n            build_instr, matchany[var], add_instr,\n        )(_start_comprehension)\n        dict_['_return_value'] = pattern(\n            instructions.RETURN_VALUE, startcodes=(IN_COMPREHENSION,),\n        )(_return_value)\n    else:\n        add_instr = None\n\n    dict_['_build'] = pattern(build_instr)(_build)\n\n    if not typename.endswith('s'):\n        typename = typename + 's'\n\n    return type(\n        'overloaded_' + typename,\n        (overloaded_constants(type_),),\n        dict_,\n    )\n\n\noverloaded_slices = overloaded_build(slice)\noverloaded_lists = overloaded_build(list, 'append')\noverloaded_sets = overloaded_build(set, 'add')\n\n\n# Add a special method for set overloader.\ndef transform_consts(self, consts):\n    consts = super(overloaded_sets, self).transform_consts(consts)\n  
  return tuple(\n        # Always pass a thawed set so mutations can happen inplace.\n        self.xform(set(const)) if isinstance(const, frozenset) else const\n        for const in consts\n    )\n\n\noverloaded_sets.transform_consts = transform_consts\ndel transform_consts\nfrozenset_literals = overloaded_sets(frozenset)\n\n\noverloaded_tuples = overloaded_build(tuple)\n\n\n# Add a special method for the tuple overloader.\ndef transform_consts(self, consts):\n    consts = super(overloaded_tuples, self).transform_consts(consts)\n    return tuple(\n        self.xform(const) if isinstance(const, tuple) else const\n        for const in consts\n    )\n\n\noverloaded_tuples.transform_consts = transform_consts\ndel transform_consts\n\n\n@instance\nclass islice_literals(CodeTransformer):\n    \"\"\"Transformer that turns slice indexing into an islice object.\n\n    Examples\n    --------\n    >>> from codetransformer.transformers.literals import islice_literals\n    >>> @islice_literals\n    ... def f():\n    ...     return map(str, (1, 2, 3, 4))[:2]\n    ...\n    >>> f()\n    <itertools.islice at ...>\n    >>> tuple(f())\n    ('1', '2')\n    \"\"\"\n    @pattern(instructions.BINARY_SUBSCR)\n    def _binary_subscr(self, instr):\n        yield instructions.LOAD_CONST(self._islicer).steal(instr)\n        # TOS  = self._islicer\n        # TOS1 = k\n        # TOS2 = m\n\n        yield instructions.ROT_THREE()\n        # TOS  = k\n        # TOS1 = m\n        # TOS2 = self._islicer\n\n        yield instructions.CALL_FUNCTION(2)\n        # TOS  = self._islicer(m, k)\n\n    @staticmethod\n    def _islicer(m, k):\n        if isinstance(k, slice):\n            return islice(m, k.start, k.stop, k.step)\n\n        return m[k]\n"
  },
  {
    "path": "codetransformer/transformers/pattern_matched_exceptions.py",
    "content": "import sys\n\nfrom ..core import CodeTransformer\nfrom ..instructions import (\n    BUILD_TUPLE,\n    CALL_FUNCTION,\n    COMPARE_OP,\n    LOAD_CONST,\n    POP_TOP,\n    ROT_TWO,\n)\nfrom ..patterns import pattern\n\n\ndef match(match_expr, exc_type, exc_value, exc_traceback):\n    \"\"\"\n    Called to determine whether or not an except block should be matched.\n\n    True -> enter except block\n    False -> don't enter except block\n    \"\"\"\n    # Emulate standard behavior when match_expr is an exception subclass.\n    if isinstance(match_expr, type) and issubclass(match_expr, BaseException):\n        return issubclass(exc_type, match_expr)\n\n    # Match on type and args when match_expr is an exception instance.\n    return (\n        issubclass(exc_type, type(match_expr))\n        and\n        match_expr.args == exc_value.args\n    )\n\n\nclass pattern_matched_exceptions(CodeTransformer):\n    \"\"\"\n    Allows usage of arbitrary expressions and matching functions in\n    `except` blocks.\n\n    When an exception is raised in an except block in a function decorated with\n    `pattern_matched_exceptions`, a matching function will be called with the\n    block's expression and the three values returned by sys.exc_info().  If the\n    matching function returns `True`, we enter the corresponding except-block,\n    otherwise we continue to the next block, or re-raise if there are no more\n    blocks to check\n\n    Parameters\n    ----------\n    matcher : function, optional\n        A function accepting an expression and the values of sys.exc_info,\n        returning True if the exception info \"matches\" the expression.\n\n        The default behavior is to emulate standard python when the match\n        expression is a *subtype* of Exception, and to compare exc.type and\n        exc.args when the match expression is an *instance* of Exception.\n\n    Example\n    -------\n    >>> @pattern_matched_exceptions()\n    ... def foo():\n    ...     
try:\n    ...         raise ValueError('bar')\n    ...     except ValueError('buzz'):\n    ...         return 'buzz'\n    ...     except ValueError('bar'):\n    ...         return 'bar'\n    >>> foo()\n    'bar'\n    \"\"\"\n    def __init__(self, matcher=match):\n        super().__init__()\n        self._matcher = matcher\n\n    if sys.version_info < (3, 6):\n        from ..instructions import CALL_FUNCTION_VAR\n\n        def _match(self,\n                   instr,\n                   CALL_FUNCTION_VAR=CALL_FUNCTION_VAR):\n            yield ROT_TWO().steal(instr)\n            yield POP_TOP()\n            yield LOAD_CONST(self._matcher)\n            yield ROT_TWO()\n            yield LOAD_CONST(sys.exc_info)\n            yield CALL_FUNCTION(0)\n            yield CALL_FUNCTION_VAR(1)\n\n        del CALL_FUNCTION_VAR\n    else:\n        from ..instructions import (\n            CALL_FUNCTION_EX,\n            BUILD_TUPLE_UNPACK_WITH_CALL,\n        )\n\n        def _match(self,\n                   instr,\n                   CALL_FUNCTION_EX=CALL_FUNCTION_EX,\n                   BUILD_TUPLE_UNPACK_WITH_CALL=BUILD_TUPLE_UNPACK_WITH_CALL):\n            yield ROT_TWO().steal(instr)\n            yield POP_TOP()\n            yield LOAD_CONST(self._matcher)\n            yield ROT_TWO()\n            yield BUILD_TUPLE(1)\n            yield LOAD_CONST(sys.exc_info)\n            yield CALL_FUNCTION(0)\n            yield BUILD_TUPLE_UNPACK_WITH_CALL(2)\n            yield CALL_FUNCTION_EX(0)\n\n        del CALL_FUNCTION_EX\n        del BUILD_TUPLE_UNPACK_WITH_CALL\n\n    @pattern(COMPARE_OP)\n    def _compare_op(self, instr):\n        if instr.equiv(COMPARE_OP.EXCEPTION_MATCH):\n            yield from self._match(instr)\n        else:\n            yield instr\n"
  },
  {
    "path": "codetransformer/transformers/precomputed_slices.py",
    "content": "from codetransformer.core import CodeTransformer\nfrom codetransformer.instructions import LOAD_CONST, BUILD_SLICE\nfrom codetransformer.patterns import pattern, plus\n\n\nclass precomputed_slices(CodeTransformer):\n    \"\"\"\n    An optimizing transformer that precomputes and inlines slice literals.\n\n    Example\n    -------\n    >>> from dis import dis\n    >>> def first_five(l):\n    ...     return l[:5]\n    ...\n    >>> dis(first_five)  # doctest: +SKIP\n      2           0 LOAD_FAST                0 (l)\n                  3 LOAD_CONST               0 (None)\n                  6 LOAD_CONST               1 (5)\n                  9 BUILD_SLICE              2\n                 12 BINARY_SUBSCR\n                 13 RETURN_VALUE\n    >>> dis(precomputed_slices()(first_five))  # doctest: +SKIP\n      2           0 LOAD_FAST                0 (l)\n                  3 LOAD_CONST               0 (slice(None, 5, None))\n                  6 BINARY_SUBSCR\n                  7 RETURN_VALUE\n    \"\"\"\n    @pattern(LOAD_CONST[plus], BUILD_SLICE)\n    def make_constant_slice(self, *instrs):\n        *loads, build = instrs\n        if build.arg != len(loads):\n            # There are non-constant loads before the consts:\n            # e.g. x[<non-const expr>:1:2]\n            yield from instrs\n\n        slice_ = slice(*(instr.arg for instr in loads))\n        yield LOAD_CONST(slice_).steal(loads[0])\n"
  },
  {
    "path": "codetransformer/transformers/tests/__init__.py",
    "content": ""
  },
  {
    "path": "codetransformer/transformers/tests/test_add2mul.py",
    "content": "from ..add2mul import add2mul\n\n\ndef test_add2mul():\n\n    @add2mul()\n    def foo(a, b):\n        return (a + b + 2) - 1\n\n    assert foo(1, 2) == 3\n    assert foo(2, 2) == 7\n"
  },
  {
    "path": "codetransformer/transformers/tests/test_constants.py",
    "content": "import os\nfrom sys import _getframe\nfrom types import CodeType\n\nimport pytest\n\nfrom codetransformer.code import Code\nfrom ..constants import asconstants\n\n\nbasename = os.path.basename(__file__)\n\n\ndef test_global():\n\n    @asconstants(a=1)\n    def f():\n        return a  # noqa\n\n    assert f() == 1\n\n\ndef test_name():\n    for const in compile(\n            'class C:\\n    b = a', '<string>', 'exec').co_consts:\n\n        if isinstance(const, CodeType):\n            pre_transform = Code.from_pycode(const)\n            code = asconstants(a=1).transform(pre_transform)\n            break\n    else:\n        raise AssertionError('There should be a code object in there!')\n\n    ns = {}\n    exec(code.to_pycode(), ns)\n    assert ns['b'] == 1\n\n\ndef test_closure():\n    def f():\n        a = 2\n\n        @asconstants(a=1)\n        def g():\n            return a\n\n        return g\n\n    assert f()() == 1\n\n\ndef test_store():\n    with pytest.raises(SyntaxError) as e:\n        @asconstants(a=1)\n        def f():\n            a = 1  # noqa\n\n    line = _getframe().f_lineno - 2\n    assert (\n        str(e.value) ==\n        \"can't assign to constant name 'a' (%s, line %d)\" % (basename, line)\n    )\n\n\ndef test_delete():\n    with pytest.raises(SyntaxError) as e:\n        @asconstants(a=1)\n        def f():\n            del a  # noqa\n\n    line = _getframe().f_lineno - 2\n    assert (\n        str(e.value) ==\n        \"can't delete constant name 'a' (%s, line %d)\" % (basename, line)\n    )\n\n\ndef test_argname_overlap():\n    with pytest.raises(SyntaxError) as e:\n        @asconstants(a=1)\n        def f(a):\n            pass\n\n    assert str(e.value) == \"argument names overlap with constant names: {'a'}\"\n"
  },
  {
    "path": "codetransformer/transformers/tests/test_exc_patterns.py",
    "content": "from pytest import raises\nfrom ..pattern_matched_exceptions import pattern_matched_exceptions\n\n\ndef test_patterns():\n\n    @pattern_matched_exceptions()\n    def foo():\n        try:\n            raise ValueError(\"bar\")\n        except TypeError:\n            raise\n        except ValueError(\"foo\"):\n            raise\n        except ValueError(\"bar\"):\n            return \"bar\"\n        except ValueError(\"buzz\"):\n            raise\n\n    assert foo() == \"bar\"\n\n\ndef test_patterns_bind_name():\n\n    @pattern_matched_exceptions()\n    def foo():\n        try:\n            raise ValueError(\"bar\")\n        except ValueError(\"foo\") as e:\n            return e.args[0]\n        except ValueError(\"bar\") as e:\n            return e.args[0]\n        except ValueError(\"buzz\") as e:\n            return e.args[0]\n\n    assert foo() == \"bar\"\n\n\ndef test_patterns_reraise():\n\n    @pattern_matched_exceptions()\n    def foo():\n        try:\n            raise ValueError(\"bar\")\n        except ValueError(\"bar\"):\n            raise\n\n    with raises(ValueError) as err:\n        foo()\n\n    assert err.type == ValueError\n    assert err.value.args == ('bar',)\n\n\ndef test_normal_exc_match():\n\n    @pattern_matched_exceptions()\n    def foo():\n        try:\n            raise ValueError(\"bar\")\n        except ValueError:\n            return \"matched\"\n        except ValueError(\"bar\"):\n            raise\n\n    assert foo() == \"matched\"\n\n\ndef test_exc_match_custom_func():\n\n    def match_greater(expr, exc_type, exc_value, exc_traceback):\n        return expr > exc_value.args[0]\n\n    @pattern_matched_exceptions(match_greater)\n    def foo():\n        try:\n            raise ValueError(5)\n        except 4:\n            return 4\n        except 5:\n            return 5\n        except 6:\n            return 6\n\n    assert foo() == 6\n"
  },
  {
    "path": "codetransformer/transformers/tests/test_interpolated_strings.py",
    "content": "import sys\n\nimport pytest\n\nfrom ..interpolated_strings import interpolated_strings\n\n\npytestmark = pytest.mark.skipif(\n    sys.version_info >= (3, 6),\n    reason='interpolated_strings is deprecated, just use f-strings',\n)\n\n\ndef test_interpolated_bytes():\n\n    @interpolated_strings(transform_bytes=True)\n    def enabled(a, b, c):\n        return b\"{a} {b!r} {c}\"\n\n    assert enabled(1, 2, 3) == \"{a} {b!r} {c}\".format(a=1, b=2, c=3)\n\n    @interpolated_strings()\n    def default(a, b, c):\n        return b\"{a} {b!r} {c}\"\n\n    assert default(1, 2, 3) == \"{a} {b!r} {c}\".format(a=1, b=2, c=3)\n\n    @interpolated_strings(transform_bytes=False)\n    def disabled(a, b, c):\n        return b\"{a} {b!r} {c}\"\n\n    assert disabled(1, 2, 3) == b\"{a} {b!r} {c}\"\n\n\ndef test_interpolated_str():\n\n    @interpolated_strings(transform_str=True)\n    def enabled(a, b, c):\n        return \"{a} {b!r} {c}\"\n\n    assert enabled(1, 2, 3) == \"{a} {b!r} {c}\".format(a=1, b=2, c=3)\n\n    @interpolated_strings()\n    def default(a, b, c):\n        return \"{a} {b!r} {c}\"\n\n    assert default(1, 2, 3) == \"{a} {b!r} {c}\"\n\n    @interpolated_strings(transform_bytes=False)\n    def disabled(a, b, c):\n        return \"{a} {b!r} {c}\"\n\n    assert disabled(1, 2, 3) == \"{a} {b!r} {c}\"\n\n\ndef test_no_cross_pollination():\n\n    @interpolated_strings(transform_bytes=True)\n    def ignore_str(a):\n        u = \"{a}\"\n        b = b\"{a}\"\n        return u, b\n\n    assert ignore_str(1) == (\"{a}\", \"1\")\n\n    @interpolated_strings(transform_bytes=False, transform_str=True)\n    def ignore_bytes(a):\n        u = \"{a}\"\n        b = b\"{a}\"\n        return u, b\n\n    assert ignore_bytes(1) == (\"1\", b\"{a}\")\n\n\ndef test_string_in_nested_const():\n\n    @interpolated_strings(transform_str=True)\n    def foo(a, b):\n        return (\"{a}\", ((\"{b}\",), \"{a} {b}\"), (1, 2))\n\n    assert foo(1, 2) == (\"1\", ((\"2\",), \"1 2\"), 
(1, 2))\n\n    @interpolated_strings(transform_str=True)\n    def bar(a):\n        return \"1\" in {\"{a}\"}\n\n    assert bar(1)\n    assert not bar(2)\n"
  },
  {
    "path": "codetransformer/transformers/tests/test_literals.py",
    "content": "\"\"\"\nTests for literal transformers\n\"\"\"\nfrom collections import OrderedDict\nfrom decimal import Decimal\nfrom itertools import islice\n\nfrom ..literals import (\n    islice_literals,\n    overloaded_dicts,\n    overloaded_bytes,\n    overloaded_floats,\n    overloaded_lists,\n    overloaded_sets,\n    overloaded_slices,\n    overloaded_strs,\n    overloaded_tuples,\n)\n\n\ndef test_overload_thing_with_thing_is_noop():\n    test_vals = [('a', 1), ('b', 2), ('c', 3)]\n    for t in dict, set, list, tuple:\n        expected = t(test_vals)\n        f = eval(\"lambda: %s\" % (expected,))\n        overloaded = eval(t.__name__.join(['overloaded_', 's']))(t)(f)\n        assert f() == overloaded() == expected\n\n\ndef test_overloaded_dicts():\n\n    @overloaded_dicts(OrderedDict)\n    def literal():\n        return {'a': 1, 'b': 2, 'c': 3}\n\n    assert literal() == OrderedDict((('a', 1), ('b', 2), ('c', 3)))\n\n    @overloaded_dicts(OrderedDict)\n    def comprehension():\n        return {k: n for n, k in enumerate('abc', 1)}\n\n    assert comprehension() == OrderedDict((('a', 1), ('b', 2), ('c', 3)))\n\n\ndef test_overloaded_bytes():\n\n    @overloaded_bytes(list)\n    def bytes_to_list():\n        return [\"unicode\", b\"bytes\", 1, 2, 3]\n\n    assert bytes_to_list() == [\"unicode\", list(b\"bytes\"), 1, 2, 3]\n\n    @overloaded_bytes(list)\n    def bytes_to_list_tuple():\n        return \"unicode\", b\"bytes\", 1, 2, 3\n\n    assert bytes_to_list_tuple() == (\"unicode\", list(b\"bytes\"), 1, 2, 3)\n\n    @overloaded_bytes(int)\n    def bytes_in_set(x):\n        return x in {b'3'}\n\n    assert not bytes_in_set(b'3')\n    assert bytes_in_set(3)\n\n    @overloaded_bytes(bytearray)\n    def mutable_bytes():\n        return b'123'\n\n    assert isinstance(mutable_bytes(), bytearray)\n\n\ndef test_overloaded_floats():\n\n    @overloaded_floats(Decimal)\n    def float_to_decimal():\n        return [2, 2.0, 3.5]\n\n    assert float_to_decimal() == [2, 
Decimal(2.0), Decimal(3.5)]\n\n    @overloaded_floats(Decimal)\n    def float_to_decimal_tuple():\n        return (2, 2.0, 3.5)\n\n    assert float_to_decimal_tuple() == (2, Decimal(2.0), Decimal(3.5))\n\n    @overloaded_floats(Decimal)\n    def float_in_set(x):\n        return x in {3.0}\n\n    xformed_const = float_in_set.__code__.co_consts[0]\n    assert isinstance(xformed_const, frozenset)\n    assert len(xformed_const) == 1\n    assert isinstance(tuple(xformed_const)[0], Decimal)\n    assert tuple(xformed_const)[0] == Decimal(3.0)\n\n\ndef test_overloaded_lists():\n\n    @overloaded_lists(tuple)\n    def frozen_list():\n        return [1, 2, 3]\n\n    assert frozen_list() == (1, 2, 3)\n\n    @overloaded_lists(tuple)\n    def frozen_in_tuple():\n        return [1, 2, 3], [4, 5, 6]\n\n    assert frozen_in_tuple() == ((1, 2, 3), (4, 5, 6))\n\n    @overloaded_lists(tuple)\n    def frozen_in_set():\n        # lists are not hashable but tuple are.\n        return [1, 2, 3] in {[1, 2, 3]}\n\n    assert frozen_in_set()\n\n    @overloaded_lists(tuple)\n    def frozen_comprehension():\n        return [a for a in (1, 2, 3)]\n\n    assert frozen_comprehension() == (1, 2, 3)\n\n\ndef test_overloaded_strs():\n\n    @overloaded_strs(tuple)\n    def haskell_strs():\n        return 'abc'\n\n    assert haskell_strs() == ('a', 'b', 'c')\n\n    @overloaded_strs(tuple)\n    def cs_in_tuple():\n        return 'abc', 'def'\n\n    assert cs_in_tuple() == (('a', 'b', 'c'), ('d', 'e', 'f'))\n\n\ndef test_overloaded_sets():\n\n    @overloaded_sets(frozenset)\n    def f():\n        return {'a', 'b', 'c'}\n\n    assert isinstance(f(), frozenset)\n    assert f() == frozenset({'a', 'b', 'c'})\n\n    class invertedset(set):\n        def __contains__(self, e):\n            return not super().__contains__(e)\n\n    @overloaded_sets(invertedset)\n    def containment_with_consts():\n        # This will create a frozenset FIRST and then we should pull it\n        # into an invertedset\n        
return 'd' in {'e'}\n\n    assert containment_with_consts()\n\n    def frozen_comprehension():\n        return {a for a in 'abc'}\n\n    assert frozen_comprehension() == frozenset('abc')\n\n\ndef test_overloaded_tuples():\n\n    @overloaded_tuples(list)\n    def nonconst():\n        a = 1\n        b = 2\n        c = 3\n        return (a, b, c)\n\n    assert nonconst() == [1, 2, 3]\n\n    @overloaded_tuples(list)\n    def const():\n        return (1, 2, 3)\n\n    assert const() == [1, 2, 3]\n\n\ndef test_overloaded_slices():\n\n    def concrete_slice(slice_):\n        return tuple(range(slice_.start, slice_.stop))[::slice_.step]\n\n    class C:\n        _idx = None\n\n        def __getitem__(self, idx):\n            self._idx = idx\n            return idx\n\n    c = C()\n\n    @overloaded_slices(concrete_slice)\n    def f():\n        return c[1:10:2]\n\n    f()\n    assert c._idx == (1, 3, 5, 7, 9)\n\n\ndef test_islice_literals():\n\n    @islice_literals\n    def islice_test():\n        return map(str, (1, 2, 3, 4))[:2]\n\n    assert isinstance(islice_test(), islice)\n    assert tuple(islice_test()) == ('1', '2')\n"
  },
  {
    "path": "codetransformer/transformers/tests/test_precomputed_slices.py",
    "content": "from codetransformer.code import Code\nfrom codetransformer.instructions import BUILD_SLICE, LOAD_CONST\n\nfrom ..precomputed_slices import precomputed_slices\n\n\ndef test_precomputed_slices():\n\n    @precomputed_slices()\n    def foo(a):\n        return a[1:5]\n\n    l = list(range(10))\n    assert foo(l) == l[1:5]\n    assert slice(1, 5) in foo.__code__.co_consts\n\n    instrs = Code.from_pyfunc(foo).instrs\n    assert LOAD_CONST(slice(1, 5)).equiv(instrs[1])\n    assert BUILD_SLICE not in set(map(type, instrs))\n\n\ndef test_precomputed_slices_non_const():\n\n    transformer = precomputed_slices()\n\n    def f(a, b):\n        with_non_const = a[b]\n        with_mixed = a[1, b]\n        return with_non_const, with_mixed\n\n    transformed = transformer(f)\n\n    f_instrs = Code.from_pyfunc(f).instrs\n    transformed_instrs = Code.from_pyfunc(transformed).instrs\n\n    for orig, xformed in zip(f_instrs, transformed_instrs):\n        assert orig.equiv(xformed)\n"
  },
  {
    "path": "codetransformer/utils/__init__.py",
    "content": ""
  },
  {
    "path": "codetransformer/utils/functional.py",
    "content": "\"\"\"\ncodetransformer.utils.functional\n--------------------------------\n\nUtilities for functional programming.\n\"\"\"\n\nfrom toolz import complement, flip\n\n\ndef is_a(type_):\n    \"\"\"More curryable version of isinstance.\"\"\"\n    return flip(isinstance, type_)\n\n\ndef not_a(type_):\n    \"\"\"More curryable version of not isinstance.\"\"\"\n    return complement(is_a(type_))\n\n\ndef scanl(f, n, ns):\n    \"\"\"Reduce ns by f starting with n yielding each intermediate value.\n\n    tuple(scanl(f, n, ns))[-1] == reduce(f, ns, n)\n\n    Parameters\n    ----------\n    f : callable\n        A binary function.\n    n : any\n        The starting value.\n    ns : iterable of any\n        The iterable to scan over.\n\n    Yields\n    ------\n    p : any\n        The value of reduce(f, ns[:idx]) where idx is the current index.\n\n    Examples\n    --------\n    >>> import operator as op\n    >>> tuple(scanl(op.add, 0, (1, 2, 3, 4)))\n    (0, 1, 3, 6, 10)\n    \"\"\"\n    yield n\n    for m in ns:\n        n = f(n, m)\n        yield n\n\n\ndef reverse_dict(d):\n    \"\"\"Reverse a dictionary, replacing the keys and values.\n\n    Parameters\n    ----------\n    d : dict\n        The dict to reverse.\n\n    Returns\n    -------\n    rd : dict\n        The dict with the keys and values flipped.\n\n    Examples\n    --------\n    >>> d = {'a': 1, 'b': 2, 'c': 3}\n    >>> e = reverse_dict(d)\n    >>> e == {1: 'a', 2: 'b', 3: 'c'}\n    True\n    \"\"\"\n    return {v: k for k, v in d.items()}\n\n\ndef ffill(iterable):\n    \"\"\"Forward fill non None values in some iterable.\n\n    Parameters\n    ----------\n    iterable : iterable\n        The iterable to forward fill.\n\n    Yields\n    ------\n    e : any\n        The last non None value or None if there has not been a non None value.\n    \"\"\"\n    it = iter(iterable)\n    previous = next(it)\n    yield previous\n    for e in it:\n        if e is None:\n            yield previous\n        
else:\n            previous = e\n            yield e\n\n\ndef flatten(seq, *, recurse_types=(tuple, list, set, frozenset)):\n    \"\"\"\n    Convert a (possibly nested) iterator into a flattened iterator.\n\n    Parameters\n    ----------\n    seq : iterable\n        The sequence to flatten.\n    recurse_types, optional\n        Types to recursively flatten.\n        Defaults to (tuple, list, set, frozenset).\n\n    >>> list(flatten((1, (2, 3), ((4,), 5))))\n    [1, 2, 3, 4, 5]\n\n    >>> list(flatten([\"abc\", \"def\"], recurse_types=(str,)))\n    ['a', 'b', 'c', 'd', 'e', 'f']\n    \"\"\"\n    for elem in seq:\n        if isinstance(elem, recurse_types):\n            yield from flatten(elem)\n        else:\n            yield elem\n"
  },
  {
    "path": "codetransformer/utils/immutable.py",
    "content": "\"\"\"\ncodetransformer.utils.immutable\n-------------------------------\n\nUtilities for creating and working with immutable objects.\n\"\"\"\n\nfrom collections import ChainMap\nfrom inspect import getfullargspec\nfrom itertools import starmap, repeat\nfrom textwrap import dedent\nfrom weakref import WeakKeyDictionary\n\n\nclass immutableattr:\n    \"\"\"An immutable attribute of a class.\n\n    Parameters\n    ----------\n    attr : any\n        The attribute.\n    \"\"\"\n    def __init__(self, attr):\n        self._attr = attr\n\n    def __get__(self, instance, owner):\n        return self._attr\n\n\nclass lazyval:\n    \"\"\"A memoizing property.\n\n    Parameters\n    ----------\n    func : callable\n        The function used to compute the value of the descriptor.\n    \"\"\"\n    def __init__(self, func):\n        self._cache = WeakKeyDictionary()\n        self._func = func\n\n    def __get__(self, instance, owner):\n        if instance is None:\n            return self\n\n        cache = self._cache\n        try:\n            return cache[instance]\n        except KeyError:\n            cache[instance] = val = self._func(instance)\n            return val\n\n\ndef _no_arg_init(self):\n    pass\n\n\nobject_setattr = object.__setattr__\n\n\ndef initialize_slot(obj, name, value):\n    \"\"\"Initalize an unitialized slot to a value.\n\n    If there is already a value for this slot, this is a nop.\n\n    Parameters\n    ----------\n    obj : immutable\n        An immutable object.\n    name : str\n        The name of the slot to initialize.\n    value : any\n        The value to initialize the slot to.\n    \"\"\"\n    if not hasattr(obj, name):\n        object_setattr(obj, name, value)\n\n\ndef _create_init(name, slots, defaults):\n    \"\"\"Create the __init__ function for an immutable object.\n\n    Parameters\n    ----------\n    name : str\n        The name of the immutable class.\n    slots : iterable of str\n        The __slots__ field 
from the class.\n    defaults : dict or None\n        The default values for the arguments to __init__.\n\n    Returns\n    -------\n    init : callable\n        The __init__ function for the new immutable class.\n    \"\"\"\n    if any(s.startswith('__') for s in slots):\n        raise TypeError(\n            \"immutable classes may not have slots that start with '__'\",\n        )\n\n    # If we have no defaults, ignore all of this.\n    kwdefaults = None\n    if defaults is not None:\n        hit_default = False\n        _defaults = []  # positional defaults\n        kwdefaults = {}  # kwonly defaults\n        kwdefs = False\n        for s in slots:\n            if s not in defaults and hit_default:\n                raise SyntaxError(\n                    'non-default argument follows default argument'\n                )\n\n            if not kwdefs:\n                try:\n                    # Try to grab the next default.\n                    # Pop so that we know they were all consumed when we\n                    # are done.\n                    _defaults.append(defaults.pop(s))\n                except KeyError:\n                    # Not in the dict, we haven't hit any defaults yet.\n                    pass\n                else:\n                    # We are now consuming default arguments.\n                    hit_default = True\n                if s.startswith('*'):\n                    if s in defaults:\n                        raise TypeError(\n                            'cannot set default for var args or var kwargs',\n                        )\n                    if not s.startswith('**'):\n                        kwdefs = True\n            else:\n                kwdefaults[s] = defaults.pop(s)\n\n        if defaults:\n            # We didn't consume all of the defaults.\n            raise TypeError(\n                'default value for non-existent argument%s: %s' % (\n                    's' if len(defaults) > 1 else '',\n                    ', 
'.join(starmap('{0}={1!r}'.format, defaults.items())),\n                )\n            )\n\n        # cast back to tuples\n        defaults = tuple(_defaults)\n\n    if not slots:\n        return _no_arg_init, ()\n\n    ns = {'__initialize_slot': initialize_slot}\n    # filter out lone star\n    slotnames = tuple(filter(None, (s.strip('*') for s in slots)))\n    # We are using exec here so that we can later inspect the call signature\n    # of the __init__. This makes the positional vs keywords work as intended.\n    # This is totally reasonable, no h8 m8!\n    exec(\n        'def __init__(_{name}__self, {args}):    \\n    {assign}'.format(\n            name=name,\n            args=', '.join(slots),\n            assign='\\n    '.join(\n                map(\n                    '__initialize_slot(_{1}__self, \"{0}\", {0})'.format,\n                    slotnames,\n                    repeat(name),\n                ),\n            ),\n        ),\n        ns,\n    )\n    init = ns['__init__']\n    init.__defaults__ = defaults\n    init.__kwdefaults__ = kwdefaults\n    return init, slotnames\n\n\ndef _wrapinit(init):\n    \"\"\"Wrap an existing initialize function by thawing self for the duration\n    of the init.\n\n    Parameters\n    ----------\n    init : callable\n        The user-provided init.\n\n    Returns\n    -------\n    wrapped : callable\n        The wrapped init method.\n    \"\"\"\n    try:\n        spec = getfullargspec(init)\n    except TypeError:\n        # we cannot preserve the type signature.\n        def __init__(*args, **kwargs):\n            self = args[0]\n            __setattr__._initializing.add(self)\n            init(*args, **kwargs)\n            __setattr__._initializing.remove(self)\n            _check_missing_slots(self)\n\n        return __init__\n\n    args = spec.args\n    varargs = spec.varargs\n    if not (args or varargs):\n        raise TypeError(\n            \"%r must accept at least one positional argument for 'self'\" %\n      
      getattr(init, '__qualname__', getattr(init, '__name__', init)),\n        )\n\n    if not args:\n        self = '%s[0]' % varargs\n        forward = argspec = '*' + varargs\n    else:\n        self = args[0]\n        forward = argspec = ', '.join(args)\n\n    if args and varargs:\n        forward = '%s, *%s' % (forward, spec.varargs)\n        argspec = '%s, *%s' % (argspec, spec.varargs)\n    if spec.kwonlyargs:\n        forward = '%s, %s' % (\n            forward,\n            ', '.join(map('{0}={0}'.format, spec.kwonlyargs))\n        )\n        argspec = '%s,%s%s' % (\n            argspec,\n            '*, ' if not spec.varargs else '',\n            ', '.join(spec.kwonlyargs),\n        )\n    if spec.varkw:\n        forward = '%s, **%s' % (forward, spec.varkw)\n        argspec = '%s, **%s' % (argspec, spec.varkw)\n\n    ns = {\n        '__init': init,\n        '__initializing': __setattr__._initializing,\n        '__check_missing_slots': _check_missing_slots,\n    }\n    exec(\n        dedent(\n            \"\"\"\\\n            def __init__({argspec}):\n                __initializing.add({self})\n                __init({forward})\n                __initializing.remove({self})\n                __check_missing_slots({self})\n            \"\"\".format(\n                argspec=argspec,\n                self=self,\n                forward=forward,\n            ),\n        ),\n        ns,\n    )\n    __init__ = ns['__init__']\n    __init__.__defaults__ = spec.defaults\n    __init__.__kwdefaults__ = spec.kwonlydefaults\n    __init__.__annotations__ = spec.annotations\n    return __init__\n\n\ndef _check_missing_slots(ob):\n    \"\"\"Check that all slots have been initialized when a custom __init__ method\n    is provided.\n\n    Parameters\n    ----------\n    ob : immutable\n        The instance that was just initialized.\n\n    Raises\n    ------\n    TypeError\n        Raised when the instance has not set values that are named in the\n        __slots__.\n    
\"\"\"\n    missing_slots = tuple(\n        filter(lambda s: not hasattr(ob, s), ob.__slots__),\n    )\n    if missing_slots:\n        raise TypeError(\n            'not all slots initialized in __init__, missing: {0}'.format(\n                missing_slots,\n            ),\n        )\n\n\ndef __setattr__(self, name, value):\n    if self not in __setattr__._initializing:\n        raise AttributeError('cannot mutate immutable object')\n    object_setattr(self, name, value)\n\n\n__setattr__._initializing = set()\n\n\ndef __repr__(self):\n    return '{cls}({args})'.format(\n        cls=type(self).__name__,\n        args=', '.join(starmap(\n            '{0}={1!r}'.format,\n            ((s, getattr(self, s)) for s in self.__slots__),\n        )),\n    )\n\n\nclass ImmutableMeta(type):\n    \"\"\"A metaclass for creating immutable objects.\n    \"\"\"\n    def __new__(mcls, name, bases, dict_, *, defaults=None):\n        if '__slots__' not in dict_:\n            raise TypeError('immutable classes must have a __slots__')\n        if '__setattr__' in dict_:\n            raise TypeError('immutable classes cannot have a __setattr__')\n\n        try:\n            dict_['__init__'] = _wrapinit(dict_['__init__'])\n        except KeyError:\n            dict_['__init__'], dict_['__slots__'] = _create_init(\n                name,\n                dict_['__slots__'],\n                defaults,\n            )\n\n        dict_['__setattr__'] = __setattr__\n        cls = super().__new__(mcls, name, bases, dict_)\n\n        if cls.__repr__ is object.__repr__:\n            # Put a namedtuple-like repr on this class if there is no custom\n            # repr on the class.\n            cls.__repr__ = __repr__\n\n        return cls\n\n    def __init__(self, *args, defaults=None):\n        # ignore the defaults kwarg.\n        return super().__init__(*args)\n\n\nclass immutable(metaclass=ImmutableMeta):\n    \"\"\"A base class for immutable objects.\n    \"\"\"\n    __slots__ = ()\n\n    def 
to_dict(self):\n        return {s: getattr(self, s) for s in self.__slots__}\n\n    def update(self, **updates):\n        return type(self)(**ChainMap(updates, self.to_dict()))\n"
  },
  {
    "path": "codetransformer/utils/instance.py",
    "content": "def instance(cls):\n    \"\"\"Decorator for creating one of instances.\n\n    Parameters\n    ----------\n    cls : type\n        A class.\n\n    Returns\n    -------\n    instance : cls\n        A new instance of ``cls``.\n    \"\"\"\n    return cls()\n"
  },
  {
    "path": "codetransformer/utils/no_default.py",
    "content": "@object.__new__\nclass no_default:\n    def __new__(cls):\n        return no_default\n\n    def __repr__(self):\n        return 'no_default'\n    __str__ = __repr__\n\n    def __reduce__(self):\n        return 'no_default'\n\n    def __deepcopy__(self):\n        return self\n    __copy__ = __deepcopy__\n"
  },
  {
    "path": "codetransformer/utils/pretty.py",
    "content": "\"\"\"\ncodetransformer.utils.pretty\n----------------------------\n\nUtilities for pretty-printing ASTs and code objects.\n\"\"\"\nfrom ast import iter_fields, AST, Name, Num, parse\nimport dis\nfrom functools import partial, singledispatch\nfrom io import StringIO\nfrom itertools import chain\nfrom operator import attrgetter\nimport sys\nfrom types import CodeType\n\nfrom codetransformer.code import Flag\n\n\nINCLUDE_ATTRIBUTES_DEFAULT = False\nINDENT_DEFAULT = '  '\n\n__all__ = [\n    'a',\n    'd',\n    'display',\n    'pformat_ast',\n    'pprint_ast',\n]\n\n\ndef pformat_ast(node,\n                include_attributes=INCLUDE_ATTRIBUTES_DEFAULT,\n                indent=INDENT_DEFAULT):\n    \"\"\"\n    Pretty-format an AST tree element\n\n    Parameters\n    ----------\n    node : ast.AST\n       Top-level node to render.\n    include_attributes : bool, optional\n        Whether to include node attributes.  Default False.\n    indent : str, optional.\n        Indentation string for nested expressions.  
Default is two spaces.\n    \"\"\"\n    def _fmt(node, prefix, level):\n\n        def with_indent(*strs):\n            return ''.join(((indent * level,) + strs))\n\n        with_prefix = partial(with_indent, prefix)\n\n        if isinstance(node, Name):\n            # Special Case:\n            # Render Name nodes on a single line.\n            yield with_prefix(\n                type(node).__name__,\n                '(id=',\n                repr(node.id),\n                ', ctx=',\n                type(node.ctx).__name__,\n                '()),',\n            )\n\n        elif isinstance(node, Num):\n            # Special Case:\n            # Render Num nodes on a single line without names.\n            yield with_prefix(\n                type(node).__name__,\n                '(%r),' % node.n,\n            )\n\n        elif isinstance(node, AST):\n            fields_attrs = list(\n                chain(\n                    iter_fields(node),\n                    iter_attributes(node) if include_attributes else (),\n                )\n            )\n            if not fields_attrs:\n                # Special Case:\n                # Render the whole expression on one line if there are no\n                # attributes.\n                yield with_prefix(type(node).__name__, '(),')\n                return\n\n            yield with_prefix(type(node).__name__, '(')\n            for name, value in fields_attrs:\n                yield from _fmt(value, name + '=', level + 1)\n            # Put a trailing comma if we're not at the top level.\n            yield with_indent(')', ',' if level > 0 else '')\n\n        elif isinstance(node, list):\n            if not node:\n                # Special Case:\n                # Render empty lists on one line.\n                yield with_prefix('[],')\n                return\n\n            yield with_prefix('[')\n            yield from chain.from_iterable(\n                map(partial(_fmt, prefix='', level=level + 1), node)\n      
      )\n            yield with_indent('],')\n        else:\n            yield with_prefix(repr(node), ',')\n\n    return '\\n'.join(_fmt(node, prefix='', level=0))\n\n\ndef _extend_name(prev, parent_co):\n    return prev + (\n        '.<locals>.' if parent_co.co_flags & Flag.CO_NEWLOCALS else '.'\n    )\n\n\ndef pprint_ast(node,\n               include_attributes=INCLUDE_ATTRIBUTES_DEFAULT,\n               indent=INDENT_DEFAULT,\n               file=None):\n    \"\"\"\n    Pretty-print an AST tree.\n\n    Parameters\n    ----------\n    node : ast.AST\n       Top-level node to render.\n    include_attributes : bool, optional\n        Whether to include node attributes.  Default False.\n    indent : str, optional.\n        Indentation string for nested expressions.  Default is two spaces.\n    file : None or file-like object, optional\n        File to use to print output.  If the default of `None` is passed, we\n        use sys.stdout.\n    \"\"\"\n    if file is None:\n        file = sys.stdout\n\n    print(\n        pformat_ast(\n            node,\n            include_attributes=include_attributes,\n            indent=indent\n        ),\n        file=file,\n    )\n\n\ndef walk_code(co, _prefix=''):\n    \"\"\"\n    Traverse a code object, finding all consts which are also code objects.\n\n    Yields pairs of (name, code object).\n    \"\"\"\n    name = _prefix + co.co_name\n    yield name, co\n    yield from chain.from_iterable(\n        walk_code(c, _prefix=_extend_name(name, co))\n        for c in co.co_consts\n        if isinstance(c, CodeType)\n    )\n\n\ndef iter_attributes(node):\n    attrs = node._attributes\n    if not attrs:\n        return\n\n    yield from zip(attrs, attrgetter(*attrs)(node))\n\n\ndef a(text, mode='exec', indent='  ', file=None):\n    \"\"\"\n    Interactive convenience for displaying the AST of a code string.\n\n    Writes a pretty-formatted AST-tree to `file`.\n\n    Parameters\n    ----------\n    text : str\n        Text of Python 
code to render as AST.\n    mode : {'exec', 'eval'}, optional\n        Mode for `ast.parse`.  Default is 'exec'.\n    indent : str, optional\n        String to use for indenting nested expressions.  Default is two spaces.\n    file : None or file-like object, optional\n        File to use to print output.  If the default of `None` is passed, we\n        use sys.stdout.\n    \"\"\"\n    pprint_ast(parse(text, mode=mode), indent=indent, file=file)\n\n\ndef d(obj, mode='exec', file=None):\n    \"\"\"\n    Interactive convenience for displaying the disassembly of a function,\n    module, or code string.\n\n    Compiles `text` and recursively traverses the result looking for `code`\n    objects to render with `dis.dis`.\n\n    Parameters\n    ----------\n    obj : str, CodeType, or object with __code__ attribute\n        Object to disassemble.\n        If `obj` is an instance of CodeType, we use it unchanged.\n        If `obj` is a string, we compile it with `mode` and then disassemble.\n        Otherwise, we look for a `__code__` attribute on `obj`.\n    mode : {'exec', 'eval'}, optional\n        Mode for `compile`.  Default is 'exec'.\n    file : None or file-like object, optional\n        File to use to print output.  
If the default of `None` is passed, we\n        use sys.stdout.\n    \"\"\"\n    if file is None:\n        file = sys.stdout\n\n    for name, co in walk_code(extract_code(obj, compile_mode=mode)):\n        print(name, file=file)\n        print('-' * len(name), file=file)\n        dis.dis(co, file=file)\n        print('', file=file)\n\n\n@singledispatch\ndef extract_code(obj, compile_mode):\n    \"\"\"\n    Generic function for converting objects into instances of `CodeType`.\n    \"\"\"\n    try:\n        code = obj.__code__\n        if isinstance(code, CodeType):\n            return code\n        raise ValueError(\n            \"{obj} has a `__code__` attribute, \"\n            \"but it's an instance of {notcode!r}, not CodeType.\".format(\n                obj=obj,\n                notcode=type(code).__name__,\n            )\n        )\n    except AttributeError:\n        raise ValueError(\"Don't know how to extract code from %s.\" % obj)\n\n\n@extract_code.register(CodeType)\ndef _(obj, compile_mode):\n    return obj\n\n\n@extract_code.register(str)  # noqa\ndef _(obj, compile_mode):\n    return compile(obj, '<show>', compile_mode)\n\n\n_DISPLAY_TEMPLATE = \"\"\"\\\n====\nText\n====\n\n{text}\n\n====================\nAbstract Syntax Tree\n====================\n\n{ast}\n\n===========\nDisassembly\n===========\n\n{code}\n\"\"\"\n\n\ndef display(text, mode='exec', file=None):\n    \"\"\"\n    Show `text`, rendered as AST and as Bytecode.\n\n    Parameters\n    ----------\n    text : str\n        Text of Python code to render.\n    mode : {'exec', 'eval'}, optional\n        Mode for `ast.parse` and `compile`.  Default is 'exec'.\n    file : None or file-like object, optional\n        File to use to print output.  
If the default of `None` is passed, we\n        use sys.stdout.\n    \"\"\"\n\n    if file is None:\n        file = sys.stdout\n\n    ast_section = StringIO()\n    a(text, mode=mode, file=ast_section)\n\n    code_section = StringIO()\n    d(text, mode=mode, file=code_section)\n\n    rendered = _DISPLAY_TEMPLATE.format(\n        text=text,\n        ast=ast_section.getvalue(),\n        code=code_section.getvalue(),\n    )\n    print(rendered, file=file)\n"
  },
  {
    "path": "codetransformer/utils/tests/__init__.py",
    "content": ""
  },
  {
    "path": "codetransformer/utils/tests/test_immutable.py",
    "content": "from inspect import getfullargspec\n\nimport pytest\n\nfrom codetransformer.utils.immutable import immutable\n\n\nclass a(immutable):\n    __slots__ = 'a',\n\n    def spec(__self, a):\n        pass\n\n\nclass b(immutable):\n    __slots__ = 'a', 'b'\n\n    def spec(__self, a, b):\n        pass\n\n\nclass c(immutable):\n    __slots__ = 'a', 'b', '*c'\n\n    def spec(__self, a, b, *c):\n        pass\n\n\nclass d(immutable):\n    __slots__ = 'a', 'b', '**c'\n\n    def spec(__self, a, b, **c):\n        pass\n\n\nclass e(immutable):\n    __slots__ = 'a', 'b', '*', 'c'\n\n    def spec(__self, a, b, *, c):\n        pass\n\n\nclass f(immutable):\n    __slots__ = 'a', 'b', '*c', 'd'\n\n    def spec(__self, a, b, *c, d):\n        pass\n\n\nclass g(immutable, defaults={'a': 1}):\n    __slots__ = 'a',\n\n    def spec(__self, a=1):\n        pass\n\n\nclass h(immutable, defaults={'b': 2}):\n    __slots__ = 'a', 'b'\n\n    def spec(__self, a, b=2):\n        pass\n\n\nclass i(immutable, defaults={'a': 1, 'b': 2}):\n    __slots__ = 'a', 'b'\n\n    def spec(__self, a=1, b=2):\n        pass\n\n\nclass j(immutable, defaults={'c': 3}):\n    __slots__ = 'a', 'b', '*', 'c'\n\n    def spec(__self, a, b, *, c=3):\n        pass\n\n\n@pytest.mark.parametrize('cls', (a, b, c, d, e, f, g, h, i, j))\ndef test_created_signature_single(cls):\n    assert getfullargspec(cls) == getfullargspec(cls.spec)\n\n\nclass k(immutable):\n    __slots__ = 'a',\n\n    def __init__(self, a):\n        pass\n\n\nclass l(immutable):\n    __slots__ = 'a',\n\n    def __init__(self, *a):\n        pass\n\n\nclass m(immutable):\n    __slots__ = 'a',\n\n    def __init__(self, **a):\n        pass\n\n\nclass n(immutable):\n    __slots__ = 'a',\n\n    def __init__(self, *, a):\n        pass\n\n\nclass o(immutable):\n    __slots__ = 'a', 'b'\n\n    def __init__(self, a, b=2):\n        pass\n\n\nclass p(immutable):\n    __slots__ = 'a', 'b'\n\n    def __init__(self, a=1, b=2):\n        pass\n\n\nclass 
q(immutable):\n    __slots__ = 'a', 'b'\n\n    def __init__(self, a, *b):\n        pass\n\n\nclass r(immutable):\n    __slots__ = 'a', 'b'\n\n    def __init__(self, a=1, *b):\n        pass\n\n\nclass s(immutable):\n    __slots__ = 'a', 'b', 'c'\n\n    def __init__(self, a=1, *b, c):\n        pass\n\n\nclass t(immutable):\n    __slots__ = 'a', 'b', 'c'\n\n    def __init__(self, a, *b, c=3):\n        pass\n\n\nclass u(immutable):\n    __slots__ = 'a', 'b', 'c'\n\n    def __init__(self, a=1, *b, c=3):\n        pass\n\n\nclass v(immutable):\n    __slots__ = 'a', 'b', 'c'\n\n    def __init__(self, a, **b):\n        pass\n\n\nclass w(immutable):\n    __slots__ = 'a', 'b', 'c'\n\n    def __init__(self, a, b, **c):\n        pass\n\n\nclass x(immutable):\n    __slots__ = 'a', 'b', 'c'\n\n    def __init__(self, a, *b, **c):\n        pass\n\n\nclass y(immutable):\n    __slots__ = 'a', 'b', 'c', 'd'\n\n    def __init__(self, a, *b, c, **d):\n        pass\n\n\nclass z(immutable):\n    __slots__ = 'a', 'b', 'c', 'd'\n\n    def __init__(self, a, *b, c=1, **d):\n        pass\n\n\n@pytest.mark.parametrize('cls', (\n    k, l, m, n, o, p, q, r, s, t, u, v, w, x, y, z,\n))\ndef test_preserve_custom_init_signature(cls):\n    assert getfullargspec(cls) == getfullargspec(cls.__init__)\n"
  },
  {
    "path": "codetransformer/utils/tests/test_pretty.py",
    "content": "from io import StringIO\nfrom textwrap import dedent\nfrom types import CodeType\n\nfrom ..pretty import a, walk_code\n\n\ndef test_a(capsys):\n    text = dedent(\n        \"\"\"\n        def inc(a):\n            b = a + 1\n            return b\n        \"\"\"\n    )\n    expected = dedent(\n        \"\"\"\\\n        Module(\n          body=[\n            FunctionDef(\n              name='inc',\n              args=arguments(\n                args=[\n                  arg(\n                    arg='a',\n                    annotation=None,\n                  ),\n                ],\n                vararg=None,\n                kwonlyargs=[],\n                kw_defaults=[],\n                kwarg=None,\n                defaults=[],\n              ),\n              body=[\n                Assign(\n                  targets=[\n                    Name(id='b', ctx=Store()),\n                  ],\n                  value=BinOp(\n                    left=Name(id='a', ctx=Load()),\n                    op=Add(),\n                    right=Num(1),\n                  ),\n                ),\n                Return(\n                  value=Name(id='b', ctx=Load()),\n                ),\n              ],\n              decorator_list=[],\n              returns=None,\n            ),\n          ],\n        )\n        \"\"\"\n    )\n\n    a(text)\n    stdout, stderr = capsys.readouterr()\n    assert stdout == expected\n    assert stderr == ''\n\n    file_ = StringIO()\n    a(text, file=file_)\n    assert capsys.readouterr() == ('', '')\n\n    result = file_.getvalue()\n    assert result == expected\n\n\ndef test_walk_code():\n    module = dedent(\n        \"\"\"\\\n        class Foo:\n            def bar(self):\n                def buzz():\n                    pass\n                def bazz():\n                    pass\n                return buzz\n        \"\"\"\n    )\n\n    co = compile(module, '<test>', 'exec')\n\n    foo = [c for c in co.co_consts if 
isinstance(c, CodeType)][0]\n    bar = [c for c in foo.co_consts if isinstance(c, CodeType)][0]\n    buzz = [c for c in bar.co_consts\n            if isinstance(c, CodeType) and c.co_name == 'buzz'][0]\n    bazz = [c for c in bar.co_consts\n            if isinstance(c, CodeType) and c.co_name == 'bazz'][0]\n\n    result = list(walk_code(co))\n    expected = [\n        ('<module>', co),\n        ('<module>.Foo', foo),\n        ('<module>.Foo.bar', bar),\n        ('<module>.Foo.bar.<locals>.buzz', buzz),\n        ('<module>.Foo.bar.<locals>.bazz', bazz),\n    ]\n\n    assert result == expected\n"
  },
  {
    "path": "docs/.dir-locals.el",
    "content": ";; Set compile-commnd for everything in this directory to\n;; \"make -C <this-directory> html\"\n\n;; This is an association list mapping directory prefixes (in this case nil,\n;; meaning \"all files\"), to another association list mapping dir-local variable\n;; names to values.  An equivalent Python structure would be something like:\n;; {None: {'compile-command': \"make -C .. html\"}}\n((nil . ((compile-command . (concat \"make -C .. html\")))))\n"
  },
  {
    "path": "docs/Makefile",
    "content": "# Makefile for Sphinx documentation\n#\n\n# You can set these variables from the command line.\nSPHINXOPTS    =\nSPHINXBUILD   = sphinx-build\nPAPER         =\nBUILDDIR      = build\n\n# User-friendly check for sphinx-build\nifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1)\n$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/)\nendif\n\n# Internal variables.\nPAPEROPT_a4     = -D latex_paper_size=a4\nPAPEROPT_letter = -D latex_paper_size=letter\nALLSPHINXOPTS   = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source\n# the i18n builder cannot share the environment and doctrees with the others\nI18NSPHINXOPTS  = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source\n\n.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest coverage gettext\n\nhelp:\n\t@echo \"Please use \\`make <target>' where <target> is one of\"\n\t@echo \"  html       to make standalone HTML files\"\n\t@echo \"  dirhtml    to make HTML files named index.html in directories\"\n\t@echo \"  singlehtml to make a single large HTML file\"\n\t@echo \"  pickle     to make pickle files\"\n\t@echo \"  json       to make JSON files\"\n\t@echo \"  htmlhelp   to make HTML files and a HTML help project\"\n\t@echo \"  qthelp     to make HTML files and a qthelp project\"\n\t@echo \"  applehelp  to make an Apple Help Book\"\n\t@echo \"  devhelp    to make HTML files and a Devhelp project\"\n\t@echo \"  epub       to make an epub\"\n\t@echo \"  latex      to make LaTeX files, you can set PAPER=a4 or PAPER=letter\"\n\t@echo \"  latexpdf   to make LaTeX files and run them through pdflatex\"\n\t@echo \"  latexpdfja to make 
LaTeX files and run them through platex/dvipdfmx\"\n\t@echo \"  text       to make text files\"\n\t@echo \"  man        to make manual pages\"\n\t@echo \"  texinfo    to make Texinfo files\"\n\t@echo \"  info       to make Texinfo files and run them through makeinfo\"\n\t@echo \"  gettext    to make PO message catalogs\"\n\t@echo \"  changes    to make an overview of all changed/added/deprecated items\"\n\t@echo \"  xml        to make Docutils-native XML files\"\n\t@echo \"  pseudoxml  to make pseudoxml-XML files for display purposes\"\n\t@echo \"  linkcheck  to check all external links for integrity\"\n\t@echo \"  doctest    to run all doctests embedded in the documentation (if enabled)\"\n\t@echo \"  coverage   to run coverage check of the documentation (if enabled)\"\n\nclean:\n\trm -rf $(BUILDDIR)/*\n\nhtml:\n\t$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html\n\t@echo\n\t@echo \"Build finished. The HTML pages are in $(BUILDDIR)/html.\"\n\nlivehtml:\n\tsphinx-autobuild -p 9999 -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html\n\ndirhtml:\n\t$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml\n\t@echo\n\t@echo \"Build finished. The HTML pages are in $(BUILDDIR)/dirhtml.\"\n\nsinglehtml:\n\t$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml\n\t@echo\n\t@echo \"Build finished. 
The HTML page is in $(BUILDDIR)/singlehtml.\"\n\npickle:\n\t$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle\n\t@echo\n\t@echo \"Build finished; now you can process the pickle files.\"\n\njson:\n\t$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json\n\t@echo\n\t@echo \"Build finished; now you can process the JSON files.\"\n\nhtmlhelp:\n\t$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp\n\t@echo\n\t@echo \"Build finished; now you can run HTML Help Workshop with the\" \\\n\t      \".hhp project file in $(BUILDDIR)/htmlhelp.\"\n\nqthelp:\n\t$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp\n\t@echo\n\t@echo \"Build finished; now you can run \"qcollectiongenerator\" with the\" \\\n\t      \".qhcp project file in $(BUILDDIR)/qthelp, like this:\"\n\t@echo \"# qcollectiongenerator $(BUILDDIR)/qthelp/codetransformer.qhcp\"\n\t@echo \"To view the help file:\"\n\t@echo \"# assistant -collectionFile $(BUILDDIR)/qthelp/codetransformer.qhc\"\n\napplehelp:\n\t$(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp\n\t@echo\n\t@echo \"Build finished. The help book is in $(BUILDDIR)/applehelp.\"\n\t@echo \"N.B. You won't be able to view it unless you put it in\" \\\n\t      \"~/Library/Documentation/Help or install it in your application\" \\\n\t      \"bundle.\"\n\ndevhelp:\n\t$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp\n\t@echo\n\t@echo \"Build finished.\"\n\t@echo \"To view the help file:\"\n\t@echo \"# mkdir -p $$HOME/.local/share/devhelp/codetransformer\"\n\t@echo \"# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/codetransformer\"\n\t@echo \"# devhelp\"\n\nepub:\n\t$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub\n\t@echo\n\t@echo \"Build finished. 
The epub file is in $(BUILDDIR)/epub.\"\n\nlatex:\n\t$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex\n\t@echo\n\t@echo \"Build finished; the LaTeX files are in $(BUILDDIR)/latex.\"\n\t@echo \"Run \\`make' in that directory to run these through (pdf)latex\" \\\n\t      \"(use \\`make latexpdf' here to do that automatically).\"\n\nlatexpdf:\n\t$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex\n\t@echo \"Running LaTeX files through pdflatex...\"\n\t$(MAKE) -C $(BUILDDIR)/latex all-pdf\n\t@echo \"pdflatex finished; the PDF files are in $(BUILDDIR)/latex.\"\n\nlatexpdfja:\n\t$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex\n\t@echo \"Running LaTeX files through platex and dvipdfmx...\"\n\t$(MAKE) -C $(BUILDDIR)/latex all-pdf-ja\n\t@echo \"pdflatex finished; the PDF files are in $(BUILDDIR)/latex.\"\n\ntext:\n\t$(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text\n\t@echo\n\t@echo \"Build finished. The text files are in $(BUILDDIR)/text.\"\n\nman:\n\t$(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man\n\t@echo\n\t@echo \"Build finished. The manual pages are in $(BUILDDIR)/man.\"\n\ntexinfo:\n\t$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo\n\t@echo\n\t@echo \"Build finished. The Texinfo files are in $(BUILDDIR)/texinfo.\"\n\t@echo \"Run \\`make' in that directory to run these through makeinfo\" \\\n\t      \"(use \\`make info' here to do that automatically).\"\n\ninfo:\n\t$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo\n\t@echo \"Running Texinfo files through makeinfo...\"\n\tmake -C $(BUILDDIR)/texinfo info\n\t@echo \"makeinfo finished; the Info files are in $(BUILDDIR)/texinfo.\"\n\ngettext:\n\t$(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale\n\t@echo\n\t@echo \"Build finished. 
The message catalogs are in $(BUILDDIR)/locale.\"\n\nchanges:\n\t$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes\n\t@echo\n\t@echo \"The overview file is in $(BUILDDIR)/changes.\"\n\nlinkcheck:\n\t$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck\n\t@echo\n\t@echo \"Link check complete; look for any errors in the above output \" \\\n\t      \"or in $(BUILDDIR)/linkcheck/output.txt.\"\n\ndoctest:\n\t$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest\n\t@echo \"Testing of doctests in the sources finished, look at the \" \\\n\t      \"results in $(BUILDDIR)/doctest/output.txt.\"\n\ncoverage:\n\t$(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage\n\t@echo \"Testing of coverage in the sources finished, look at the \" \\\n\t      \"results in $(BUILDDIR)/coverage/python.txt.\"\n\nxml:\n\t$(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml\n\t@echo\n\t@echo \"Build finished. The XML files are in $(BUILDDIR)/xml.\"\n\npseudoxml:\n\t$(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml\n\t@echo\n\t@echo \"Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml.\"\n"
  },
  {
    "path": "docs/source/appendix.rst",
    "content": "API Reference\n=============\n\n``codetransformer.transformers``\n--------------------------------\n\n.. automodule:: codetransformer.transformers\n   :members:\n\n.. autodata:: islice_literals\n   :annotation:\n\n.. data:: bytearray_literals\n\n   A transformer that converts :class:`bytes` literals to :class:`bytearray`.\n\n.. data:: decimal_literals\n\n   A transformer that converts :class:`float` literals to :class:`~decimal.Decimal`.\n\n``codetransformer.code``\n------------------------\n\n.. autoclass:: codetransformer.code.Code\n   :members:\n\n.. autoclass:: codetransformer.code.Flag\n   :members:\n   :undoc-members:\n\n``codetransformer.core``\n------------------------\n\n.. autoclass:: codetransformer.core.CodeTransformer\n   :members:\n\n``codetransformer.instructions``\n--------------------------------\n\nFor details on particular instructions, see `the dis stdlib module docs.`_\n\n.. automodule:: codetransformer.instructions\n   :members:\n   :undoc-members:\n\n\n``codetransformer.patterns``\n----------------------------\n\n.. autoclass:: codetransformer.patterns.pattern\n\n.. autodata:: codetransformer.patterns.DEFAULT_STARTCODE\n\nDSL Objects\n~~~~~~~~~~~\n\n.. autodata:: codetransformer.patterns.matchany\n.. autoclass:: codetransformer.patterns.seq\n.. autodata:: codetransformer.patterns.var\n.. autodata:: codetransformer.patterns.plus\n.. autodata:: codetransformer.patterns.option\n\n``codetransformer.utils``\n-------------------------\n\n.. automodule:: codetransformer.utils.pretty\n   :members:\n\n.. automodule:: codetransformer.utils.immutable\n   :members: immutable, lazyval, immutableattr\n\n.. automodule:: codetransformer.utils.functional\n   :members:\n\n\n``codetransformer.decompiler``\n------------------------------\n\n.. automodule:: codetransformer.decompiler\n   :members: decompile, pycode_to_body, DecompilationContext, DecompilationError\n\n.. 
_`the dis stdlib module docs.` : https://docs.python.org/3.4/library/dis.html#python-bytecode-instructions\n"
  },
  {
    "path": "docs/source/code-objects.rst",
    "content": "===========================\n Working with Code Objects\n===========================\n\nThe :class:`~codetransformer.code.Code` type is the foundational abstraction in\n``codetransformer``.  It provides high-level APIs for working with\nlogically-grouped sets of instructions and for converting to and from CPython's\nnative :class:`code <types.CodeType>` type.\n\nConstructing Code Objects\n=========================\n\nThe most common way constructing a Code object is to use the\n:meth:`~codetransformer.code.Code.from_pycode` classmethod, which accepts a\nCPython :class:`code <types.CodeType>` object.\n\nThere are two common ways of building raw code objects:\n\n- CPython functions have a ``__code__`` attribute, which contains the bytecode\n  executed by the function.\n- The :func:`compile` builtin can compile a string of Python source code into a\n  code object.\n\nUsing :meth:`~codetransformer.code.Code.from_pycode`, we can build a Code\nobject and inspect its contents::\n\n    >>> from codetransformer import Code\n    >>> def add2(x):\n    ...     return x + 2\n    ...\n    >>> co = Code.from_pycode(add.__code__)\n    >>> co.instrs\n    (LOAD_FAST('x'), LOAD_CONST(2), BINARY_ADD, RETURN_VALUE)\n    >>> co.argnames\n    ('x',)\n    >>> c.consts\n    (2,)\n\nWe can convert our Code object back into its raw form via the\n:meth:`~codetransformer.code.Code.to_pycode` method::\n\n    >>> co.to_pycode()\n    <code object add2 at 0x7f6ba05f2030, file \"<stdin>\", line 1>\n\nBuilding Transformers\n=====================\n\nOnce we have the ability to convert to and from an abstract code\nrepresentation, we gain the ability to perform transformations on that abtract\nrepresentation.\n\nLet's say that we want to replace the addition operation in our ``add2``\nfunction with a multiplication. 
We could try to mutate our\n:class:`~codetransformer.code.Code` object directly before converting back to\nPython bytecode, but there are many subtle invariants [#f1]_ between the\ninstructions and the other pieces of metadata that must be maintained to ensure\nthat the generated output can be executed correctly.\n\nRather than encourage users to mutate Code objects in place,\n``codetransformer`` provides the :class:`~codetransformer.core.CodeTransformer`\nclass, which allows users to declaratively describe operations to perform on\nsequences of instructions.\n\nImplemented as a :class:`~codetransformer.core.CodeTransformer`, our \"replace\nadditions with multiplications\" operation looks like this:\n\n.. literalinclude:: add2mul.py\n   :language: python\n   :lines: 10-\n\nThe important piece here is the ``_add2mul`` method, which has been decorated\nwith a :class:`~codetransformer.patterns.pattern`. Patterns provide an API for\ndescribing sequences of instructions to match against for replacement and/or\nmodification.  The :class:`~codetransformer.core.CodeTransformer` base class\nlooks at methods with registered patterns and compares them against the\ninstructions of the Code object under transformation.  For each matching\nsequence of instructions, the decorated method is called with all matching\ninstructions \\*-unpacked into the method.  The method's job is to take the\ninput instructions and return an iterable of new instructions to serve as\nreplacements. It is often convenient to implement transformer methods as\n`generator functions`_, as we've done here.\n\nIn this example, we've supplied the simplest possible pattern: a single\ninstruction type to match. 
[#f2]_ Our transformer method will be called on\nevery ``BINARY_ADD`` instruction in the target code object, and it will yield a\n``BINARY_MULTIPLY`` as replacement each time.\n\nApplying Transformers\n=====================\n\nTo apply a :class:`~codetransformer.core.CodeTransformer` to a function, we\nconstruct an instance of the transformer and call it on the function we want to\nmodify.  The result is a new function whose instructions have been rewritten\napplying our transformer's methods to matched sequences of the input function's\ninstructions.  The original function is not mutated in place.\n\n**Example:**\n\n.. code-block:: python\n\n  >>> transformer = add2mul()\n  >>> mul2 = transformer(add2) # mult2 is a brand-new function\n  >>> mul2(5)\n  10\n\nWhen we don't care about having access to the pre-transformed version of a\nfunction, it's convenient and idiomatic to apply transformers as decorators::\n\n >>> @add2mul()\n ... def mul2(x):\n ...     return x + 2\n ...\n >>> mul2(5)\n 10\n\n.. [#f1] For example, if we add a new constant, we have to ensure that we\n         correctly maintain the indices of existing constants in the generated\n         code's ``co_consts``, and if we replace an instruction that was the\n         target of a jump, we have to make sure that the jump instruction\n         resolves correctly to our new instruction.\n\n.. [#f2] Many more complex patterns are possible.  See the docs for\n         :class:`codetransformer.patterns.pattern` for more examples.\n.. _`generator functions` : https://docs.python.org/2/tutorial/classes.html#generators\n"
  },
  {
    "path": "docs/source/conf.py",
    "content": "#!/usr/bin/env python3\n# -*- coding: utf-8 -*-\n#\n# codetransformer documentation build configuration file, created by\n# sphinx-quickstart on Sat Sep  5 21:06:06 2015.\n#\n# This file is execfile()d with the current directory set to its\n# containing dir.\n#\n# Note that not all possible configuration values are present in this\n# autogenerated file.\n#\n# All configuration values have a default; values that are commented out\n# serve to show the default.\n\nimport sys\nimport os\nimport shlex\n\n# If extensions (or modules to document with autodoc) are in another directory,\n# add these directories to sys.path here. If the directory is relative to the\n# documentation root, use os.path.abspath to make it absolute, like shown here.\n# sys.path.insert(0, os.path.abspath('..'))\n\n# -- General configuration ------------------------------------------------\n\n# If your documentation needs a minimal Sphinx version, state it here.\n#needs_sphinx = '1.0'\n\n# Add any Sphinx extension module names here, as strings. 
They can be\n# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom\n# ones.\nextensions = [\n    'sphinx.ext.autodoc',\n    'sphinx.ext.autosummary',\n    'sphinx.ext.doctest',\n    'sphinx.ext.intersphinx',\n    'sphinx.ext.todo',\n    'sphinx.ext.coverage',\n    'sphinx.ext.viewcode',\n    'sphinx.ext.napoleon',\n]\n\n# Add any paths that contain templates here, relative to this directory.\ntemplates_path = ['_templates']\n\n# The suffix(es) of source filenames.\n# You can specify multiple suffix as a list of string:\n# source_suffix = ['.rst', '.md']\nsource_suffix = '.rst'\n\n# The encoding of source files.\n#source_encoding = 'utf-8-sig'\n\n# The master toctree document.\nmaster_doc = 'index'\n\n# General information about the project.\nproject = 'codetransformer'\ncopyright = '2016, Joe Jevnik and Scott Sanderson'\nauthor = 'Joe Jevnik and Scott Sanderson'\n\n# The version info for the project you're documenting, acts as replacement for\n# |version| and |release|, also used in various other places throughout the\n# built documents.\n#\n# The short X.Y version.\nversion = '0.6.0'\n# The full version, including alpha/beta/rc tags.\nrelease = '0.6.0'\n\n# The language for content autogenerated by Sphinx. Refer to documentation\n# for a list of supported languages.\n#\n# This is also used if you do content translation via gettext catalogs.\n# Usually you set \"language\" from the command line for these cases.\nlanguage = None\n\n# There are two options for replacing |today|: either, you set today to some\n# non-false value, then it is used:\n#today = ''\n# Else, today_fmt is used as the format for a strftime call.\n#today_fmt = '%B %d, %Y'\n\n# List of patterns, relative to source directory, that match files and\n# directories to ignore when looking for source files.\nexclude_patterns = []\n\n# The reST default role (used for this markup: `text`) to use for all\n# documents.\n#default_role = None\n\n# If true, '()' will be appended to :func: etc. 
cross-reference text.\n#add_function_parentheses = True\n\n# If true, the current module name will be prepended to all description\n# unit titles (such as .. function::).\n#add_module_names = True\n\n# If true, sectionauthor and moduleauthor directives will be shown in the\n# output. They are ignored by default.\n#show_authors = False\n\n# The name of the Pygments (syntax highlighting) style to use.\npygments_style = 'sphinx'\n\n# A list of ignored prefixes for module index sorting.\n#modindex_common_prefix = []\n\n# If true, keep warnings as \"system message\" paragraphs in the built documents.\n#keep_warnings = False\n\n# If true, `todo` and `todoList` produce output, else they produce nothing.\ntodo_include_todos = True\n\n\n# -- Options for HTML output ----------------------------------------------\n\n# The theme to use for HTML and HTML Help pages.  See the documentation for\n# a list of builtin themes.\nhtml_theme = 'sphinx_rtd_theme'\n\n# Theme options are theme-specific and customize the look and feel of a theme\n# further.  For a list of options available for each theme, see the\n# documentation.\n#html_theme_options = {}\n\n# Add any paths that contain custom themes here, relative to this directory.\n#html_theme_path = []\n\n# The name for this set of Sphinx documents.  If None, it defaults to\n# \"<project> v<release> documentation\".\n#html_title = None\n\n# A shorter title for the navigation bar.  Default is the same as html_title.\n#html_short_title = None\n\n# The name of an image file (relative to this directory) to place at the top\n# of the sidebar.\n#html_logo = None\n\n# The name of an image file (within the static path) to use as favicon of the\n# docs.  This file should be a Windows icon file (.ico) being 16x16 or 32x32\n# pixels large.\n#html_favicon = None\n\n# Add any paths that contain custom static files (such as style sheets) here,\n# relative to this directory. 
They are copied after the builtin static files,\n# so a file named \"default.css\" will overwrite the builtin \"default.css\".\nhtml_static_path = ['_static']\n\n# Add any extra paths that contain custom files (such as robots.txt or\n# .htaccess) here, relative to this directory. These files are copied\n# directly to the root of the documentation.\n#html_extra_path = []\n\n# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,\n# using the given strftime format.\n#html_last_updated_fmt = '%b %d, %Y'\n\n# If true, SmartyPants will be used to convert quotes and dashes to\n# typographically correct entities.\n#html_use_smartypants = True\n\n# Custom sidebar templates, maps document names to template names.\n#html_sidebars = {}\n\n# Additional templates that should be rendered to pages, maps page names to\n# template names.\n#html_additional_pages = {}\n\n# If false, no module index is generated.\n#html_domain_indices = True\n\n# If false, no index is generated.\n#html_use_index = True\n\n# If true, the index is split into individual pages for each letter.\n#html_split_index = False\n\n# If true, links to the reST sources are added to the pages.\n#html_show_sourcelink = True\n\n# If true, \"Created using Sphinx\" is shown in the HTML footer. Default is True.\n#html_show_sphinx = True\n\n# If true, \"(C) Copyright ...\" is shown in the HTML footer. Default is True.\n#html_show_copyright = True\n\n# If true, an OpenSearch description file will be output, and all pages will\n# contain a <link> tag referring to it.  The value of this option must be the\n# base URL from which the finished HTML is served.\n#html_use_opensearch = ''\n\n# This is the file name suffix for HTML files (e.g. 
\".xhtml\").\n#html_file_suffix = None\n\n# Language to be used for generating the HTML full-text search index.\n# Sphinx supports the following languages:\n#   'da', 'de', 'en', 'es', 'fi', 'fr', 'h', 'it', 'ja'\n#   'nl', 'no', 'pt', 'ro', 'r', 'sv', 'tr'\n#html_search_language = 'en'\n\n# A dictionary with options for the search language support, empty by default.\n# Now only 'ja' uses this config value\n#html_search_options = {'type': 'default'}\n\n# The name of a javascript file (relative to the configuration directory) that\n# implements a search results scorer. If empty, the default will be used.\n#html_search_scorer = 'scorer.js'\n\n# Output file base name for HTML help builder.\nhtmlhelp_basename = 'codetransformerdoc'\n\n# -- Options for LaTeX output ---------------------------------------------\n\nlatex_elements = {\n# The paper size ('letterpaper' or 'a4paper').\n#'papersize': 'letterpaper',\n\n# The font size ('10pt', '11pt' or '12pt').\n#'pointsize': '10pt',\n\n# Additional stuff for the LaTeX preamble.\n#'preamble': '',\n\n# Latex figure (float) alignment\n#'figure_align': 'htbp',\n}\n\n# Grouping the document tree into LaTeX files. 
List of tuples\n# (source start file, target name, title,\n#  author, documentclass [howto, manual, or own class]).\nlatex_documents = [\n  (master_doc, 'codetransformer.tex', 'codetransformer Documentation',\n   'Joe Jevnik and Scott Sanderson', 'manual'),\n]\n\n# The name of an image file (relative to this directory) to place at the top of\n# the title page.\n#latex_logo = None\n\n# For \"manual\" documents, if this is true, then toplevel headings are parts,\n# not chapters.\n#latex_use_parts = False\n\n# If true, show page references after internal links.\n#latex_show_pagerefs = False\n\n# If true, show URL addresses after external links.\n#latex_show_urls = False\n\n# Documents to append as an appendix to all manuals.\n#latex_appendices = []\n\n# If false, no module index is generated.\n#latex_domain_indices = True\n\n\n# -- Options for manual page output ---------------------------------------\n\n# One entry per manual page. List of tuples\n# (source start file, name, description, authors, manual section).\nman_pages = [\n    (master_doc, 'codetransformer', 'codetransformer Documentation',\n     [author], 1)\n]\n\n# If true, show URL addresses after external links.\n#man_show_urls = False\n\n\n# -- Options for Texinfo output -------------------------------------------\n\n# Grouping the document tree into Texinfo files. 
List of tuples\n# (source start file, target name, title, author,\n#  dir menu entry, description, category)\ntexinfo_documents = [\n  (master_doc, 'codetransformer', 'codetransformer Documentation',\n   author, 'codetransformer', 'One line description of project.',\n   'Miscellaneous'),\n]\n\n# Documents to append as an appendix to all manuals.\n#texinfo_appendices = []\n\n# If false, no module index is generated.\n#texinfo_domain_indices = True\n\n# How to display URL addresses: 'footnote', 'no', or 'inline'.\n#texinfo_show_urls = 'footnote'\n\n# If true, do not generate a @detailmenu in the \"Top\" node's menu.\n#texinfo_no_detailmenu = False\n\n\n# Example configuration for intersphinx: refer to the Python standard library.\nintersphinx_mapping = {'https://docs.python.org/3/': None}\n\n# This makes a big difference for Code's many attributes.\nnapoleon_use_ivar = True\n"
  },
  {
    "path": "docs/source/index.rst",
    "content": "codetransformer\n===============\n\nBytecode transformers for CPython inspired by the ``ast`` module's\n``NodeTransformer``.\n\n``codetransformer`` is a library that provides utilities for working with\nCPython bytecode at runtime.  Among other things, it provides:\n\n- A :class:`~codetransformer.code.Code` type for representing and manipulating\n  Python bytecode.\n- An :class:`~codetransformer.instructions.Instruction` type, with\n  :class:`subclasses <codetransformer.instructions.BINARY_ADD>` for each opcode\n  used by the CPython interpreter.\n- A :class:`~codetransformer.core.CodeTransformer` type providing a\n  pattern-based API for describing transformations on\n  :class:`~codetransformer.code.Code` objects.  Example transformers can be\n  found in :mod:`codetransformer.transformers`.\n- An experimental :mod:`decompiler <codetransformer.decompiler>` for\n  determining the AST tree that would generate a code object.\n\nThe existence of ``codetransformer`` is motivated by the desire to override\nparts of the python language that cannot be easily hooked via more standard\nmeans. Examples of program transformations made possible using code\ntransformers include:\n\n* Overriding the ``is`` and ``not`` operators.\n* `Overloading Python's data structure literals`_.\n* `Optimizing functions by freezing globals as constants`_.\n* `Exception handlers that match on exception instances`_.\n\nContents:\n\n.. toctree::\n   :maxdepth: 2\n\n   code-objects.rst\n   patterns.rst\n   magics.rst\n   appendix.rst\n\n\nIndices and tables\n------------------\n\n* :ref:`genindex`\n* :ref:`modindex`\n* :ref:`search`\n\n.. _lazy: https://github.com/llllllllll/lazy_python\n.. _Overloading Python's data structure literals: appendix.html\\#codetransformer.transformers.literals.overloaded_dicts\n.. _Optimizing functions by freezing globals as constants: appendix.html#codetransformer.transformers.asconstants\n.. 
_Exception handlers that match on exception instances: appendix.html#codetransformer.transformers.exc_patterns.pattern_matched_exceptions\n"
  },
  {
    "path": "docs/source/magics.rst",
    "content": "Interactive Conveniences\n========================\n\nWhen developing projects using :mod:`codetransformer`, it's often helpful to be\nable to quickly and easily visualize the AST and/or disassembly generated by\nCPython for a given source text.\n\nThe :mod:`codetransformer.utils.pretty` module provides utilities for viewing\nAST trees and the disassembly of nested code objects:\n\n.. autosummary::\n\n   ~codetransformer.utils.pretty.a\n   ~codetransformer.utils.pretty.d\n   ~codetransformer.utils.pretty.display\n   ~codetransformer.utils.pretty.extract_code\n\nFor users of `IPython`_, :mod:`codetransformer` provides an IPython extension\nthat adds ``%%ast`` and ``%%dis`` magics.\n\n.. code-block:: python\n\n    In [1]: %load_ext codetransformer\n    In [2]: %%dis\n       ...: def foo(a, b):\n       ...:     return a + b\n       ...:\n    <module>\n    --------\n      1           0 LOAD_CONST               0 (<code object foo at 0x7f4c428a9b70, file \"<show>\", line 1>)\n                  3 LOAD_CONST               1 ('foo')\n                  6 MAKE_FUNCTION            0\n                  9 STORE_NAME               0 (foo)\n                 12 LOAD_CONST               2 (None)\n                 15 RETURN_VAL\n\n    <module>.foo\n    ------------\n      2           0 LOAD_FAST                0 (a)\n                  3 LOAD_FAST                1 (b)\n                  6 BINARY_ADD\n                  7 RETURN_VAL\n\n\n   In [3]: %%ast\n       ...: def foo(a, b):\n       ...:     return a + b\n       ...:\n    Module(\n      body=[\n        FunctionDef(\n          name='foo',\n          args=arguments(\n            args=[\n              arg(\n                arg='a',\n                annotation=None,\n              ),\n              arg(\n                arg='b',\n                annotation=None,\n              ),\n            ],\n            vararg=None,\n            kwonlyargs=[],\n            kw_defaults=[],\n            kwarg=None,\n            
defaults=[],\n          ),\n          body=[\n            Return(\n              value=BinOp(\n                left=Name(id='a', ctx=Load()),\n                op=Add(),\n                right=Name(id='b', ctx=Load()),\n              ),\n            ),\n          ],\n          decorator_list=[],\n          returns=None,\n        ),\n      ],\n    )\n\n.. _`IPython` : https://ipython.readthedocs.org/en/stable/\n"
  },
  {
    "path": "docs/source/patterns.rst",
    "content": "============\n Pattern API\n============\n\nMost bytecode transformations are best expressed by identifying a pattern in the\nbytecode and emitting some replacement. ``codetransformer`` makes it easy to\nexpress and work on these patterns by defining a small dsl for use in\n:class:`~codetransformer.core.CodeTransformer` classes.\n\nMatchables\n==========\n\nA pattern is expressed by a sequence of matchables paired with the startcode. A\nmatchable is anything that we can compare a sequence of bytecode to.\n\nInstructions\n------------\n\nThe most atomic matchable is any\n:class:`~codetransformer.instructions.Instruction` class. These classes each can\nbe used to define a pattern that matches instances of that instruction. For\nexample, the pattern::\n\n  LOAD_CONST\n\nwill match a single :class:`~codetransformer.instructions.LOAD_CONST` instance.\n\nAll matchables support the following operations:\n\n``or``\n------\n\nMatchables can be or'd together to create a new matchable that matches either\nthe lhs or the rhs. For example::\n\n  LOAD_CONST | LOAD_FAST\n\nwill match a either a single :class:`~codetransformer.instructions.LOAD_CONST`\nor a :class:`~codetransformer.instructions.LOAD_FAST`.\n\n``not``\n-------\n\nMatchables may be negated to create a new matchable that matches anything the\noriginal did not match. For example::\n\n  ~LOAD_CONST\n\nwill match any instruction except an instance of\n:class:`~codetransformer.instructions.LOAD_CONST`.\n\n``matchrange``\n--------------\n\nIt is possible to create a matchable from another such that it matches the same\npattern repeated multiple times. For example::\n\n  LOAD_CONST[3]\n\nwill match exactly three :class:`~codetransformer.instructions.LOAD_CONST`\ninstances in a row. 
This will not match on any less than three and will match on\nthe first three if there are more than three\n:class:`~codetransformer.instructions.LOAD_CONST` instructions in a row.\n\nThis can be specified with an upper bound also like::\n\n  LOAD_CONST[3, 5]\n\nThis matches between three and five\n:class:`~codetransformer.instructions.LOAD_CONST` instructions. This is greedy\nmeaning that if four or five :class:`~codetransformer.instructions.LOAD_CONST`\ninstructions exist it will consume as many as possible up to five.\n\n``var``\n-------\n\n:data:`~codetransformer.patterns.var` is a modifier that matches zero or more\ninstances of another matchable. For example::\n\n  LOAD_CONST[var]\n\nwill match as many :class:`~codetransformer.instructions.LOAD_CONST`\ninstructions appear in a row or an empty instruction set.\n\n``plus``\n--------\n\n:data:`~codetransformer.patterns.plus` is a modifier that matches one or more\ninstances of another matchable. For example::\n\n  LOAD_CONST[plus]\n\nwill match as many :class:`~codetransformer.instructions.LOAD_CONST`\ninstructions appear in a row as long as there is at least one.\n\n``option``\n----------\n\n:data:`~codetransformer.patterns.option` is a modifier that matches zero or one\ninstance of another matchable. For example::\n\n  LOAD_CONST[option]\n\nwill match either an empty instruction set or exactly one\n:class:`~codetransformer.instructions.LOAD_CONST`.\n\n``matchany``\n------------\n\n:data:`~codetransformer.patterns.matchany` is a special matchable that matches\nany single instruction. ``...`` is an alias for\n:data:`~codetransformer.patterns.matchany`.\n\n``seq``\n-------\n\n:class:`~codetransformer.patterns.seq` is a matchable that matches a sequence of\nother matchables. 
 For example::\n\n  seq(LOAD_CONST, ..., ~LOAD_CONST)\n\nwill match a single :class:`~codetransformer.instructions.LOAD_CONST` followed\nby any instruction followed by any instruction that is not a\n:class:`~codetransformer.instructions.LOAD_CONST`. This example shows how we can\ncompose all of our matchables together to build more complex matchables.\n\n``pattern``\n===========\n\nIn order to use our DSL we need a way to register transformations to these\nmatchables. To do this we may decorate methods of a\n:class:`~codetransformer.core.CodeTransformer` with\n:class:`~codetransformer.patterns.pattern`. This registers the function to the\npattern. For example::\n\n  class MyTransformer(CodeTransformer):\n      @pattern(LOAD_CONST, ..., ~LOAD_CONST)\n      def _f(self, load_const, any, not_load_const):\n          ...\n\nThe argument list of a :class:`~codetransformer.patterns.pattern` is implicitly\nmade into a `seq`_. When using ``MyTransformer`` to transform some bytecode\n``_f`` will be called only when we see a\n:class:`~codetransformer.instructions.LOAD_CONST` followed by any instruction\nfollowed by any instruction that is not a\n:class:`~codetransformer.instructions.LOAD_CONST`. This function will be passed\nthese three instruction objects positionally and should yield the instructions\nto replace them with.\n\nResolution Order\n----------------\n\nPatterns are checked in the order they are defined in the class body. This is\nbecause some patterns may overlap with each other. 
For example, given the two\nclasses::\n\n  class OrderOne(CodeTransformer):\n      @pattern(LOAD_CONST)\n      def _load_const(self, instr):\n          print('LOAD_CONST')\n          yield instr\n\n      @pattern(...)\n      def _any(self, instr):\n          print('...')\n          yield instr\n\n\n  class OrderTwo(CodeTransformer):\n      @pattern(...)\n      def _any(self, instr):\n          print('...')\n          yield instr\n\n      @pattern(LOAD_CONST)\n      def _load_const(self, instr):\n          print('LOAD_CONST')\n          yield instr\n\n\n\n\nand the following bytecode sequence::\n\n  LOAD_CONST POP_TOP LOAD_CONST RETURN_VALUE\n\nWhen running with ``OrderOne`` we would see::\n\n\n  LOAD_CONST\n  ...\n  LOAD_CONST\n  ...\n\nbut when running with ``OrderTwo``::\n\n  ...\n  ...\n  ...\n  ...\n\nThis is because we will always match on the ``...`` pattern where ``OrderOne``\nwill check against :class:`~codetransformer.instructions.LOAD_CONST` before\nfalling back to the :data:`~codetransformer.instructions.matchany`.\n\nContextual Patterns\n-------------------\n\nSometimes a pattern should only be matched given that some condition has been\nmet. An example of this is that you want to modify comprehensions. In order to\nbe sure that you are only modifying the bodies of the comprehensions we must\nonly match when we know we are in\none. :class:`~codetransformer.patterns.pattern` accepts a keyword only argument\n``startcodes`` which is a set of contexts where this pattern should apply. By\ndefault this is :data:`~codetransformer.patterns.DEFAULT_STARTCODE` which is the\ndefault state. A startcode may be anything hashable; however it is best to use\nstrings or integer constants to make it easy to debug.\n\nThe :meth:`~codetransformer.core.CodeTransformer.begin` method enters a new\nstartcode. 
For example::\n\n  class FindDictComprehensions(CodeTransformer):\n      @pattern(BUILD_MAP, matchany[var], MAP_ADD)\n      def _start_comprehension(self, *instrs):\n          print('starting dict comprehension')\n          self.begin('in_comprehension')\n          yield from instrs\n\n      @pattern(RETURN_VALUE, startcodes=('in_comprehension',))\n      def _return_from_comprehension(self, instr):\n          print('returning from comprehension')\n          yield instr\n\n      @pattern(RETURN_VALUE)\n      def _return_default(self, instr):\n          print('returning from non-comprehension')\n          yield instr\n\n\nThis transformer will find dictionary comprehensions and enter a new\nstartcode. Inside this startcode we will handle\n:class:`~codetransformer.instructions.RETURN_VALUE` instructions differently.\n\n.. code-block:: python\n\n   >>> @FindDictComprehensions()\n   ... def f():\n   ...     pass\n   ...\n   returning from non-comprehension\n\n   >>> @FindDictComprehensions()\n   ... def g():\n   ...     {a: b for a, b in it}\n   ...\n   starting dict comprehension\n   returning from comprehension\n   returning from non-comprehension\n\n\nIt is important to remember that when we recurse into a nested code object (like\na comprehension) that we do not inherit the startcode from our parent. Instead\nit always starts at :data:`~codetransformer.patterns.DEFAULT_STARTCODE`.\n"
  },
  {
    "path": "requirements_doc.txt",
    "content": "Sphinx==1.3.5\nsphinx-rtd-theme==0.1.9\n"
  },
  {
    "path": "setup.cfg",
    "content": "# See the docstring in versioneer.py for instructions. Note that you must\n# re-run 'versioneer.py setup' after changing this section, and commit the\n# resulting files.\n[versioneer]\nVCS=git\nstyle=pep440\nversionfile_source=codetransformer/_version.py\nversionfile_build=codetransformer/_version.py\ntag_prefix=\nparentdir_prefix=codetransformer-\n"
  },
  {
    "path": "setup.py",
    "content": "#!/usr/bin/env python\nfrom setuptools import setup, find_packages\nimport sys\n\nimport versioneer\n\nlong_description = ''\n\nif 'upload' in sys.argv:\n    with open('README.rst') as f:\n        long_description = f.read()\n\n\nsetup(\n    name='codetransformer',\n    version=versioneer.get_version(),\n    cmdclass=versioneer.get_cmdclass(),\n    description='Python code object transformers',\n    author='Joe Jevnik and Scott Sanderson',\n    author_email='joejev@gmail.com',\n    packages=find_packages(),\n    long_description=long_description,\n    license='GPL-2',\n    classifiers=[\n        'Development Status :: 3 - Alpha',\n        'License :: OSI Approved :: GNU General Public License v2 (GPLv2)',\n        'Natural Language :: English',\n        'Programming Language :: Python :: 3.4',\n        'Programming Language :: Python :: 3.5',\n        'Programming Language :: Python :: 3 :: Only',\n        'Programming Language :: Python :: Implementation :: CPython',\n        'Operating System :: POSIX',\n        'Topic :: Software Development :: Pre-processors',\n    ],\n    url='https://github.com/llllllllll/codetransformer',\n    install_requires=['toolz'],\n    extras_require={\n        'dev': [\n            'flake8==3.3.0',\n            'pytest==2.8.4',\n            'pytest-cov==2.2.1',\n        ],\n    },\n)\n"
  },
  {
    "path": "tox.ini",
    "content": "[tox]\nenvlist=py{34,35,36}\nskip_missing_interpreters=True\n\n[testenv]\ncommands=\n    pip install -e .[dev]\n    py.test\n\n[pytest]\naddopts = --doctest-modules --cov codetransformer --cov-report term-missing --ignore setup.py\ntestpaths = codetransformer\nnorecursedirs = decompiler\n"
  },
  {
    "path": "versioneer.py",
    "content": "\n# Version: 0.15\n\n\"\"\"\nThe Versioneer\n==============\n\n* like a rocketeer, but for versions!\n* https://github.com/warner/python-versioneer\n* Brian Warner\n* License: Public Domain\n* Compatible With: python2.6, 2.7, 3.2, 3.3, 3.4, and pypy\n* [![Latest Version]\n(https://pypip.in/version/versioneer/badge.svg?style=flat)\n](https://pypi.python.org/pypi/versioneer/)\n* [![Build Status]\n(https://travis-ci.org/warner/python-versioneer.png?branch=master)\n](https://travis-ci.org/warner/python-versioneer)\n\nThis is a tool for managing a recorded version number in distutils-based\npython projects. The goal is to remove the tedious and error-prone \"update\nthe embedded version string\" step from your release process. Making a new\nrelease should be as easy as recording a new tag in your version-control\nsystem, and maybe making new tarballs.\n\n\n## Quick Install\n\n* `pip install versioneer` to somewhere to your $PATH\n* add a `[versioneer]` section to your setup.cfg (see below)\n* run `versioneer install` in your source tree, commit the results\n\n## Version Identifiers\n\nSource trees come from a variety of places:\n\n* a version-control system checkout (mostly used by developers)\n* a nightly tarball, produced by build automation\n* a snapshot tarball, produced by a web-based VCS browser, like github's\n  \"tarball from tag\" feature\n* a release tarball, produced by \"setup.py sdist\", distributed through PyPI\n\nWithin each source tree, the version identifier (either a string or a number,\nthis tool is format-agnostic) can come from a variety of places:\n\n* ask the VCS tool itself, e.g. \"git describe\" (for checkouts), which knows\n  about recent \"tags\" and an absolute revision-id\n* the name of the directory into which the tarball was unpacked\n* an expanded VCS keyword ($Id$, etc)\n* a `_version.py` created by some earlier build step\n\nFor released software, the version identifier is closely related to a VCS\ntag. 
Some projects use tag names that include more than just the version\nstring (e.g. \"myproject-1.2\" instead of just \"1.2\"), in which case the tool\nneeds to strip the tag prefix to extract the version identifier. For\nunreleased software (between tags), the version identifier should provide\nenough information to help developers recreate the same tree, while also\ngiving them an idea of roughly how old the tree is (after version 1.2, before\nversion 1.3). Many VCS systems can report a description that captures this,\nfor example `git describe --tags --dirty --always` reports things like\n\"0.7-1-g574ab98-dirty\" to indicate that the checkout is one revision past the\n0.7 tag, has a unique revision id of \"574ab98\", and is \"dirty\" (it has\nuncommitted changes.\n\nThe version identifier is used for multiple purposes:\n\n* to allow the module to self-identify its version: `myproject.__version__`\n* to choose a name and prefix for a 'setup.py sdist' tarball\n\n## Theory of Operation\n\nVersioneer works by adding a special `_version.py` file into your source\ntree, where your `__init__.py` can import it. This `_version.py` knows how to\ndynamically ask the VCS tool for version information at import time.\n\n`_version.py` also contains `$Revision$` markers, and the installation\nprocess marks `_version.py` to have this marker rewritten with a tag name\nduring the `git archive` command. As a result, generated tarballs will\ncontain enough information to get the proper version.\n\nTo allow `setup.py` to compute a version too, a `versioneer.py` is added to\nthe top level of your source tree, next to `setup.py` and the `setup.cfg`\nthat configures it. 
This overrides several distutils/setuptools commands to\ncompute the version when invoked, and changes `setup.py build` and `setup.py\nsdist` to replace `_version.py` with a small static file that contains just\nthe generated version data.\n\n## Installation\n\nFirst, decide on values for the following configuration variables:\n\n* `VCS`: the version control system you use. Currently accepts \"git\".\n\n* `style`: the style of version string to be produced. See \"Styles\" below for\n  details. Defaults to \"pep440\", which looks like\n  `TAG[+DISTANCE.gSHORTHASH[.dirty]]`.\n\n* `versionfile_source`:\n\n  A project-relative pathname into which the generated version strings should\n  be written. This is usually a `_version.py` next to your project's main\n  `__init__.py` file, so it can be imported at runtime. If your project uses\n  `src/myproject/__init__.py`, this should be `src/myproject/_version.py`.\n  This file should be checked in to your VCS as usual: the copy created below\n  by `setup.py setup_versioneer` will include code that parses expanded VCS\n  keywords in generated tarballs. The 'build' and 'sdist' commands will\n  replace it with a copy that has just the calculated version string.\n\n  This must be set even if your project does not have any modules (and will\n  therefore never import `_version.py`), since \"setup.py sdist\" -based trees\n  still need somewhere to record the pre-calculated version strings. Anywhere\n  in the source tree should do. If there is a `__init__.py` next to your\n  `_version.py`, the `setup.py setup_versioneer` command (described below)\n  will append some `__version__`-setting assignments, if they aren't already\n  present.\n\n* `versionfile_build`:\n\n  Like `versionfile_source`, but relative to the build directory instead of\n  the source directory. These will differ when your setup.py uses\n  'package_dir='. 
If you have `package_dir={'myproject': 'src/myproject'}`,\n  then you will probably have `versionfile_build='myproject/_version.py'` and\n  `versionfile_source='src/myproject/_version.py'`.\n\n  If this is set to None, then `setup.py build` will not attempt to rewrite\n  any `_version.py` in the built tree. If your project does not have any\n  libraries (e.g. if it only builds a script), then you should use\n  `versionfile_build = None` and override `distutils.command.build_scripts`\n  to explicitly insert a copy of `versioneer.get_version()` into your\n  generated script.\n\n* `tag_prefix`:\n\n  a string, like 'PROJECTNAME-', which appears at the start of all VCS tags.\n  If your tags look like 'myproject-1.2.0', then you should use\n  tag_prefix='myproject-'. If you use unprefixed tags like '1.2.0', this\n  should be an empty string.\n\n* `parentdir_prefix`:\n\n  a optional string, frequently the same as tag_prefix, which appears at the\n  start of all unpacked tarball filenames. If your tarball unpacks into\n  'myproject-1.2.0', this should be 'myproject-'. To disable this feature,\n  just omit the field from your `setup.cfg`.\n\nThis tool provides one script, named `versioneer`. That script has one mode,\n\"install\", which writes a copy of `versioneer.py` into the current directory\nand runs `versioneer.py setup` to finish the installation.\n\nTo versioneer-enable your project:\n\n* 1: Modify your `setup.cfg`, adding a section named `[versioneer]` and\n  populating it with the configuration values you decided earlier (note that\n  the option names are not case-sensitive):\n\n  ````\n  [versioneer]\n  VCS = git\n  style = pep440\n  versionfile_source = src/myproject/_version.py\n  versionfile_build = myproject/_version.py\n  tag_prefix = \"\"\n  parentdir_prefix = myproject-\n  ````\n\n* 2: Run `versioneer install`. 
This will do the following:\n\n  * copy `versioneer.py` into the top of your source tree\n  * create `_version.py` in the right place (`versionfile_source`)\n  * modify your `__init__.py` (if one exists next to `_version.py`) to define\n    `__version__` (by calling a function from `_version.py`)\n  * modify your `MANIFEST.in` to include both `versioneer.py` and the\n    generated `_version.py` in sdist tarballs\n\n  `versioneer install` will complain about any problems it finds with your\n  `setup.py` or `setup.cfg`. Run it multiple times until you have fixed all\n  the problems.\n\n* 3: add a `import versioneer` to your setup.py, and add the following\n  arguments to the setup() call:\n\n        version=versioneer.get_version(),\n        cmdclass=versioneer.get_cmdclass(),\n\n* 4: commit these changes to your VCS. To make sure you won't forget,\n  `versioneer install` will mark everything it touched for addition using\n  `git add`. Don't forget to add `setup.py` and `setup.cfg` too.\n\n## Post-Installation Usage\n\nOnce established, all uses of your tree from a VCS checkout should get the\ncurrent version string. All generated tarballs should include an embedded\nversion string (so users who unpack them will not need a VCS tool installed).\n\nIf you distribute your project through PyPI, then the release process should\nboil down to two steps:\n\n* 1: git tag 1.0\n* 2: python setup.py register sdist upload\n\nIf you distribute it through github (i.e. users use github to generate\ntarballs with `git archive`), the process is:\n\n* 1: git tag 1.0\n* 2: git push; git push --tags\n\nVersioneer will report \"0+untagged.NUMCOMMITS.gHASH\" until your tree has at\nleast one tag in its history.\n\n## Version-String Flavors\n\nCode which uses Versioneer can learn about its version string at runtime by\nimporting `_version` from your main `__init__.py` file and running the\n`get_versions()` function. From the \"outside\" (e.g. 
in `setup.py`), you can\nimport the top-level `versioneer.py` and run `get_versions()`.\n\nBoth functions return a dictionary with different flavors of version\ninformation:\n\n* `['version']`: A condensed version string, rendered using the selected\n  style. This is the most commonly used value for the project's version\n  string. The default \"pep440\" style yields strings like `0.11`,\n  `0.11+2.g1076c97`, or `0.11+2.g1076c97.dirty`. See the \"Styles\" section\n  below for alternative styles.\n\n* `['full-revisionid']`: detailed revision identifier. For Git, this is the\n  full SHA1 commit id, e.g. \"1076c978a8d3cfc70f408fe5974aa6c092c949ac\".\n\n* `['dirty']`: a boolean, True if the tree has uncommitted changes. Note that\n  this is only accurate if run in a VCS checkout, otherwise it is likely to\n  be False or None\n\n* `['error']`: if the version string could not be computed, this will be set\n  to a string describing the problem, otherwise it will be None. It may be\n  useful to throw an exception in setup.py if this is set, to avoid e.g.\n  creating tarballs with a version string of \"unknown\".\n\nSome variants are more useful than others. Including `full-revisionid` in a\nbug report should allow developers to reconstruct the exact code being tested\n(or indicate the presence of local changes that should be shared with the\ndevelopers). 
`version` is suitable for display in an \"about\" box or a CLI\n`--version` output: it can be easily compared against release notes and lists\nof bugs fixed in various releases.\n\nThe installer adds the following text to your `__init__.py` to place a basic\nversion in `YOURPROJECT.__version__`:\n\n    from ._version import get_versions\n    __version__ = get_versions()['version']\n    del get_versions\n\n## Styles\n\nThe setup.cfg `style=` configuration controls how the VCS information is\nrendered into a version string.\n\nThe default style, \"pep440\", produces a PEP440-compliant string, equal to the\nun-prefixed tag name for actual releases, and containing an additional \"local\nversion\" section with more detail for in-between builds. For Git, this is\nTAG[+DISTANCE.gHEX[.dirty]] , using information from `git describe --tags\n--dirty --always`. For example \"0.11+2.g1076c97.dirty\" indicates that the\ntree is like the \"1076c97\" commit but has uncommitted changes (\".dirty\"), and\nthat this commit is two revisions (\"+2\") beyond the \"0.11\" tag. For released\nsoftware (exactly equal to a known tag), the identifier will only contain the\nstripped tag, e.g. \"0.11\".\n\nOther styles are available. See details.md in the Versioneer source tree for\ndescriptions.\n\n## Debugging\n\nVersioneer tries to avoid fatal errors: if something goes wrong, it will tend\nto return a version of \"0+unknown\". 
To investigate the problem, run `setup.py\nversion`, which will run the version-lookup code in a verbose mode, and will\ndisplay the full contents of `get_versions()` (including the `error` string,\nwhich may help identify what went wrong).\n\n## Updating Versioneer\n\nTo upgrade your project to a new release of Versioneer, do the following:\n\n* install the new Versioneer (`pip install -U versioneer` or equivalent)\n* edit `setup.cfg`, if necessary, to include any new configuration settings\n  indicated by the release notes\n* re-run `versioneer install` in your source tree, to replace\n  `SRC/_version.py`\n* commit any changed files\n\n### Upgrading to 0.15\n\nStarting with this version, Versioneer is configured with a `[versioneer]`\nsection in your `setup.cfg` file. Earlier versions required the `setup.py` to\nset attributes on the `versioneer` module immediately after import. The new\nversion will refuse to run (raising an exception during import) until you\nhave provided the necessary `setup.cfg` section.\n\nIn addition, the Versioneer package provides an executable named\n`versioneer`, and the installation process is driven by running `versioneer\ninstall`. In 0.14 and earlier, the executable was named\n`versioneer-installer` and was run without an argument.\n\n### Upgrading to 0.14\n\n0.14 changes the format of the version string. 0.13 and earlier used\nhyphen-separated strings like \"0.11-2-g1076c97-dirty\". 0.14 and beyond use a\nplus-separated \"local version\" section strings, with dot-separated\ncomponents, like \"0.11+2.g1076c97\". PEP440-strict tools did not like the old\nformat, but should be ok with the new one.\n\n### Upgrading from 0.11 to 0.12\n\nNothing special.\n\n### Upgrading from 0.10 to 0.11\n\nYou must add a `versioneer.VCS = \"git\"` to your `setup.py` before re-running\n`setup.py setup_versioneer`. 
This will enable the use of additional\nversion-control systems (SVN, etc) in the future.\n\n## Future Directions\n\nThis tool is designed to make it easily extended to other version-control\nsystems: all VCS-specific components are in separate directories like\nsrc/git/ . The top-level `versioneer.py` script is assembled from these\ncomponents by running make-versioneer.py . In the future, make-versioneer.py\nwill take a VCS name as an argument, and will construct a version of\n`versioneer.py` that is specific to the given VCS. It might also take the\nconfiguration arguments that are currently provided manually during\ninstallation by editing setup.py . Alternatively, it might go the other\ndirection and include code from all supported VCS systems, reducing the\nnumber of intermediate scripts.\n\n\n## License\n\nTo make Versioneer easier to embed, all its code is hereby released into the\npublic domain. The `_version.py` that it creates is also in the public\ndomain.\n\n\"\"\"\n\nfrom __future__ import print_function\ntry:\n    import configparser\nexcept ImportError:\n    import ConfigParser as configparser\nimport errno\nimport json\nimport os\nimport re\nimport subprocess\nimport sys\n\n\nclass VersioneerConfig:\n    pass\n\n\ndef get_root():\n    # we require that all commands are run from the project root, i.e. 
the\n    # directory that contains setup.py, setup.cfg, and versioneer.py .\n    root = os.path.realpath(os.path.abspath(os.getcwd()))\n    setup_py = os.path.join(root, \"setup.py\")\n    versioneer_py = os.path.join(root, \"versioneer.py\")\n    if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)):\n        # allow 'python path/to/setup.py COMMAND'\n        root = os.path.dirname(os.path.realpath(os.path.abspath(sys.argv[0])))\n        setup_py = os.path.join(root, \"setup.py\")\n        versioneer_py = os.path.join(root, \"versioneer.py\")\n    if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)):\n        err = (\"Versioneer was unable to run the project root directory. \"\n               \"Versioneer requires setup.py to be executed from \"\n               \"its immediate directory (like 'python setup.py COMMAND'), \"\n               \"or in a way that lets it use sys.argv[0] to find the root \"\n               \"(like 'python path/to/setup.py COMMAND').\")\n        raise VersioneerBadRootError(err)\n    try:\n        # Certain runtime workflows (setup.py install/develop in a setuptools\n        # tree) execute all dependencies in a single python process, so\n        # \"versioneer\" may be imported multiple times, and python's shared\n        # module-import table will cache the first one. 
So we can't use\n        # os.path.dirname(__file__), as that will find whichever\n        # versioneer.py was first imported, even in later projects.\n        me = os.path.realpath(os.path.abspath(__file__))\n        if os.path.splitext(me)[0] != os.path.splitext(versioneer_py)[0]:\n            print(\"Warning: build in %s is using versioneer.py from %s\"\n                  % (os.path.dirname(me), versioneer_py))\n    except NameError:\n        pass\n    return root\n\n\ndef get_config_from_root(root):\n    # This might raise EnvironmentError (if setup.cfg is missing), or\n    # configparser.NoSectionError (if it lacks a [versioneer] section), or\n    # configparser.NoOptionError (if it lacks \"VCS=\"). See the docstring at\n    # the top of versioneer.py for instructions on writing your setup.cfg .\n    setup_cfg = os.path.join(root, \"setup.cfg\")\n    parser = configparser.SafeConfigParser()\n    with open(setup_cfg, \"r\") as f:\n        parser.readfp(f)\n    VCS = parser.get(\"versioneer\", \"VCS\")  # mandatory\n\n    def get(parser, name):\n        if parser.has_option(\"versioneer\", name):\n            return parser.get(\"versioneer\", name)\n        return None\n    cfg = VersioneerConfig()\n    cfg.VCS = VCS\n    cfg.style = get(parser, \"style\") or \"\"\n    cfg.versionfile_source = get(parser, \"versionfile_source\")\n    cfg.versionfile_build = get(parser, \"versionfile_build\")\n    cfg.tag_prefix = get(parser, \"tag_prefix\")\n    cfg.parentdir_prefix = get(parser, \"parentdir_prefix\")\n    cfg.verbose = get(parser, \"verbose\")\n    return cfg\n\n\nclass NotThisMethod(Exception):\n    pass\n\n# these dictionaries contain VCS-specific tools\nLONG_VERSION_PY = {}\nHANDLERS = {}\n\n\ndef register_vcs_handler(vcs, method):  # decorator\n    def decorate(f):\n        if vcs not in HANDLERS:\n            HANDLERS[vcs] = {}\n        HANDLERS[vcs][method] = f\n        return f\n    return decorate\n\n\ndef run_command(commands, args, cwd=None, 
verbose=False, hide_stderr=False):\n    assert isinstance(commands, list)\n    p = None\n    for c in commands:\n        try:\n            dispcmd = str([c] + args)\n            # remember shell=False, so use git.cmd on windows, not just git\n            p = subprocess.Popen([c] + args, cwd=cwd, stdout=subprocess.PIPE,\n                                 stderr=(subprocess.PIPE if hide_stderr\n                                         else None))\n            break\n        except EnvironmentError:\n            e = sys.exc_info()[1]\n            if e.errno == errno.ENOENT:\n                continue\n            if verbose:\n                print(\"unable to run %s\" % dispcmd)\n                print(e)\n            return None\n    else:\n        if verbose:\n            print(\"unable to find command, tried %s\" % (commands,))\n        return None\n    stdout = p.communicate()[0].strip()\n    if sys.version_info[0] >= 3:\n        stdout = stdout.decode()\n    if p.returncode != 0:\n        if verbose:\n            print(\"unable to run %s (error)\" % dispcmd)\n        return None\n    return stdout\nLONG_VERSION_PY['git'] = '''\n# This file helps to compute a version number in source trees obtained from\n# git-archive tarball (such as those provided by githubs download-from-tag\n# feature). Distribution tarballs (built by setup.py sdist) and build\n# directories (produced by setup.py build) will contain a much shorter file\n# that just contains the computed version number.\n\n# This file is released into the public domain. Generated by\n# versioneer-0.15 (https://github.com/warner/python-versioneer)\n\nimport errno\nimport os\nimport re\nimport subprocess\nimport sys\n\n\ndef get_keywords():\n    # these strings will be replaced by git during git-archive.\n    # setup.py/versioneer.py will grep for the variable names, so they must\n    # each be defined on a line of their own. 
_version.py will just call\n    # get_keywords().\n    git_refnames = \"%(DOLLAR)sFormat:%%d%(DOLLAR)s\"\n    git_full = \"%(DOLLAR)sFormat:%%H%(DOLLAR)s\"\n    keywords = {\"refnames\": git_refnames, \"full\": git_full}\n    return keywords\n\n\nclass VersioneerConfig:\n    pass\n\n\ndef get_config():\n    # these strings are filled in when 'setup.py versioneer' creates\n    # _version.py\n    cfg = VersioneerConfig()\n    cfg.VCS = \"git\"\n    cfg.style = \"%(STYLE)s\"\n    cfg.tag_prefix = \"%(TAG_PREFIX)s\"\n    cfg.parentdir_prefix = \"%(PARENTDIR_PREFIX)s\"\n    cfg.versionfile_source = \"%(VERSIONFILE_SOURCE)s\"\n    cfg.verbose = False\n    return cfg\n\n\nclass NotThisMethod(Exception):\n    pass\n\n\nLONG_VERSION_PY = {}\nHANDLERS = {}\n\n\ndef register_vcs_handler(vcs, method):  # decorator\n    def decorate(f):\n        if vcs not in HANDLERS:\n            HANDLERS[vcs] = {}\n        HANDLERS[vcs][method] = f\n        return f\n    return decorate\n\n\ndef run_command(commands, args, cwd=None, verbose=False, hide_stderr=False):\n    assert isinstance(commands, list)\n    p = None\n    for c in commands:\n        try:\n            dispcmd = str([c] + args)\n            # remember shell=False, so use git.cmd on windows, not just git\n            p = subprocess.Popen([c] + args, cwd=cwd, stdout=subprocess.PIPE,\n                                 stderr=(subprocess.PIPE if hide_stderr\n                                         else None))\n            break\n        except EnvironmentError:\n            e = sys.exc_info()[1]\n            if e.errno == errno.ENOENT:\n                continue\n            if verbose:\n                print(\"unable to run %%s\" %% dispcmd)\n                print(e)\n            return None\n    else:\n        if verbose:\n            print(\"unable to find command, tried %%s\" %% (commands,))\n        return None\n    stdout = p.communicate()[0].strip()\n    if sys.version_info[0] >= 3:\n        stdout = stdout.decode()\n    
if p.returncode != 0:\n        if verbose:\n            print(\"unable to run %%s (error)\" %% dispcmd)\n        return None\n    return stdout\n\n\ndef versions_from_parentdir(parentdir_prefix, root, verbose):\n    # Source tarballs conventionally unpack into a directory that includes\n    # both the project name and a version string.\n    dirname = os.path.basename(root)\n    if not dirname.startswith(parentdir_prefix):\n        if verbose:\n            print(\"guessing rootdir is '%%s', but '%%s' doesn't start with \"\n                  \"prefix '%%s'\" %% (root, dirname, parentdir_prefix))\n        raise NotThisMethod(\"rootdir doesn't start with parentdir_prefix\")\n    return {\"version\": dirname[len(parentdir_prefix):],\n            \"full-revisionid\": None,\n            \"dirty\": False, \"error\": None}\n\n\n@register_vcs_handler(\"git\", \"get_keywords\")\ndef git_get_keywords(versionfile_abs):\n    # the code embedded in _version.py can just fetch the value of these\n    # keywords. When used from setup.py, we don't want to import _version.py,\n    # so we do it with a regexp instead. 
This function is not used from\n    # _version.py.\n    keywords = {}\n    try:\n        f = open(versionfile_abs, \"r\")\n        for line in f.readlines():\n            if line.strip().startswith(\"git_refnames =\"):\n                mo = re.search(r'=\\s*\"(.*)\"', line)\n                if mo:\n                    keywords[\"refnames\"] = mo.group(1)\n            if line.strip().startswith(\"git_full =\"):\n                mo = re.search(r'=\\s*\"(.*)\"', line)\n                if mo:\n                    keywords[\"full\"] = mo.group(1)\n        f.close()\n    except EnvironmentError:\n        pass\n    return keywords\n\n\n@register_vcs_handler(\"git\", \"keywords\")\ndef git_versions_from_keywords(keywords, tag_prefix, verbose):\n    if not keywords:\n        raise NotThisMethod(\"no keywords at all, weird\")\n    refnames = keywords[\"refnames\"].strip()\n    if refnames.startswith(\"$Format\"):\n        if verbose:\n            print(\"keywords are unexpanded, not using\")\n        raise NotThisMethod(\"unexpanded keywords, not a git-archive tarball\")\n    refs = set([r.strip() for r in refnames.strip(\"()\").split(\",\")])\n    # starting in git-1.8.3, tags are listed as \"tag: foo-1.0\" instead of\n    # just \"foo-1.0\". If we see a \"tag: \" prefix, prefer those.\n    TAG = \"tag: \"\n    tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)])\n    if not tags:\n        # Either we're using git < 1.8.3, or there really are no tags. We use\n        # a heuristic: assume all version tags have a digit. The old git %%d\n        # expansion behaves like git log --decorate=short and strips out the\n        # refs/heads/ and refs/tags/ prefixes that would let us distinguish\n        # between branches and tags. 
By ignoring refnames without digits, we\n        # filter out many common branch names like \"release\" and\n        # \"stabilization\", as well as \"HEAD\" and \"master\".\n        tags = set([r for r in refs if re.search(r'\\d', r)])\n        if verbose:\n            print(\"discarding '%%s', no digits\" %% \",\".join(refs-tags))\n    if verbose:\n        print(\"likely tags: %%s\" %% \",\".join(sorted(tags)))\n    for ref in sorted(tags):\n        # sorting will prefer e.g. \"2.0\" over \"2.0rc1\"\n        if ref.startswith(tag_prefix):\n            r = ref[len(tag_prefix):]\n            if verbose:\n                print(\"picking %%s\" %% r)\n            return {\"version\": r,\n                    \"full-revisionid\": keywords[\"full\"].strip(),\n                    \"dirty\": False, \"error\": None\n                    }\n    # no suitable tags, so version is \"0+unknown\", but full hex is still there\n    if verbose:\n        print(\"no suitable tags, using unknown + full revision id\")\n    return {\"version\": \"0+unknown\",\n            \"full-revisionid\": keywords[\"full\"].strip(),\n            \"dirty\": False, \"error\": \"no suitable tags\"}\n\n\n@register_vcs_handler(\"git\", \"pieces_from_vcs\")\ndef git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):\n    # this runs 'git' from the root of the source tree. 
This only gets called\n    # if the git-archive 'subst' keywords were *not* expanded, and\n    # _version.py hasn't already been rewritten with a short version string,\n    # meaning we're inside a checked out source tree.\n\n    if not os.path.exists(os.path.join(root, \".git\")):\n        if verbose:\n            print(\"no .git in %%s\" %% root)\n        raise NotThisMethod(\"no .git directory\")\n\n    GITS = [\"git\"]\n    if sys.platform == \"win32\":\n        GITS = [\"git.cmd\", \"git.exe\"]\n    # if there is a tag, this yields TAG-NUM-gHEX[-dirty]\n    # if there are no tags, this yields HEX[-dirty] (no NUM)\n    describe_out = run_command(GITS, [\"describe\", \"--tags\", \"--dirty\",\n                                      \"--always\", \"--long\"],\n                               cwd=root)\n    # --long was added in git-1.5.5\n    if describe_out is None:\n        raise NotThisMethod(\"'git describe' failed\")\n    describe_out = describe_out.strip()\n    full_out = run_command(GITS, [\"rev-parse\", \"HEAD\"], cwd=root)\n    if full_out is None:\n        raise NotThisMethod(\"'git rev-parse' failed\")\n    full_out = full_out.strip()\n\n    pieces = {}\n    pieces[\"long\"] = full_out\n    pieces[\"short\"] = full_out[:7]  # maybe improved later\n    pieces[\"error\"] = None\n\n    # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]\n    # TAG might have hyphens.\n    git_describe = describe_out\n\n    # look for -dirty suffix\n    dirty = git_describe.endswith(\"-dirty\")\n    pieces[\"dirty\"] = dirty\n    if dirty:\n        git_describe = git_describe[:git_describe.rindex(\"-dirty\")]\n\n    # now we have TAG-NUM-gHEX or HEX\n\n    if \"-\" in git_describe:\n        # TAG-NUM-gHEX\n        mo = re.search(r'^(.+)-(\\d+)-g([0-9a-f]+)$', git_describe)\n        if not mo:\n            # unparseable. 
Maybe git-describe is misbehaving?\n            pieces[\"error\"] = (\"unable to parse git-describe output: '%%s'\"\n                               %% describe_out)\n            return pieces\n\n        # tag\n        full_tag = mo.group(1)\n        if not full_tag.startswith(tag_prefix):\n            if verbose:\n                fmt = \"tag '%%s' doesn't start with prefix '%%s'\"\n                print(fmt %% (full_tag, tag_prefix))\n            pieces[\"error\"] = (\"tag '%%s' doesn't start with prefix '%%s'\"\n                               %% (full_tag, tag_prefix))\n            return pieces\n        pieces[\"closest-tag\"] = full_tag[len(tag_prefix):]\n\n        # distance: number of commits since tag\n        pieces[\"distance\"] = int(mo.group(2))\n\n        # commit: short hex revision ID\n        pieces[\"short\"] = mo.group(3)\n\n    else:\n        # HEX: no tags\n        pieces[\"closest-tag\"] = None\n        count_out = run_command(GITS, [\"rev-list\", \"HEAD\", \"--count\"],\n                                cwd=root)\n        pieces[\"distance\"] = int(count_out)  # total number of commits\n\n    return pieces\n\n\ndef plus_or_dot(pieces):\n    if \"+\" in pieces.get(\"closest-tag\", \"\"):\n        return \".\"\n    return \"+\"\n\n\ndef render_pep440(pieces):\n    # now build up version string, with post-release \"local version\n    # identifier\". Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you\n    # get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty\n\n    # exceptions:\n    # 1: no tags. git_describe was just HEX. 
0+untagged.DISTANCE.gHEX[.dirty]\n\n    if pieces[\"closest-tag\"]:\n        rendered = pieces[\"closest-tag\"]\n        if pieces[\"distance\"] or pieces[\"dirty\"]:\n            rendered += plus_or_dot(pieces)\n            rendered += \"%%d.g%%s\" %% (pieces[\"distance\"], pieces[\"short\"])\n            if pieces[\"dirty\"]:\n                rendered += \".dirty\"\n    else:\n        # exception #1\n        rendered = \"0+untagged.%%d.g%%s\" %% (pieces[\"distance\"],\n                                          pieces[\"short\"])\n        if pieces[\"dirty\"]:\n            rendered += \".dirty\"\n    return rendered\n\n\ndef render_pep440_pre(pieces):\n    # TAG[.post.devDISTANCE] . No -dirty\n\n    # exceptions:\n    # 1: no tags. 0.post.devDISTANCE\n\n    if pieces[\"closest-tag\"]:\n        rendered = pieces[\"closest-tag\"]\n        if pieces[\"distance\"]:\n            rendered += \".post.dev%%d\" %% pieces[\"distance\"]\n    else:\n        # exception #1\n        rendered = \"0.post.dev%%d\" %% pieces[\"distance\"]\n    return rendered\n\n\ndef render_pep440_post(pieces):\n    # TAG[.postDISTANCE[.dev0]+gHEX] . The \".dev0\" means dirty. Note that\n    # .dev0 sorts backwards (a dirty tree will appear \"older\" than the\n    # corresponding clean one), but you shouldn't be releasing software with\n    # -dirty anyways.\n\n    # exceptions:\n    # 1: no tags. 
0.postDISTANCE[.dev0]\n\n    if pieces[\"closest-tag\"]:\n        rendered = pieces[\"closest-tag\"]\n        if pieces[\"distance\"] or pieces[\"dirty\"]:\n            rendered += \".post%%d\" %% pieces[\"distance\"]\n            if pieces[\"dirty\"]:\n                rendered += \".dev0\"\n            rendered += plus_or_dot(pieces)\n            rendered += \"g%%s\" %% pieces[\"short\"]\n    else:\n        # exception #1\n        rendered = \"0.post%%d\" %% pieces[\"distance\"]\n        if pieces[\"dirty\"]:\n            rendered += \".dev0\"\n        rendered += \"+g%%s\" %% pieces[\"short\"]\n    return rendered\n\n\ndef render_pep440_old(pieces):\n    # TAG[.postDISTANCE[.dev0]] . The \".dev0\" means dirty.\n\n    # exceptions:\n    # 1: no tags. 0.postDISTANCE[.dev0]\n\n    if pieces[\"closest-tag\"]:\n        rendered = pieces[\"closest-tag\"]\n        if pieces[\"distance\"] or pieces[\"dirty\"]:\n            rendered += \".post%%d\" %% pieces[\"distance\"]\n            if pieces[\"dirty\"]:\n                rendered += \".dev0\"\n    else:\n        # exception #1\n        rendered = \"0.post%%d\" %% pieces[\"distance\"]\n        if pieces[\"dirty\"]:\n            rendered += \".dev0\"\n    return rendered\n\n\ndef render_git_describe(pieces):\n    # TAG[-DISTANCE-gHEX][-dirty], like 'git describe --tags --dirty\n    # --always'\n\n    # exceptions:\n    # 1: no tags. HEX[-dirty]  (note: no 'g' prefix)\n\n    if pieces[\"closest-tag\"]:\n        rendered = pieces[\"closest-tag\"]\n        if pieces[\"distance\"]:\n            rendered += \"-%%d-g%%s\" %% (pieces[\"distance\"], pieces[\"short\"])\n    else:\n        # exception #1\n        rendered = pieces[\"short\"]\n    if pieces[\"dirty\"]:\n        rendered += \"-dirty\"\n    return rendered\n\n\ndef render_git_describe_long(pieces):\n    # TAG-DISTANCE-gHEX[-dirty], like 'git describe --tags --dirty\n    # --always -long'. The distance/hash is unconditional.\n\n    # exceptions:\n    # 1: no tags. 
HEX[-dirty]  (note: no 'g' prefix)\n\n    if pieces[\"closest-tag\"]:\n        rendered = pieces[\"closest-tag\"]\n        rendered += \"-%%d-g%%s\" %% (pieces[\"distance\"], pieces[\"short\"])\n    else:\n        # exception #1\n        rendered = pieces[\"short\"]\n    if pieces[\"dirty\"]:\n        rendered += \"-dirty\"\n    return rendered\n\n\ndef render(pieces, style):\n    if pieces[\"error\"]:\n        return {\"version\": \"unknown\",\n                \"full-revisionid\": pieces.get(\"long\"),\n                \"dirty\": None,\n                \"error\": pieces[\"error\"]}\n\n    if not style or style == \"default\":\n        style = \"pep440\"  # the default\n\n    if style == \"pep440\":\n        rendered = render_pep440(pieces)\n    elif style == \"pep440-pre\":\n        rendered = render_pep440_pre(pieces)\n    elif style == \"pep440-post\":\n        rendered = render_pep440_post(pieces)\n    elif style == \"pep440-old\":\n        rendered = render_pep440_old(pieces)\n    elif style == \"git-describe\":\n        rendered = render_git_describe(pieces)\n    elif style == \"git-describe-long\":\n        rendered = render_git_describe_long(pieces)\n    else:\n        raise ValueError(\"unknown style '%%s'\" %% style)\n\n    return {\"version\": rendered, \"full-revisionid\": pieces[\"long\"],\n            \"dirty\": pieces[\"dirty\"], \"error\": None}\n\n\ndef get_versions():\n    # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have\n    # __file__, we can work backwards from there to the root. 
Some\n    # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which\n    # case we can only use expanded keywords.\n\n    cfg = get_config()\n    verbose = cfg.verbose\n\n    try:\n        return git_versions_from_keywords(get_keywords(), cfg.tag_prefix,\n                                          verbose)\n    except NotThisMethod:\n        pass\n\n    try:\n        root = os.path.realpath(__file__)\n        # versionfile_source is the relative path from the top of the source\n        # tree (where the .git directory might live) to this file. Invert\n        # this to find the root from __file__.\n        for i in cfg.versionfile_source.split('/'):\n            root = os.path.dirname(root)\n    except NameError:\n        return {\"version\": \"0+unknown\", \"full-revisionid\": None,\n                \"dirty\": None,\n                \"error\": \"unable to find root of source tree\"}\n\n    try:\n        pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose)\n        return render(pieces, cfg.style)\n    except NotThisMethod:\n        pass\n\n    try:\n        if cfg.parentdir_prefix:\n            return versions_from_parentdir(cfg.parentdir_prefix, root, verbose)\n    except NotThisMethod:\n        pass\n\n    return {\"version\": \"0+unknown\", \"full-revisionid\": None,\n            \"dirty\": None,\n            \"error\": \"unable to compute version\"}\n'''\n\n\n@register_vcs_handler(\"git\", \"get_keywords\")\ndef git_get_keywords(versionfile_abs):\n    # the code embedded in _version.py can just fetch the value of these\n    # keywords. When used from setup.py, we don't want to import _version.py,\n    # so we do it with a regexp instead. 
This function is not used from\n    # _version.py.\n    keywords = {}\n    try:\n        f = open(versionfile_abs, \"r\")\n        for line in f.readlines():\n            if line.strip().startswith(\"git_refnames =\"):\n                mo = re.search(r'=\\s*\"(.*)\"', line)\n                if mo:\n                    keywords[\"refnames\"] = mo.group(1)\n            if line.strip().startswith(\"git_full =\"):\n                mo = re.search(r'=\\s*\"(.*)\"', line)\n                if mo:\n                    keywords[\"full\"] = mo.group(1)\n        f.close()\n    except EnvironmentError:\n        pass\n    return keywords\n\n\n@register_vcs_handler(\"git\", \"keywords\")\ndef git_versions_from_keywords(keywords, tag_prefix, verbose):\n    if not keywords:\n        raise NotThisMethod(\"no keywords at all, weird\")\n    refnames = keywords[\"refnames\"].strip()\n    if refnames.startswith(\"$Format\"):\n        if verbose:\n            print(\"keywords are unexpanded, not using\")\n        raise NotThisMethod(\"unexpanded keywords, not a git-archive tarball\")\n    refs = set([r.strip() for r in refnames.strip(\"()\").split(\",\")])\n    # starting in git-1.8.3, tags are listed as \"tag: foo-1.0\" instead of\n    # just \"foo-1.0\". If we see a \"tag: \" prefix, prefer those.\n    TAG = \"tag: \"\n    tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)])\n    if not tags:\n        # Either we're using git < 1.8.3, or there really are no tags. We use\n        # a heuristic: assume all version tags have a digit. The old git %d\n        # expansion behaves like git log --decorate=short and strips out the\n        # refs/heads/ and refs/tags/ prefixes that would let us distinguish\n        # between branches and tags. 
By ignoring refnames without digits, we\n        # filter out many common branch names like \"release\" and\n        # \"stabilization\", as well as \"HEAD\" and \"master\".\n        tags = set([r for r in refs if re.search(r'\\d', r)])\n        if verbose:\n            print(\"discarding '%s', no digits\" % \",\".join(refs-tags))\n    if verbose:\n        print(\"likely tags: %s\" % \",\".join(sorted(tags)))\n    for ref in sorted(tags):\n        # sorting will prefer e.g. \"2.0\" over \"2.0rc1\"\n        if ref.startswith(tag_prefix):\n            r = ref[len(tag_prefix):]\n            if verbose:\n                print(\"picking %s\" % r)\n            return {\"version\": r,\n                    \"full-revisionid\": keywords[\"full\"].strip(),\n                    \"dirty\": False, \"error\": None\n                    }\n    # no suitable tags, so version is \"0+unknown\", but full hex is still there\n    if verbose:\n        print(\"no suitable tags, using unknown + full revision id\")\n    return {\"version\": \"0+unknown\",\n            \"full-revisionid\": keywords[\"full\"].strip(),\n            \"dirty\": False, \"error\": \"no suitable tags\"}\n\n\n@register_vcs_handler(\"git\", \"pieces_from_vcs\")\ndef git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):\n    # this runs 'git' from the root of the source tree. 
This only gets called\n    # if the git-archive 'subst' keywords were *not* expanded, and\n    # _version.py hasn't already been rewritten with a short version string,\n    # meaning we're inside a checked out source tree.\n\n    if not os.path.exists(os.path.join(root, \".git\")):\n        if verbose:\n            print(\"no .git in %s\" % root)\n        raise NotThisMethod(\"no .git directory\")\n\n    GITS = [\"git\"]\n    if sys.platform == \"win32\":\n        GITS = [\"git.cmd\", \"git.exe\"]\n    # if there is a tag, this yields TAG-NUM-gHEX[-dirty]\n    # if there are no tags, this yields HEX[-dirty] (no NUM)\n    describe_out = run_command(GITS, [\"describe\", \"--tags\", \"--dirty\",\n                                      \"--always\", \"--long\"],\n                               cwd=root)\n    # --long was added in git-1.5.5\n    if describe_out is None:\n        raise NotThisMethod(\"'git describe' failed\")\n    describe_out = describe_out.strip()\n    full_out = run_command(GITS, [\"rev-parse\", \"HEAD\"], cwd=root)\n    if full_out is None:\n        raise NotThisMethod(\"'git rev-parse' failed\")\n    full_out = full_out.strip()\n\n    pieces = {}\n    pieces[\"long\"] = full_out\n    pieces[\"short\"] = full_out[:7]  # maybe improved later\n    pieces[\"error\"] = None\n\n    # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]\n    # TAG might have hyphens.\n    git_describe = describe_out\n\n    # look for -dirty suffix\n    dirty = git_describe.endswith(\"-dirty\")\n    pieces[\"dirty\"] = dirty\n    if dirty:\n        git_describe = git_describe[:git_describe.rindex(\"-dirty\")]\n\n    # now we have TAG-NUM-gHEX or HEX\n\n    if \"-\" in git_describe:\n        # TAG-NUM-gHEX\n        mo = re.search(r'^(.+)-(\\d+)-g([0-9a-f]+)$', git_describe)\n        if not mo:\n            # unparseable. 
Maybe git-describe is misbehaving?\n            pieces[\"error\"] = (\"unable to parse git-describe output: '%s'\"\n                               % describe_out)\n            return pieces\n\n        # tag\n        full_tag = mo.group(1)\n        if not full_tag.startswith(tag_prefix):\n            if verbose:\n                fmt = \"tag '%s' doesn't start with prefix '%s'\"\n                print(fmt % (full_tag, tag_prefix))\n            pieces[\"error\"] = (\"tag '%s' doesn't start with prefix '%s'\"\n                               % (full_tag, tag_prefix))\n            return pieces\n        pieces[\"closest-tag\"] = full_tag[len(tag_prefix):]\n\n        # distance: number of commits since tag\n        pieces[\"distance\"] = int(mo.group(2))\n\n        # commit: short hex revision ID\n        pieces[\"short\"] = mo.group(3)\n\n    else:\n        # HEX: no tags\n        pieces[\"closest-tag\"] = None\n        count_out = run_command(GITS, [\"rev-list\", \"HEAD\", \"--count\"],\n                                cwd=root)\n        pieces[\"distance\"] = int(count_out)  # total number of commits\n\n    return pieces\n\n\ndef do_vcs_install(manifest_in, versionfile_source, ipy):\n    GITS = [\"git\"]\n    if sys.platform == \"win32\":\n        GITS = [\"git.cmd\", \"git.exe\"]\n    files = [manifest_in, versionfile_source]\n    if ipy:\n        files.append(ipy)\n    try:\n        me = __file__\n        if me.endswith(\".pyc\") or me.endswith(\".pyo\"):\n            me = os.path.splitext(me)[0] + \".py\"\n        versioneer_file = os.path.relpath(me)\n    except NameError:\n        versioneer_file = \"versioneer.py\"\n    files.append(versioneer_file)\n    present = False\n    try:\n        f = open(\".gitattributes\", \"r\")\n        for line in f.readlines():\n            if line.strip().startswith(versionfile_source):\n                if \"export-subst\" in line.strip().split()[1:]:\n                    present = True\n        f.close()\n    except 
EnvironmentError:\n        pass\n    if not present:\n        f = open(\".gitattributes\", \"a+\")\n        f.write(\"%s export-subst\\n\" % versionfile_source)\n        f.close()\n        files.append(\".gitattributes\")\n    run_command(GITS, [\"add\", \"--\"] + files)\n\n\ndef versions_from_parentdir(parentdir_prefix, root, verbose):\n    # Source tarballs conventionally unpack into a directory that includes\n    # both the project name and a version string.\n    dirname = os.path.basename(root)\n    if not dirname.startswith(parentdir_prefix):\n        if verbose:\n            print(\"guessing rootdir is '%s', but '%s' doesn't start with \"\n                  \"prefix '%s'\" % (root, dirname, parentdir_prefix))\n        raise NotThisMethod(\"rootdir doesn't start with parentdir_prefix\")\n    return {\"version\": dirname[len(parentdir_prefix):],\n            \"full-revisionid\": None,\n            \"dirty\": False, \"error\": None}\n\nSHORT_VERSION_PY = \"\"\"\n# This file was generated by 'versioneer.py' (0.15) from\n# revision-control system data, or from the parent directory name of an\n# unpacked source archive. 
Distribution tarballs contain a pre-generated copy\n# of this file.\n\nimport json\nimport sys\n\nversion_json = '''\n%s\n'''  # END VERSION_JSON\n\n\ndef get_versions():\n    return json.loads(version_json)\n\"\"\"\n\n\ndef versions_from_file(filename):\n    try:\n        with open(filename) as f:\n            contents = f.read()\n    except EnvironmentError:\n        raise NotThisMethod(\"unable to read _version.py\")\n    mo = re.search(r\"version_json = '''\\n(.*)'''  # END VERSION_JSON\",\n                   contents, re.M | re.S)\n    if not mo:\n        raise NotThisMethod(\"no version_json in _version.py\")\n    return json.loads(mo.group(1))\n\n\ndef write_to_version_file(filename, versions):\n    os.unlink(filename)\n    contents = json.dumps(versions, sort_keys=True,\n                          indent=1, separators=(\",\", \": \"))\n    with open(filename, \"w\") as f:\n        f.write(SHORT_VERSION_PY % contents)\n\n    print(\"set %s to '%s'\" % (filename, versions[\"version\"]))\n\n\ndef plus_or_dot(pieces):\n    if \"+\" in pieces.get(\"closest-tag\", \"\"):\n        return \".\"\n    return \"+\"\n\n\ndef render_pep440(pieces):\n    # now build up version string, with post-release \"local version\n    # identifier\". Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you\n    # get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty\n\n    # exceptions:\n    # 1: no tags. git_describe was just HEX. 
0+untagged.DISTANCE.gHEX[.dirty]\n\n    if pieces[\"closest-tag\"]:\n        rendered = pieces[\"closest-tag\"]\n        if pieces[\"distance\"] or pieces[\"dirty\"]:\n            rendered += plus_or_dot(pieces)\n            rendered += \"%d.g%s\" % (pieces[\"distance\"], pieces[\"short\"])\n            if pieces[\"dirty\"]:\n                rendered += \".dirty\"\n    else:\n        # exception #1\n        rendered = \"0+untagged.%d.g%s\" % (pieces[\"distance\"],\n                                          pieces[\"short\"])\n        if pieces[\"dirty\"]:\n            rendered += \".dirty\"\n    return rendered\n\n\ndef render_pep440_pre(pieces):\n    # TAG[.post.devDISTANCE] . No -dirty\n\n    # exceptions:\n    # 1: no tags. 0.post.devDISTANCE\n\n    if pieces[\"closest-tag\"]:\n        rendered = pieces[\"closest-tag\"]\n        if pieces[\"distance\"]:\n            rendered += \".post.dev%d\" % pieces[\"distance\"]\n    else:\n        # exception #1\n        rendered = \"0.post.dev%d\" % pieces[\"distance\"]\n    return rendered\n\n\ndef render_pep440_post(pieces):\n    # TAG[.postDISTANCE[.dev0]+gHEX] . The \".dev0\" means dirty. Note that\n    # .dev0 sorts backwards (a dirty tree will appear \"older\" than the\n    # corresponding clean one), but you shouldn't be releasing software with\n    # -dirty anyways.\n\n    # exceptions:\n    # 1: no tags. 
0.postDISTANCE[.dev0]\n\n    if pieces[\"closest-tag\"]:\n        rendered = pieces[\"closest-tag\"]\n        if pieces[\"distance\"] or pieces[\"dirty\"]:\n            rendered += \".post%d\" % pieces[\"distance\"]\n            if pieces[\"dirty\"]:\n                rendered += \".dev0\"\n            rendered += plus_or_dot(pieces)\n            rendered += \"g%s\" % pieces[\"short\"]\n    else:\n        # exception #1\n        rendered = \"0.post%d\" % pieces[\"distance\"]\n        if pieces[\"dirty\"]:\n            rendered += \".dev0\"\n        rendered += \"+g%s\" % pieces[\"short\"]\n    return rendered\n\n\ndef render_pep440_old(pieces):\n    # TAG[.postDISTANCE[.dev0]] . The \".dev0\" means dirty.\n\n    # exceptions:\n    # 1: no tags. 0.postDISTANCE[.dev0]\n\n    if pieces[\"closest-tag\"]:\n        rendered = pieces[\"closest-tag\"]\n        if pieces[\"distance\"] or pieces[\"dirty\"]:\n            rendered += \".post%d\" % pieces[\"distance\"]\n            if pieces[\"dirty\"]:\n                rendered += \".dev0\"\n    else:\n        # exception #1\n        rendered = \"0.post%d\" % pieces[\"distance\"]\n        if pieces[\"dirty\"]:\n            rendered += \".dev0\"\n    return rendered\n\n\ndef render_git_describe(pieces):\n    # TAG[-DISTANCE-gHEX][-dirty], like 'git describe --tags --dirty\n    # --always'\n\n    # exceptions:\n    # 1: no tags. HEX[-dirty]  (note: no 'g' prefix)\n\n    if pieces[\"closest-tag\"]:\n        rendered = pieces[\"closest-tag\"]\n        if pieces[\"distance\"]:\n            rendered += \"-%d-g%s\" % (pieces[\"distance\"], pieces[\"short\"])\n    else:\n        # exception #1\n        rendered = pieces[\"short\"]\n    if pieces[\"dirty\"]:\n        rendered += \"-dirty\"\n    return rendered\n\n\ndef render_git_describe_long(pieces):\n    # TAG-DISTANCE-gHEX[-dirty], like 'git describe --tags --dirty\n    # --always -long'. The distance/hash is unconditional.\n\n    # exceptions:\n    # 1: no tags. 
HEX[-dirty]  (note: no 'g' prefix)\n\n    if pieces[\"closest-tag\"]:\n        rendered = pieces[\"closest-tag\"]\n        rendered += \"-%d-g%s\" % (pieces[\"distance\"], pieces[\"short\"])\n    else:\n        # exception #1\n        rendered = pieces[\"short\"]\n    if pieces[\"dirty\"]:\n        rendered += \"-dirty\"\n    return rendered\n\n\ndef render(pieces, style):\n    if pieces[\"error\"]:\n        return {\"version\": \"unknown\",\n                \"full-revisionid\": pieces.get(\"long\"),\n                \"dirty\": None,\n                \"error\": pieces[\"error\"]}\n\n    if not style or style == \"default\":\n        style = \"pep440\"  # the default\n\n    if style == \"pep440\":\n        rendered = render_pep440(pieces)\n    elif style == \"pep440-pre\":\n        rendered = render_pep440_pre(pieces)\n    elif style == \"pep440-post\":\n        rendered = render_pep440_post(pieces)\n    elif style == \"pep440-old\":\n        rendered = render_pep440_old(pieces)\n    elif style == \"git-describe\":\n        rendered = render_git_describe(pieces)\n    elif style == \"git-describe-long\":\n        rendered = render_git_describe_long(pieces)\n    else:\n        raise ValueError(\"unknown style '%s'\" % style)\n\n    return {\"version\": rendered, \"full-revisionid\": pieces[\"long\"],\n            \"dirty\": pieces[\"dirty\"], \"error\": None}\n\n\nclass VersioneerBadRootError(Exception):\n    pass\n\n\ndef get_versions(verbose=False):\n    # returns dict with two keys: 'version' and 'full'\n\n    if \"versioneer\" in sys.modules:\n        # see the discussion in cmdclass.py:get_cmdclass()\n        del sys.modules[\"versioneer\"]\n\n    root = get_root()\n    cfg = get_config_from_root(root)\n\n    assert cfg.VCS is not None, \"please set [versioneer]VCS= in setup.cfg\"\n    handlers = HANDLERS.get(cfg.VCS)\n    assert handlers, \"unrecognized VCS '%s'\" % cfg.VCS\n    verbose = verbose or cfg.verbose\n    assert cfg.versionfile_source is not None, 
\\\n        \"please set versioneer.versionfile_source\"\n    assert cfg.tag_prefix is not None, \"please set versioneer.tag_prefix\"\n\n    versionfile_abs = os.path.join(root, cfg.versionfile_source)\n\n    # extract version from first of: _version.py, VCS command (e.g. 'git\n    # describe'), parentdir. This is meant to work for developers using a\n    # source checkout, for users of a tarball created by 'setup.py sdist',\n    # and for users of a tarball/zipball created by 'git archive' or github's\n    # download-from-tag feature or the equivalent in other VCSes.\n\n    get_keywords_f = handlers.get(\"get_keywords\")\n    from_keywords_f = handlers.get(\"keywords\")\n    if get_keywords_f and from_keywords_f:\n        try:\n            keywords = get_keywords_f(versionfile_abs)\n            ver = from_keywords_f(keywords, cfg.tag_prefix, verbose)\n            if verbose:\n                print(\"got version from expanded keyword %s\" % ver)\n            return ver\n        except NotThisMethod:\n            pass\n\n    try:\n        ver = versions_from_file(versionfile_abs)\n        if verbose:\n            print(\"got version from file %s %s\" % (versionfile_abs, ver))\n        return ver\n    except NotThisMethod:\n        pass\n\n    from_vcs_f = handlers.get(\"pieces_from_vcs\")\n    if from_vcs_f:\n        try:\n            pieces = from_vcs_f(cfg.tag_prefix, root, verbose)\n            ver = render(pieces, cfg.style)\n            if verbose:\n                print(\"got version from VCS %s\" % ver)\n            return ver\n        except NotThisMethod:\n            pass\n\n    try:\n        if cfg.parentdir_prefix:\n            ver = versions_from_parentdir(cfg.parentdir_prefix, root, verbose)\n            if verbose:\n                print(\"got version from parentdir %s\" % ver)\n            return ver\n    except NotThisMethod:\n        pass\n\n    if verbose:\n        print(\"unable to compute version\")\n\n    return {\"version\": \"0+unknown\", 
\"full-revisionid\": None,\n            \"dirty\": None, \"error\": \"unable to compute version\"}\n\n\ndef get_version():\n    return get_versions()[\"version\"]\n\n\ndef get_cmdclass():\n    if \"versioneer\" in sys.modules:\n        del sys.modules[\"versioneer\"]\n        # this fixes the \"python setup.py develop\" case (also 'install' and\n        # 'easy_install .'), in which subdependencies of the main project are\n        # built (using setup.py bdist_egg) in the same python process. Assume\n        # a main project A and a dependency B, which use different versions\n        # of Versioneer. A's setup.py imports A's Versioneer, leaving it in\n        # sys.modules by the time B's setup.py is executed, causing B to run\n        # with the wrong versioneer. Setuptools wraps the sub-dep builds in a\n        # sandbox that restores sys.modules to it's pre-build state, so the\n        # parent is protected against the child's \"import versioneer\". By\n        # removing ourselves from sys.modules here, before the child build\n        # happens, we protect the child from the parent's versioneer too.\n        # Also see https://github.com/warner/python-versioneer/issues/52\n\n    cmds = {}\n\n    # we add \"version\" to both distutils and setuptools\n    from distutils.core import Command\n\n    class cmd_version(Command):\n        description = \"report generated version string\"\n        user_options = []\n        boolean_options = []\n\n        def initialize_options(self):\n            pass\n\n        def finalize_options(self):\n            pass\n\n        def run(self):\n            vers = get_versions(verbose=True)\n            print(\"Version: %s\" % vers[\"version\"])\n            print(\" full-revisionid: %s\" % vers.get(\"full-revisionid\"))\n            print(\" dirty: %s\" % vers.get(\"dirty\"))\n            if vers[\"error\"]:\n                print(\" error: %s\" % vers[\"error\"])\n    cmds[\"version\"] = cmd_version\n\n    # we override 
\"build_py\" in both distutils and setuptools\n    #\n    # most invocation pathways end up running build_py:\n    #  distutils/build -> build_py\n    #  distutils/install -> distutils/build ->..\n    #  setuptools/bdist_wheel -> distutils/install ->..\n    #  setuptools/bdist_egg -> distutils/install_lib -> build_py\n    #  setuptools/install -> bdist_egg ->..\n    #  setuptools/develop -> ?\n\n    from distutils.command.build_py import build_py as _build_py\n\n    class cmd_build_py(_build_py):\n        def run(self):\n            root = get_root()\n            cfg = get_config_from_root(root)\n            versions = get_versions()\n            _build_py.run(self)\n            # now locate _version.py in the new build/ directory and replace\n            # it with an updated value\n            if cfg.versionfile_build:\n                target_versionfile = os.path.join(self.build_lib,\n                                                  cfg.versionfile_build)\n                print(\"UPDATING %s\" % target_versionfile)\n                write_to_version_file(target_versionfile, versions)\n    cmds[\"build_py\"] = cmd_build_py\n\n    if \"cx_Freeze\" in sys.modules:  # cx_freeze enabled?\n        from cx_Freeze.dist import build_exe as _build_exe\n\n        class cmd_build_exe(_build_exe):\n            def run(self):\n                root = get_root()\n                cfg = get_config_from_root(root)\n                versions = get_versions()\n                target_versionfile = cfg.versionfile_source\n                print(\"UPDATING %s\" % target_versionfile)\n                write_to_version_file(target_versionfile, versions)\n\n                _build_exe.run(self)\n                os.unlink(target_versionfile)\n                with open(cfg.versionfile_source, \"w\") as f:\n                    LONG = LONG_VERSION_PY[cfg.VCS]\n                    f.write(LONG %\n                            {\"DOLLAR\": \"$\",\n                             \"STYLE\": cfg.style,\n   
                          \"TAG_PREFIX\": cfg.tag_prefix,\n                             \"PARENTDIR_PREFIX\": cfg.parentdir_prefix,\n                             \"VERSIONFILE_SOURCE\": cfg.versionfile_source,\n                             })\n        cmds[\"build_exe\"] = cmd_build_exe\n        del cmds[\"build_py\"]\n\n    # we override different \"sdist\" commands for both environments\n    if \"setuptools\" in sys.modules:\n        from setuptools.command.sdist import sdist as _sdist\n    else:\n        from distutils.command.sdist import sdist as _sdist\n\n    class cmd_sdist(_sdist):\n        def run(self):\n            versions = get_versions()\n            self._versioneer_generated_versions = versions\n            # unless we update this, the command will keep using the old\n            # version\n            self.distribution.metadata.version = versions[\"version\"]\n            return _sdist.run(self)\n\n        def make_release_tree(self, base_dir, files):\n            root = get_root()\n            cfg = get_config_from_root(root)\n            _sdist.make_release_tree(self, base_dir, files)\n            # now locate _version.py in the new base_dir directory\n            # (remembering that it may be a hardlink) and replace it with an\n            # updated value\n            target_versionfile = os.path.join(base_dir, cfg.versionfile_source)\n            print(\"UPDATING %s\" % target_versionfile)\n            write_to_version_file(target_versionfile,\n                                  self._versioneer_generated_versions)\n    cmds[\"sdist\"] = cmd_sdist\n\n    return cmds\n\n\nCONFIG_ERROR = \"\"\"\nsetup.cfg is missing the necessary Versioneer configuration. 
You need\na section like:\n\n [versioneer]\n VCS = git\n style = pep440\n versionfile_source = src/myproject/_version.py\n versionfile_build = myproject/_version.py\n tag_prefix = \"\"\n parentdir_prefix = myproject-\n\nYou will also need to edit your setup.py to use the results:\n\n import versioneer\n setup(version=versioneer.get_version(),\n       cmdclass=versioneer.get_cmdclass(), ...)\n\nPlease read the docstring in ./versioneer.py for configuration instructions,\nedit setup.cfg, and re-run the installer or 'python versioneer.py setup'.\n\"\"\"\n\nSAMPLE_CONFIG = \"\"\"\n# See the docstring in versioneer.py for instructions. Note that you must\n# re-run 'versioneer.py setup' after changing this section, and commit the\n# resulting files.\n\n[versioneer]\n#VCS = git\n#style = pep440\n#versionfile_source =\n#versionfile_build =\n#tag_prefix =\n#parentdir_prefix =\n\n\"\"\"\n\nINIT_PY_SNIPPET = \"\"\"\nfrom ._version import get_versions\n__version__ = get_versions()['version']\ndel get_versions\n\"\"\"\n\n\ndef do_setup():\n    root = get_root()\n    try:\n        cfg = get_config_from_root(root)\n    except (EnvironmentError, configparser.NoSectionError,\n            configparser.NoOptionError) as e:\n        if isinstance(e, (EnvironmentError, configparser.NoSectionError)):\n            print(\"Adding sample versioneer config to setup.cfg\",\n                  file=sys.stderr)\n            with open(os.path.join(root, \"setup.cfg\"), \"a\") as f:\n                f.write(SAMPLE_CONFIG)\n        print(CONFIG_ERROR, file=sys.stderr)\n        return 1\n\n    print(\" creating %s\" % cfg.versionfile_source)\n    with open(cfg.versionfile_source, \"w\") as f:\n        LONG = LONG_VERSION_PY[cfg.VCS]\n        f.write(LONG % {\"DOLLAR\": \"$\",\n                        \"STYLE\": cfg.style,\n                        \"TAG_PREFIX\": cfg.tag_prefix,\n                        \"PARENTDIR_PREFIX\": cfg.parentdir_prefix,\n                        \"VERSIONFILE_SOURCE\": 
cfg.versionfile_source,\n                        })\n\n    ipy = os.path.join(os.path.dirname(cfg.versionfile_source),\n                       \"__init__.py\")\n    if os.path.exists(ipy):\n        try:\n            with open(ipy, \"r\") as f:\n                old = f.read()\n        except EnvironmentError:\n            old = \"\"\n        if INIT_PY_SNIPPET not in old:\n            print(\" appending to %s\" % ipy)\n            with open(ipy, \"a\") as f:\n                f.write(INIT_PY_SNIPPET)\n        else:\n            print(\" %s unmodified\" % ipy)\n    else:\n        print(\" %s doesn't exist, ok\" % ipy)\n        ipy = None\n\n    # Make sure both the top-level \"versioneer.py\" and versionfile_source\n    # (PKG/_version.py, used by runtime code) are in MANIFEST.in, so\n    # they'll be copied into source distributions. Pip won't be able to\n    # install the package without this.\n    manifest_in = os.path.join(root, \"MANIFEST.in\")\n    simple_includes = set()\n    try:\n        with open(manifest_in, \"r\") as f:\n            for line in f:\n                if line.startswith(\"include \"):\n                    for include in line.split()[1:]:\n                        simple_includes.add(include)\n    except EnvironmentError:\n        pass\n    # That doesn't cover everything MANIFEST.in can do\n    # (http://docs.python.org/2/distutils/sourcedist.html#commands), so\n    # it might give some false negatives. 
Appending redundant 'include'\n    # lines is safe, though.\n    if \"versioneer.py\" not in simple_includes:\n        print(\" appending 'versioneer.py' to MANIFEST.in\")\n        with open(manifest_in, \"a\") as f:\n            f.write(\"include versioneer.py\\n\")\n    else:\n        print(\" 'versioneer.py' already in MANIFEST.in\")\n    if cfg.versionfile_source not in simple_includes:\n        print(\" appending versionfile_source ('%s') to MANIFEST.in\" %\n              cfg.versionfile_source)\n        with open(manifest_in, \"a\") as f:\n            f.write(\"include %s\\n\" % cfg.versionfile_source)\n    else:\n        print(\" versionfile_source already in MANIFEST.in\")\n\n    # Make VCS-specific changes. For git, this means creating/changing\n    # .gitattributes to mark _version.py for export-time keyword\n    # substitution.\n    do_vcs_install(manifest_in, cfg.versionfile_source, ipy)\n    return 0\n\n\ndef scan_setup_py():\n    found = set()\n    setters = False\n    errors = 0\n    with open(\"setup.py\", \"r\") as f:\n        for line in f.readlines():\n            if \"import versioneer\" in line:\n                found.add(\"import\")\n            if \"versioneer.get_cmdclass()\" in line:\n                found.add(\"cmdclass\")\n            if \"versioneer.get_version()\" in line:\n                found.add(\"get_version\")\n            if \"versioneer.VCS\" in line:\n                setters = True\n            if \"versioneer.versionfile_source\" in line:\n                setters = True\n    if len(found) != 3:\n        print(\"\")\n        print(\"Your setup.py appears to be missing some important items\")\n        print(\"(but I might be wrong). 
Please make sure it has something\")\n        print(\"roughly like the following:\")\n        print(\"\")\n        print(\" import versioneer\")\n        print(\" setup( version=versioneer.get_version(),\")\n        print(\"        cmdclass=versioneer.get_cmdclass(),  ...)\")\n        print(\"\")\n        errors += 1\n    if setters:\n        print(\"You should remove lines like 'versioneer.VCS = ' and\")\n        print(\"'versioneer.versionfile_source = ' . This configuration\")\n        print(\"now lives in setup.cfg, and should be removed from setup.py\")\n        print(\"\")\n        errors += 1\n    return errors\n\nif __name__ == \"__main__\":\n    cmd = sys.argv[1]\n    if cmd == \"setup\":\n        errors = do_setup()\n        errors += scan_setup_py()\n        if errors:\n            sys.exit(1)\n"
  }
]