[
  {
    "path": ".flake8",
    "content": "[flake8]\nignore =\n    # whitespace before ':' (Black)\n    E203,\n    # line break before binary operator (Black)\n    W503,\n\nper-file-ignores =\n    # Files and directories which need fixes or specific exceptions.\n    #\n    # Description of codes:\n    # E401    multiple imports on one line\n    # E501    line too long\n    #\n    ctypesgen/__init__.py: F401\n    ctypesgen/parser/cgrammar.py: E501\n\nmax-line-length = 100\n\nexclude =\n    ctypesgen/parser/parsetab.py,\n    ctypesgen/parser/lextab.py,\n\tctypesgen/parser/yacc.py,\n\tctypesgen/parser/lex.py,\n    demo/pydemolib.py,\n    .git,\n    __pycache__,\n    debian\n\nbuiltins =\n    _,\n"
  },
  {
    "path": ".github/workflows/black.yml",
    "content": "---\nname: Python Black Formatting\n\non:\n  - push\n  - pull_request\n  - fork\n\njobs:\n  black:\n    name: Black\n    runs-on: ubuntu-latest\n\n    steps:\n      - uses: actions/checkout@v3\n\n      - name: Set up Python\n        uses: actions/setup-python@v4\n        with:\n          python-version: '3.10'\n\n      - name: Install\n        run: |\n          python -m pip install --upgrade pip\n          pip install black==23.3.0\n\n      - name: Run Black\n        run: |\n          black --check --diff setup.py run.py ctypesgen/ \\\n            --exclude='${{ env.EXCLUDE }}'\n        env:\n          EXCLUDE: \".*tab.py|ctypesgen/parser/cgrammar.py|\\\n            ctypesgen/parser/lex.py|ctypesgen/parser/yacc.py\"\n"
  },
  {
    "path": ".github/workflows/flake8.yml",
    "content": "---\nname: Python Flake8 Code Quality\n\non:\n  - push\n  - pull_request\n  - fork\n\njobs:\n  flake8:\n    name: ${{ matrix.directory }}\n    runs-on: ubuntu-latest\n\n    steps:\n      - uses: actions/checkout@v3\n\n      - name: Set up Python\n        uses: actions/setup-python@v4\n        with:\n          python-version: 3.8\n\n      - name: Install\n        run: |\n          python -m pip install --upgrade pip\n          pip install flake8==3.8.4\n\n      - name: Run Flake8\n        run: |\n          flake8 --count --statistics --show-source --jobs=$(nproc) .\n"
  },
  {
    "path": ".github/workflows/publish.yml",
    "content": "---\nname: Publish Python distributions to PyPI\n\non:\n  release:\n    types: [published]\njobs:\n  build-n-publish:\n    name: Build and publish Python distributions to PyPI\n    runs-on: ubuntu-latest\n    steps:\n      - name: Check out repository\n        uses: actions/checkout@v3\n        with:\n          ref: ${{ github.ref }}\n\n      - name: Set up Python 3.10\n        uses: actions/setup-python@v3\n        with:\n          python-version: '3.10'\n\n      - name: Install pypa/build\n        run: python -m pip install build --user\n\n      - name: Build a binary wheel and a source tarball\n        run: python -m build\n\n      - name: Publish distribution to GitHub\n        uses: softprops/action-gh-release@v1\n        with:\n          files: |\n              dist/*\n\n      - name: Publish distribution to PyPI\n        uses: pypa/gh-action-pypi-publish@release/v1\n        with:\n          password: ${{ secrets.PYPI_API_TOKEN }}\n"
  },
  {
    "path": ".github/workflows/test.yml",
    "content": "---\nname: Test\n\non:\n  - push\n  - pull_request\n  - fork\n\njobs:\n  setup-and-test:\n    name: Python-${{ matrix.python }} ${{ matrix.os }}\n    runs-on: ${{ matrix.os }}\n    strategy:\n      matrix:\n        include:\n          # Linux\n          - os: ubuntu-latest\n            python: 3.7\n          - os: ubuntu-latest\n            python: 3.8\n          - os: ubuntu-latest\n            python: 3.9\n          - os: ubuntu-latest\n            python: '3.10'\n          # macOS\n          - os: macos-latest\n            python: '3.10'\n          # Windows\n          - os: windows-latest\n            python: '3.10'\n      fail-fast: false\n\n    steps:\n      - uses: actions/checkout@v3\n\n      - name: Set up Python\n        uses: actions/setup-python@v4\n        with:\n          python-version: ${{ matrix.python }}\n\n      - name: Install Python dependencies\n        run: |\n          python -m pip install --upgrade pip\n          pip install pytest\n\n      - name: Run Test\n        run: |\n          pytest -v --showlocals tests/testsuite.py\n"
  },
  {
    "path": ".gitignore",
    "content": "# precompiled python files\n*.pyc\n\n# generated by distutils\nMANIFEST\ndist/\n\n# generated by setuptools\nbuild/\nctypesgen.egg-info/\n.eggs/\n.python-version\ntests/.python-version\n\n# Swap/backup editor files\n*.swp\n*~\n.pybuild/\n.tox/\n.idea/\ncore\n"
  },
  {
    "path": ".travis.yml",
    "content": "dist: bionic\nlanguage: python\npython: 3.7.8\n\ninstall:\n  - pip install tox\nscript:\n  - tox\n\nstages:\n  - name: tox\n  - name: publish to test.pypi.org\n    if: env(publish) = true AND type = api\n    # to publish, go to https://travis-ci.com/Alan-R/ctypesgen/\n    # click more options -> trigger build\n    # in textbox enter this:\n    # env:\n    #   publish: true\n  - name: verify\n    if: env(publish) = true AND type = api\n\njobs:\n  include:\n    - stage: tox\n      env: TOXENV=py37\n\n    - stage: tox\n      env: TOXENV=black\n\n    - stage: publish to test.pypi.org\n      install:\n        - pip install --upgrade build twine\n      script:\n        - python -m build\n        - python -m twine upload --repository-url https://test.pypi.org/legacy/ dist/*\n\n    - stage: verify\n      install:\n        - pip install --index-url https://test.pypi.org/simple/ --no-deps ctypesgen\n      script:\n        - python -c 'import ctypesgen; print(ctypesgen.VERSION)'\n"
  },
  {
    "path": "CHANGELOG.md",
    "content": "## Change Log\n\n### Unreleased\n\n### v1.1.1\n\n- Fixed inconsistency in version output in released packages\n\n### v1.1.0\n\nThis release has a number of bug fixes in addition to a few new features.\nFollowing a complete transition to Python 3, with dropped Python 2 support,\nmajor work was made towards code modernization and quality.\n\n- The code is now Black formatted and Flake8 tested\n- Greatly improved unittest framework\n- Embedded PLY version updated to 3.11\n- New option: `--no-embed-preamble` create separate files for preamble and\n  loader instead of embedding in each output file\n- New option: `--allow-gnu-c` do not undefine `__GNUC__`\n- Fixed library loader search path on macOS\n- Fixed rare bug, processing (legacy) header files with MacRoman encoding\n  on macOS\n- Added full support for floating and integer constants\n- Added support for sized integer types on Windows\n- Added support to handle `restrict` and `_Noreturn` keywords\n- Added name formats to posix library loader\n- Fixed mapping of 'short int' to c_short\n- Git tags are now using `x.y.z` format\n\n### v1.0.2\n\nMany issues fixed. Parse gcc attributes more\n\nImplements automatic calling convention selection based on gcc attributes for\nstdcall/cdecl.\n\n- Simplify and unify library loader for various platforms. 
Improve library path\n  searches on Linux (parsed ld.so.conf includes now).\n- First implementation of #pragma pack\n- First implementation of #undef\n- Adds several command line options:\n  `-D` `--define`\n  `-U` `--undefine`\n  `--no-undefs`\n  `-P` `--strip-prefix`\n  `--debug-level`\n\n### v1.0.1\n\nFix handling of function prototypes \n\nOther minor improvements included.\n\n### v1.0.0\n\nPy2/Py3 support \n\nVarious development branches merged back\n\nIn addition to the various developments from the different branches, this\ntag also represents a code state that:\n\n- ties in with Travis CI to watch code developments\n- improves testsuite, including moving all JSON tests to testsuite\n- includes a decent Debian package build configuration\n- automatically creates a man page to be included in the Debian package\n"
  },
  {
    "path": "CONTRIBUTING",
    "content": "The best way to document a bug is to create a new test which demonstrates it. You should do that by adding a new test to:\n    ctypesgen/test/testsuite.py\nThis is *required* for any patches you might provide. You must provide tests to demonstrate your bug fix or enhancement.\n\nAll patches will be have to pass unit tests. You can run the tests by running \"tox\" with no options.\n\nAll our Python code is formatted with using the \"black\" command with a 100 character line length.\n    black --line-length 100\nYou can verify your patch formatting before you submit it by running \"tox -e black\".\n"
  },
  {
    "path": "LICENSE",
    "content": "Copyright (c) 2007-2022, Ctypesgen Developers\nAll rights reserved.\n\nRedistribution and use in source and binary forms, with or without\nmodification, are permitted provided that the following conditions are met:\n\n1. Redistributions of source code must retain the above copyright notice,\n   this list of conditions and the following disclaimer.\n2. Redistributions in binary form must reproduce the above copyright\n   notice, this list of conditions and the following disclaimer in the\n   documentation and/or other materials provided with the distribution.\n\nTHIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\"\nAND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE\nIMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE\nARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE\nLIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR\nCONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF\nSUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS\nINTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN\nCONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)\nARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE\nPOSSIBILITY OF SUCH DAMAGE.\n"
  },
  {
    "path": "MANIFEST.in",
    "content": "graft ctypesgen\nrecursive-exclude ctypesgen .gitignore\nglobal-exclude *.py[cod]\ninclude ctypesgen/VERSION\n"
  },
  {
    "path": "README.md",
    "content": "                              ctypesgen\n                              ---------\n\n                  (c) Ctypesgen developers 2007-2022\n                 https://github.com/ctypesgen/ctypesgen\n\n_ctypesgen_ is a pure-python ctypes wrapper generator. It parses C header files\nand creates a wrapper for libraries based on what it finds.\n\nPreprocessor macros are handled in a manner consistent with typical C code.\nPreprocessor macro functions are translated into Python functions that are then\nmade available to the user of the newly-generated Python wrapper library.\n\nIt can also output JSON, which can be used with Mork, which generates bindings\nfor Lua, using the alien module (which binds libffi to Lua).\n\n## Documentation\n\nSee https://github.com/ctypesgen/ctypesgen/wiki for full documentation.\n\nRun `ctypesgen --help` for full range of available options.\n\n## Installation\n\n_ctypesgen_ can be installed by `pip install ctypesgen`. It requires Python 3.7\nto run.\n\n## Basic Usage\n\nThis project automatically generates ctypes wrappers for header files written\nin C.\n\nFor example, if you'd like to generate Neon bindings, you can do so using this\nrecipe (using a standard pip install):\n\n```sh\nctypesgen -lneon /usr/local/include/neon/ne_*.h -o neon.py\n```\n\nSome libraries, such as APR, need special flags to compile. You can pass these\nflags in on the command line.\n\nFor example:\n\n```sh\nFLAGS = `apr-1-config --cppflags --includes`\nctypesgen $FLAGS -llibapr-1.so $HOME/include/apr-1/apr*.h -o apr.py\n```\n\nSometimes, libraries will depend on each other. You can specify these\ndependencies using -mmodule, where module is the name of the dependency module.\n\nHere's an example for apr_util:\n\n```sh\nctypesgen $FLAGS -llibaprutil-1.so $HOME/include/apr-1/ap[ru]*.h \\\n\t-mapr -o apr_util.py\n```\n\nIf you want JSON output (e.g. 
for generating Lua bindings), use\n\n```\n--output-language=json\n```\n\nWhen outputting JSON, you will probably also want to use\n\n```\n--all-headers --builtin-symbols --no-stddef-types --no-gnu-types\n--no-python-types\n```\n\n## Related Software of Interest\n\n_libffi_ is a portable Foreign Function Interface library:\nhttp://sources.redhat.com/libffi/\n\n_Mork_, the friendly alien, can be found at:\nhttps://github.com/rrthomas/mork\n\n## License\n\n_ctypesgen_ is distributed under the New (2-clause) BSD License:\nhttp://www.opensource.org/licenses/bsd-license.php\n"
  },
  {
    "path": "ctypesgen/.gitignore",
    "content": "VERSION\n"
  },
  {
    "path": "ctypesgen/__init__.py",
    "content": "\"\"\"\nCtypesgencore is the module that contains the main body of ctypesgen - in fact,\nit contains everything but the command-line interface.\n\nctypesgen's job is divided into three steps:\n\nStep 1: Parse\n\nCtypesgen reads the input header files and parses them. It generates a list of\nfunction, variable, struct, union, enum, constant, typedef, and macro\ndescriptions from the input files. These descriptions are encapsulated as\nctypesgen.descriptions.Description objects.\n\nThe package ctypesgen.parser is responsible for the parsing stage.\n\nStep 2: Process\n\nCtypesgen processes the list of descriptions from the parsing stage. This is\nthe stage where ctypesgen resolves name conflicts and filters descriptions using\nthe regexes specified on the command line. Other processing steps take place\nat this stage, too. When processing is done, ctypesgen finalizes which\ndescriptions will be included in the output file.\n\nThe package ctypesgen.processor is responsible for the processing stage.\n\nStep 3: Print\n\nCtypesgen writes the descriptions to the output file, along with a header.\n\nThe package ctypesgen.printer is responsible for the printing stage.\n\nThere are three modules in ctypesgen that describe the format that the\nparser, processor, and printer modules use to pass information. They are:\n\n* descriptions: Classes to represent the descriptions.\n\n* ctypedecls: Classes to represent C types.\n\n* expressions: Classes to represent an expression in a language-independent\nformat.\n\"\"\"\n\n__all__ = [\n    \"parser\",\n    \"processor\",\n    \"printer\",\n    \"descriptions\",\n    \"ctypedescs\",\n    \"expressions\",\n    \"messages\",\n    \"options\",\n    \"version\",\n]\n\n# Workhorse modules\nfrom . import parser\nfrom . import processor\nfrom . import printer_python\nfrom . import version\n\n# Modules describing internal format\nfrom . import descriptions\nfrom . import ctypedescs\nfrom . 
import expressions\n\n# Helper modules\nfrom . import messages\nfrom . import options\n\ntry:\n    from . import printer_json\nexcept ImportError:\n    pass\n\n__version__ = version.VERSION.partition(\"-\")[-1]\nVERSION = __version__\n\nprinter = printer_python  # Default the printer to generating Python\n"
  },
  {
    "path": "ctypesgen/__main__.py",
    "content": "\"\"\"\nCommand-line interface for ctypesgen\n\"\"\"\n\nimport argparse\n\nfrom ctypesgen import (\n    messages as msgs,\n    options as core_options,\n    parser as core_parser,\n    printer_python,\n    printer_json,\n    processor,\n    version,\n)\n\n\ndef find_names_in_modules(modules):\n    names = set()\n    for module in modules:\n        try:\n            mod = __import__(module)\n        except Exception:\n            pass\n        else:\n            names.update(dir(mod))\n    return names\n\n\ndef main(givenargs=None):\n    # TODO(geisserml) In the future, convert action=\"append\" to nargs=\"*\" - that's nicer to use\n\n    parser = argparse.ArgumentParser()\n    parser.add_argument(\n        \"--version\",\n        action=\"version\",\n        version=version.VERSION_NUMBER,\n    )\n\n    # Parameters\n    parser.add_argument(\"headers\", nargs=\"+\", help=\"Sequence of header files\")\n    parser.add_argument(\n        \"-o\",\n        \"--output\",\n        metavar=\"FILE\",\n        help=\"write wrapper to FILE [default stdout]\",\n    )\n    parser.add_argument(\n        \"-l\",\n        \"--library\",\n        dest=\"libraries\",\n        action=\"append\",\n        default=[],\n        metavar=\"LIBRARY\",\n        help=\"link to LIBRARY\",\n    )\n    parser.add_argument(\n        \"--include\",\n        dest=\"other_headers\",\n        action=\"append\",\n        default=[],\n        metavar=\"HEADER\",\n        help=\"include system header HEADER (e.g. 
stdio.h or stdlib.h)\",\n    )\n    parser.add_argument(\n        \"-m\",\n        \"--module\",\n        \"--link-module\",\n        action=\"append\",\n        dest=\"modules\",\n        metavar=\"MODULE\",\n        default=[],\n        help=\"use symbols from Python module MODULE\",\n    )\n    parser.add_argument(\n        \"-I\",\n        \"--includedir\",\n        action=\"append\",\n        dest=\"include_search_paths\",\n        default=[],\n        metavar=\"INCLUDEDIR\",\n        help=\"add INCLUDEDIR as a directory to search for headers\",\n    )\n    parser.add_argument(\n        \"-L\",\n        \"-R\",\n        \"--rpath\",\n        \"--libdir\",\n        action=\"append\",\n        dest=\"universal_libdirs\",\n        default=[],\n        metavar=\"LIBDIR\",\n        help=\"Add LIBDIR to the search path (both compile-time and run-time)\",\n    )\n    parser.add_argument(\n        \"--compile-libdir\",\n        action=\"append\",\n        dest=\"compile_libdirs\",\n        metavar=\"LIBDIR\",\n        default=[],\n        help=\"Add LIBDIR to the compile-time library search path.\",\n    )\n    parser.add_argument(\n        \"--runtime-libdir\",\n        action=\"append\",\n        dest=\"runtime_libdirs\",\n        metavar=\"LIBDIR\",\n        default=[],\n        help=\"Add LIBDIR to the run-time library search path.\",\n    )\n    parser.add_argument(\n        \"--no-embed-preamble\",\n        action=\"store_false\",\n        dest=\"embed_preamble\",\n        default=True,\n        help=\"Do not embed preamble and loader in output file. 
\"\n        \"Defining --output as a file and --output-language to \"\n        \"Python is a prerequisite.\",\n    )\n\n    # Parser options\n    parser.add_argument(\n        \"--cpp\",\n        dest=\"cpp\",\n        default=\"gcc -E\",\n        help=\"The command to invoke the c preprocessor, including any \"\n        \"necessary options (default: gcc -E)\",\n    )\n    parser.add_argument(\n        \"--allow-gnu-c\",\n        action=\"store_true\",\n        dest=\"allow_gnu_c\",\n        default=False,\n        help=\"Specify whether to undefine the '__GNUC__' macro, \"\n        \"while invoking the C preprocessor.\\n\"\n        \"(default: False. i.e. ctypesgen adds an implicit undefine using '-U __GNUC__'.)\\n\"\n        \"Specify this flag to avoid ctypesgen undefining '__GNUC__' as shown above.\",\n    )\n    parser.add_argument(\n        \"-D\",\n        \"--define\",\n        action=\"append\",\n        dest=\"cpp_defines\",\n        metavar=\"MACRO\",\n        default=[],\n        help=\"Add a definition to the preprocessor via commandline\",\n    )\n    parser.add_argument(\n        \"-U\",\n        \"--undefine\",\n        action=\"append\",\n        dest=\"cpp_undefines\",\n        metavar=\"NAME\",\n        default=[],\n        help=\"Instruct the preprocessor to undefine the specified macro via commandline\",\n    )\n    parser.add_argument(\n        \"--save-preprocessed-headers\",\n        metavar=\"FILENAME\",\n        dest=\"save_preprocessed_headers\",\n        default=None,\n        help=\"Save the preprocessed headers to the specified FILENAME\",\n    )\n    parser.add_argument(\n        \"--optimize-lexer\",\n        dest=\"optimize_lexer\",\n        action=\"store_true\",\n        default=False,\n        help=\"Run the lexer in optimized mode.  
This mode requires write \"\n        \"access to lextab.py file stored within the ctypesgen package.\",\n    )\n\n    # Processor options\n    parser.add_argument(\n        \"-a\",\n        \"--all-headers\",\n        action=\"store_true\",\n        dest=\"all_headers\",\n        default=False,\n        help=\"include symbols from all headers, including system headers\",\n    )\n    parser.add_argument(\n        \"--builtin-symbols\",\n        action=\"store_true\",\n        dest=\"builtin_symbols\",\n        default=False,\n        help=\"include symbols automatically generated by the preprocessor\",\n    )\n    parser.add_argument(\n        \"--no-macros\",\n        action=\"store_false\",\n        dest=\"include_macros\",\n        default=True,\n        help=\"Don't output macros.\",\n    )\n    parser.add_argument(\n        \"--no-undefs\",\n        action=\"store_false\",\n        dest=\"include_undefs\",\n        default=True,\n        help=\"Do not remove macro definitions as per #undef directives\",\n    )\n    parser.add_argument(\n        \"-i\",\n        \"--include-symbols\",\n        action=\"append\",\n        dest=\"include_symbols\",\n        metavar=\"REGEXPR\",\n        default=[],\n        help=\"Regular expression for symbols to always include.  Multiple \"\n        \"instances of this option will be combined into a single expression \"\n        \"doing something like '(expr1|expr2|expr3)'.\",\n    )\n    parser.add_argument(\n        \"-x\",\n        \"--exclude-symbols\",\n        action=\"append\",\n        dest=\"exclude_symbols\",\n        metavar=\"REGEXPR\",\n        default=[],\n        help=\"Regular expression for symbols to exclude.  
Multiple instances \"\n        \"of this option will be combined into a single expression doing \"\n        \"something like '(expr1|expr2|expr3)'.\",\n    )\n    parser.add_argument(\n        \"--no-stddef-types\",\n        action=\"store_true\",\n        dest=\"no_stddef_types\",\n        default=False,\n        help=\"Do not support extra C types from stddef.h\",\n    )\n    parser.add_argument(\n        \"--no-gnu-types\",\n        action=\"store_true\",\n        dest=\"no_gnu_types\",\n        default=False,\n        help=\"Do not support extra GNU C types\",\n    )\n    parser.add_argument(\n        \"--no-python-types\",\n        action=\"store_true\",\n        dest=\"no_python_types\",\n        default=False,\n        help=\"Do not support extra C types built in to Python\",\n    )\n    parser.add_argument(\n        \"--no-load-library\",\n        action=\"store_true\",\n        dest=\"no_load_library\",\n        default=False,\n        help=\"Do not try to load library during the processing\",\n    )\n\n    # Printer options\n    parser.add_argument(\n        \"--header-template\",\n        dest=\"header_template\",\n        default=None,\n        metavar=\"TEMPLATE\",\n        help=\"Use TEMPLATE as the header template in the output file.\",\n    )\n    parser.add_argument(\n        \"--strip-build-path\",\n        dest=\"strip_build_path\",\n        default=None,\n        metavar=\"BUILD_PATH\",\n        help=\"Strip build path from header paths in the wrapper file.\",\n    )\n    parser.add_argument(\n        \"--insert-file\",\n        dest=\"inserted_files\",\n        default=[],\n        action=\"append\",\n        metavar=\"FILENAME\",\n        help=\"Add the contents of FILENAME to the end of the wrapper file.\",\n    )\n    parser.add_argument(\n        \"--output-language\",\n        dest=\"output_language\",\n        metavar=\"LANGUAGE\",\n        default=\"py\",\n        choices=(\"py\", \"json\"),\n        help=\"Choose output language\",\n    
)\n    parser.add_argument(\n        \"-P\",\n        \"--strip-prefix\",\n        dest=\"strip_prefixes\",\n        default=[],\n        action=\"append\",\n        metavar=\"REGEXPR\",\n        help=\"Regular expression to match prefix to strip from all symbols.  \"\n        \"Multiple instances of this option will be combined into a single \"\n        \"expression doing something like '(expr1|expr2|expr3)'.\",\n    )\n\n    # Error options\n    parser.add_argument(\n        \"--all-errors\",\n        action=\"store_true\",\n        default=False,\n        dest=\"show_all_errors\",\n        help=\"Display all warnings and errors even if they would not affect output.\",\n    )\n    parser.add_argument(\n        \"--show-long-errors\",\n        action=\"store_true\",\n        default=False,\n        dest=\"show_long_errors\",\n        help=\"Display long error messages instead of abbreviating error messages.\",\n    )\n    parser.add_argument(\n        \"--no-macro-warnings\",\n        action=\"store_false\",\n        default=True,\n        dest=\"show_macro_warnings\",\n        help=\"Do not print macro warnings.\",\n    )\n    parser.add_argument(\n        \"--debug-level\",\n        dest=\"debug_level\",\n        default=0,\n        type=int,\n        help=\"Run ctypesgen with specified debug level (also applies to yacc parser)\",\n    )\n\n    parser.set_defaults(**core_options.default_values)\n    args = parser.parse_args(givenargs)\n\n    # Important: don't use +=, it modifies the original list instead of\n    # creating a new one. 
This can be problematic with repeated API calls.\n    args.compile_libdirs = args.compile_libdirs + args.universal_libdirs\n    args.runtime_libdirs = args.runtime_libdirs + args.universal_libdirs\n\n    # Figure out what names will be defined by imported Python modules\n    args.other_known_names = find_names_in_modules(args.modules)\n\n    if len(args.libraries) == 0:\n        msgs.warning_message(\"No libraries specified\", cls=\"usage\")\n\n    # Fetch printer for the requested output language\n    if args.output_language == \"py\":\n        printer = printer_python.WrapperPrinter\n    elif args.output_language == \"json\":\n        printer = printer_json.WrapperPrinter\n    else:\n        assert False  # handled by argparse choices\n\n    # Step 1: Parse\n    descriptions = core_parser.parse(args.headers, args)\n\n    # Step 2: Process\n    processor.process(descriptions, args)\n\n    # Step 3: Print\n    printer(args.output, args, descriptions)\n\n    msgs.status_message(\"Wrapping complete.\")\n\n    # Correct what may be a common mistake\n    if descriptions.all == []:\n        if not args.all_headers:\n            msgs.warning_message(\n                \"There wasn't anything of use in the \"\n                \"specified header file(s). Perhaps you meant to run with \"\n                \"--all-headers to include objects from included sub-headers? \",\n                cls=\"usage\",\n            )\n\n\nif __name__ == \"__main__\":\n    main()\n"
  },
  {
    "path": "ctypesgen/ctypedescs.py",
    "content": "\"\"\"\nctypesgen.ctypedescs contains classes to represent a C type. All of them\nclasses are subclasses of CtypesType.\n\nUnlike in previous versions of ctypesgen, CtypesType and its subclasses are\ncompletely independent of the parser module.\n\nThe most important method of CtypesType and its subclasses is the py_string\nmethod. str(ctype) returns a string which, when evaluated in the wrapper\nat runtime, results in a ctypes type object.\n\nFor example, a CtypesType\nrepresenting an array of four integers could be created using:\n\n>>> ctype = CtypesArray(CtypesSimple(\"int\",True,0),4)\n\nstr(ctype) would evaluate to \"c_int * 4\".\n\"\"\"\n\n__docformat__ = \"restructuredtext\"\n\nctypes_type_map = {\n    # typename   signed  longs\n    (\"void\", True, 0): \"None\",\n    (\"int\", True, 0): \"c_int\",\n    (\"int\", False, 0): \"c_uint\",\n    (\"int\", True, 1): \"c_long\",\n    (\"int\", False, 1): \"c_ulong\",\n    (\"char\", True, 0): \"c_char\",\n    (\"char\", False, 0): \"c_ubyte\",\n    (\"short\", True, 0): \"c_short\",\n    (\"short\", False, 0): \"c_ushort\",\n    (\"float\", True, 0): \"c_float\",\n    (\"double\", True, 0): \"c_double\",\n    (\"double\", True, 1): \"c_longdouble\",\n    (\"int8_t\", True, 0): \"c_int8\",\n    (\"__int8_t\", True, 0): \"c_int8\",\n    (\"__int8\", True, 0): \"c_int8\",\n    (\"int16_t\", True, 0): \"c_int16\",\n    (\"__int16_t\", True, 0): \"c_int16\",\n    (\"__int16\", True, 0): \"c_int16\",\n    (\"int32_t\", True, 0): \"c_int32\",\n    (\"__int32_t\", True, 0): \"c_int32\",\n    (\"__int32\", True, 0): \"c_int32\",\n    (\"int64_t\", True, 0): \"c_int64\",\n    (\"__int64\", True, 0): \"c_int64\",\n    (\"__int64_t\", True, 0): \"c_int64\",\n    (\"uint8_t\", False, 0): \"c_uint8\",\n    (\"__uint8\", False, 0): \"c_uint8\",\n    (\"__uint8_t\", False, 0): \"c_uint8\",\n    (\"uint16_t\", False, 0): \"c_uint16\",\n    (\"__uint16\", False, 0): \"c_uint16\",\n    (\"__uint16_t\", False, 0): 
\"c_uint16\",\n    (\"uint32_t\", False, 0): \"c_uint32\",\n    (\"__uint32\", False, 0): \"c_uint32\",\n    (\"__uint32_t\", False, 0): \"c_uint32\",\n    (\"uint64_t\", False, 0): \"c_uint64\",\n    (\"__uint64\", False, 0): \"c_uint64\",\n    (\"__uint64_t\", False, 0): \"c_uint64\",\n    (\"_Bool\", True, 0): \"c_bool\",\n    (\"bool\", True, 0): \"c_bool\",\n}\n\nctypes_type_map_python_builtin = {\n    (\"int\", True, -1): \"c_short\",\n    (\"int\", False, -1): \"c_ushort\",\n    (\"int\", True, 2): \"c_longlong\",\n    (\"int\", False, 2): \"c_ulonglong\",\n    (\"size_t\", True, 0): \"c_size_t\",\n    (\"off64_t\", True, 0): \"c_int64\",\n    (\"wchar_t\", True, 0): \"c_wchar\",\n    (\"ptrdiff_t\", True, 0): \"c_ptrdiff_t\",  # Requires definition in preamble\n    (\"ssize_t\", True, 0): \"c_ptrdiff_t\",  # Requires definition in preamble\n    (\"va_list\", True, 0): \"c_void_p\",\n}\n\n\n# This protocol is used for walking type trees.\nclass CtypesTypeVisitor(object):\n    def visit_struct(self, struct):\n        pass\n\n    def visit_enum(self, enum):\n        pass\n\n    def visit_typedef(self, name):\n        pass\n\n    def visit_error(self, error, cls):\n        pass\n\n    def visit_identifier(self, identifier):\n        # This one comes from inside ExpressionNodes. 
There may be\n        # ExpressionNode objects in array count expressions.\n        pass\n\n\ndef visit_type_and_collect_info(ctype):\n    class Visitor(CtypesTypeVisitor):\n        def visit_struct(self, struct):\n            structs.append(struct)\n\n        def visit_enum(self, enum):\n            enums.append(enum)\n\n        def visit_typedef(self, typedef):\n            typedefs.append(typedef)\n\n        def visit_error(self, error, cls):\n            errors.append((error, cls))\n\n        def visit_identifier(self, identifier):\n            identifiers.append(identifier)\n\n    structs = []\n    enums = []\n    typedefs = []\n    errors = []\n    identifiers = []\n    v = Visitor()\n    ctype.visit(v)\n    return structs, enums, typedefs, errors, identifiers\n\n\n# Remove one level of indirection from function pointer; needed for typedefs\n# and function parameters.\ndef remove_function_pointer(t):\n    if type(t) == CtypesPointer and type(t.destination) == CtypesFunction:\n        return t.destination\n    elif type(t) == CtypesPointer:\n        t.destination = remove_function_pointer(t.destination)\n        return t\n    else:\n        return t\n\n\nclass CtypesType(object):\n    def __init__(self):\n        super(CtypesType, self).__init__()\n        self.errors = []\n\n    def __repr__(self):\n        return '<Ctype (%s) \"%s\">' % (type(self).__name__, self.py_string())\n\n    def error(self, message, cls=None):\n        self.errors.append((message, cls))\n\n    def visit(self, visitor):\n        for error, cls in self.errors:\n            visitor.visit_error(error, cls)\n\n\nclass CtypesSimple(CtypesType):\n    \"\"\"Represents a builtin type, like \"char\" or \"int\".\"\"\"\n\n    def __init__(self, name, signed, longs):\n        super(CtypesSimple, self).__init__()\n        self.name = name\n        self.signed = signed\n        self.longs = longs\n\n    def py_string(self, ignore_can_be_ctype=None):\n        return ctypes_type_map[(self.name, 
self.signed, self.longs)]\n\n\nclass CtypesSpecial(CtypesType):\n    def __init__(self, name):\n        super(CtypesSpecial, self).__init__()\n        self.name = name\n\n    def py_string(self, ignore_can_be_ctype=None):\n        return self.name\n\n\nclass CtypesTypedef(CtypesType):\n    \"\"\"Represents a type defined by a typedef.\"\"\"\n\n    def __init__(self, name):\n        super(CtypesTypedef, self).__init__()\n        self.name = name\n\n    def visit(self, visitor):\n        if not self.errors:\n            visitor.visit_typedef(self.name)\n        super(CtypesTypedef, self).visit(visitor)\n\n    def py_string(self, ignore_can_be_ctype=None):\n        return self.name\n\n\nclass CtypesBitfield(CtypesType):\n    def __init__(self, base, bitfield):\n        super(CtypesBitfield, self).__init__()\n        self.base = base\n        self.bitfield = bitfield\n\n    def visit(self, visitor):\n        self.base.visit(visitor)\n        super(CtypesBitfield, self).visit(visitor)\n\n    def py_string(self, ignore_can_be_ctype=None):\n        return self.base.py_string()\n\n\nclass CtypesPointer(CtypesType):\n    def __init__(self, destination, qualifiers):\n        super(CtypesPointer, self).__init__()\n        self.destination = destination\n        self.qualifiers = qualifiers\n\n    def visit(self, visitor):\n        if self.destination:\n            self.destination.visit(visitor)\n        super(CtypesPointer, self).visit(visitor)\n\n    def py_string(self, ignore_can_be_ctype=None):\n        return \"POINTER(%s)\" % self.destination.py_string()\n\n\nclass CtypesArray(CtypesType):\n    def __init__(self, base, count):\n        super(CtypesArray, self).__init__()\n        self.base = base\n        self.count = count\n\n    def visit(self, visitor):\n        self.base.visit(visitor)\n        if self.count:\n            self.count.visit(visitor)\n        super(CtypesArray, self).visit(visitor)\n\n    def py_string(self, ignore_can_be_ctype=None):\n        if 
self.count is None:\n            return \"POINTER(%s)\" % self.base.py_string()\n        if type(self.base) == CtypesArray:\n            return \"(%s) * int(%s)\" % (self.base.py_string(), self.count.py_string(False))\n        else:\n            return \"%s * int(%s)\" % (self.base.py_string(), self.count.py_string(False))\n\n\nclass CtypesNoErrorCheck(object):\n    def py_string(self, ignore_can_be_ctype=None):\n        return \"None\"\n\n    def __bool__(self):\n        return False\n\n    __nonzero__ = __bool__\n\n\nclass CtypesPointerCast(object):\n    def __init__(self, target):\n        self.target = target\n\n    def py_string(self, ignore_can_be_ctype=None):\n        return \"lambda v,*a : cast(v, {})\".format(self.target.py_string())\n\n\nclass CtypesFunction(CtypesType):\n    def __init__(self, restype, parameters, variadic, attrib=dict()):\n        super(CtypesFunction, self).__init__()\n        self.restype = restype\n        self.errcheck = CtypesNoErrorCheck()\n\n        # Don't allow POINTER(None) (c_void_p) as a restype... causes errors\n        # when ctypes automagically returns it as an int.\n        # Instead, convert to POINTER(c_void).  
c_void is not a ctypes type,\n        # you can make it any arbitrary type.\n        if (\n            type(self.restype) == CtypesPointer\n            and type(self.restype.destination) == CtypesSimple\n            and self.restype.destination.name == \"void\"\n        ):\n            # we will provide a means of converting this to a c_void_p\n            self.restype = CtypesPointer(CtypesSpecial(\"c_ubyte\"), ())\n            self.errcheck = CtypesPointerCast(CtypesSpecial(\"c_void_p\"))\n\n        # Return \"String\" instead of \"POINTER(c_char)\"\n        if self.restype.py_string() == \"POINTER(c_char)\":\n            if \"const\" in self.restype.qualifiers:\n                self.restype = CtypesSpecial(\"c_char_p\")\n            else:\n                self.restype = CtypesSpecial(\"String\")\n\n        self.argtypes = [remove_function_pointer(p) for p in parameters]\n        self.variadic = variadic\n        self.attrib = attrib\n\n    def visit(self, visitor):\n        self.restype.visit(visitor)\n        for a in self.argtypes:\n            a.visit(visitor)\n        super(CtypesFunction, self).visit(visitor)\n\n    def py_string(self, ignore_can_be_ctype=None):\n        return \"CFUNCTYPE(UNCHECKED(%s), %s)\" % (\n            self.restype.py_string(),\n            \", \".join([a.py_string() for a in self.argtypes]),\n        )\n\n\nlast_tagnum = 0\n\n\ndef anonymous_struct_tagnum():\n    global last_tagnum\n    last_tagnum += 1\n    return last_tagnum\n\n\ndef fmt_anonymous_struct_tag(num):\n    return \"anon_%d\" % num\n\n\ndef anonymous_struct_tag():\n    return fmt_anonymous_struct_tag(anonymous_struct_tagnum())\n\n\nclass CtypesStruct(CtypesType):\n    def __init__(self, tag, attrib, variety, members, src=None):\n        super(CtypesStruct, self).__init__()\n        self.tag = tag\n        self.attrib = attrib\n        self.variety = variety  # \"struct\" or \"union\"\n        self.members = members\n\n        if type(self.tag) == int or not 
self.tag:\n            if type(self.tag) == int:\n                self.tag = fmt_anonymous_struct_tag(self.tag)\n            else:\n                self.tag = anonymous_struct_tag()\n            self.anonymous = True\n        else:\n            self.anonymous = False\n\n        if self.members is None:\n            self.opaque = True\n        else:\n            self.opaque = False\n\n        self.src = src\n\n    def get_required_types(self):\n        types = super(CtypesStruct, self).get_required_types()\n        types.add((self.variety, self.tag))\n        return types\n\n    def visit(self, visitor):\n        visitor.visit_struct(self)\n        if not self.opaque:\n            for name, ctype in self.members:\n                ctype.visit(visitor)\n        super(CtypesStruct, self).visit(visitor)\n\n    def get_subtypes(self):\n        if self.opaque:\n            return set()\n        else:\n            return set([m[1] for m in self.members])\n\n    def py_string(self, ignore_can_be_ctype=None):\n        return \"%s_%s\" % (self.variety, self.tag)\n\n\nlast_tagnum = 0\n\n\ndef anonymous_enum_tag():\n    global last_tagnum\n    last_tagnum += 1\n    return \"anon_%d\" % last_tagnum\n\n\nclass CtypesEnum(CtypesType):\n    def __init__(self, tag, enumerators, src=None):\n        super(CtypesEnum, self).__init__()\n        self.tag = tag\n        self.enumerators = enumerators\n\n        if not self.tag:\n            self.tag = anonymous_enum_tag()\n            self.anonymous = True\n        else:\n            self.anonymous = False\n\n        if self.enumerators is None:\n            self.opaque = True\n        else:\n            self.opaque = False\n\n        self.src = src\n\n    def visit(self, visitor):\n        visitor.visit_enum(self)\n        super(CtypesEnum, self).visit(visitor)\n\n    def py_string(self, ignore_can_be_ctype=None):\n        return \"enum_%s\" % self.tag\n"
  },
  {
    "path": "ctypesgen/descriptions.py",
    "content": "\"\"\"\nctypesgen.descriptions contains classes to represent a description of a\nstruct, union, enum, function, constant, variable, or macro. All the\ndescription classes are subclassed from an abstract base class, Description.\nThe descriptions module also contains a class, DescriptionCollection, to hold\nlists of Description objects.\n\"\"\"\n\n\nclass DescriptionCollection(object):\n    \"\"\"Represents a collection of Descriptions.\"\"\"\n\n    def __init__(\n        self, constants, typedefs, structs, enums, functions, variables, macros, all, output_order\n    ):\n        self.constants = constants\n        self.typedefs = typedefs\n        self.structs = structs\n        self.enums = enums\n        self.functions = functions\n        self.variables = variables\n        self.macros = macros\n        self.all = all\n        self.output_order = output_order\n\n\nclass Description(object):\n    \"\"\"Represents a constant, typedef, struct, function, variable, enum,\n    or macro description. Description is an abstract base class.\"\"\"\n\n    def __init__(self, src=None):\n        super(Description, self).__init__()\n        self.src = src  # A tuple of (filename, lineno)\n\n        # If object will be included in output file. Values are \"yes\", \"never\",\n        # and \"if_needed\".\n        self.include_rule = \"yes\"\n\n        # A word about requirements, and dependents:\n        # If X requires Y, Y is in X.requirements.\n        # If X is in Y.requirements, then Y is in X.dependents.\n        self.requirements = set()\n        self.dependents = set()\n\n        # If the processor module finds a fatal error that prevents a\n        # a description from being output, then it appends a string describing\n        # the problem to 'errors'. If it finds a nonfatal error, it appends a\n        # string to 'warnings'. 
If the description would have been output, then\n        # the errors and warnings are printed.\n\n        # If there is anything in 'errors' after processing is complete, the\n        # description is not output.\n\n        self.errors = []\n        self.warnings = []\n\n    def add_requirements(self, reqs):\n        self.requirements = self.requirements.union(reqs)\n        for req in reqs:\n            req.dependents.add(self)\n\n    def error(self, msg, cls=None):\n        self.errors.append((msg, cls))\n\n    def warning(self, msg, cls=None):\n        self.warnings.append((msg, cls))\n\n    def __repr__(self):\n        return \"<Description: %s>\" % self.casual_name()\n\n    def casual_name(self):\n        \"\"\"Return a name to show the user.\"\"\"\n\n    def py_name(self):\n        \"\"\"Return the name associated with this description in Python code.\"\"\"\n\n    def c_name(self):\n        \"\"\"Return the name associated with this description in C code.\"\"\"\n\n\nclass ConstantDescription(Description):\n    \"\"\"Simple class to contain information about a constant.\"\"\"\n\n    def __init__(self, name, value, src=None):\n        super(ConstantDescription, self).__init__(src)\n        # Name of constant, a string\n        self.name = name\n        # Value of constant, as an ExpressionNode object\n        self.value = value\n\n    def casual_name(self):\n        return 'Constant \"%s\"' % self.name\n\n    def py_name(self):\n        return self.name\n\n    def c_name(self):\n        return self.name\n\n\nclass TypedefDescription(Description):\n    \"\"\"Simple container class for a type definition.\"\"\"\n\n    def __init__(self, name, ctype, src=None):\n        super(TypedefDescription, self).__init__(src)\n        self.name = name  # Name, a string\n        self.ctype = ctype  # The base type as a ctypedescs.CtypeType object\n\n    def casual_name(self):\n        return 'Typedef \"%s\"' % self.name\n\n    def py_name(self):\n        return self.name\n\n  
  def c_name(self):\n        return self.name\n\n\nclass StructDescription(Description):\n    \"\"\"Simple container class for a structure or union definition.\"\"\"\n\n    def __init__(self, tag, attrib, variety, members, opaque, ctype, src=None):\n        super(StructDescription, self).__init__(src)\n        # The name of the structure minus the \"struct\" or \"union\"\n        self.tag = tag\n        self.attrib = attrib\n        # A string \"struct\" or \"union\"\n        self.variety = variety\n        # A list of pairs of (name,ctype)\n        self.members = members\n        # True if struct body was not specified in header file\n        self.opaque = opaque\n        # The original CtypeStruct that created the struct\n        self.ctype = ctype\n\n    def casual_name(self):\n        return '%s \"%s\"' % (self.variety.capitalize(), self.tag)\n\n    def py_name(self):\n        return \"%s_%s\" % (self.variety, self.tag)\n\n    def c_name(self):\n        return \"%s %s\" % (self.variety, self.tag)\n\n\nclass EnumDescription(Description):\n    \"\"\"Simple container class for an enum definition.\"\"\"\n\n    def __init__(self, tag, members, ctype, src=None):\n        super(EnumDescription, self).__init__(src)\n        # The name of the enum, minus the \"enum\"\n        self.tag = tag\n        # A list of (name,value) pairs where value is a number\n        self.members = members\n        # The original CtypeEnum that created the enum\n        self.ctype = ctype\n\n    def casual_name(self):\n        return 'Enum \"%s\"' % self.tag\n\n    def py_name(self):\n        return \"enum_%s\" % self.tag\n\n    def c_name(self):\n        return \"enum %s\" % self.tag\n\n\nclass FunctionDescription(Description):\n    \"\"\"Simple container class for a C function.\"\"\"\n\n    def __init__(self, name, restype, argtypes, errcheck, variadic, attrib, src):\n        super(FunctionDescription, self).__init__(src)\n        # Name, a string\n        self.name = name\n        # Name 
according to C - stored in case description is renamed\n        self.cname = name\n        # A ctype representing return type\n        self.restype = restype\n        # A list of ctypes representing the argument types\n        self.argtypes = argtypes\n        # An optional error checker/caster\n        self.errcheck = errcheck\n        # Does this function accept a variable number of arguments?\n        self.variadic = variadic\n        # The set of attributes applied to the function (e.g. stdcall)\n        self.attrib = attrib\n\n    def casual_name(self):\n        return 'Function \"%s\"' % self.name\n\n    def py_name(self):\n        return self.name\n\n    def c_name(self):\n        return self.cname\n\n\nclass VariableDescription(Description):\n    \"\"\"Simple container class for a C variable declaration.\"\"\"\n\n    def __init__(self, name, ctype, src=None):\n        super(VariableDescription, self).__init__(src)\n        # Name, a string\n        self.name = name\n        # Name according to C - stored in case description is renamed\n        self.cname = name\n        # The type of the variable\n        self.ctype = ctype\n\n    def casual_name(self):\n        return 'Variable \"%s\"' % self.name\n\n    def py_name(self):\n        return self.name\n\n    def c_name(self):\n        return self.cname\n\n\nclass MacroDescription(Description):\n    \"\"\"Simple container class for a C macro.\"\"\"\n\n    def __init__(self, name, params, expr, src=None):\n        super(MacroDescription, self).__init__(src)\n        self.name = name\n        self.params = params\n        self.expr = expr  # ExpressionNode for the macro's body\n\n    def casual_name(self):\n        return 'Macro \"%s\"' % self.name\n\n    def py_name(self):\n        return self.name\n\n    def c_name(self):\n        return self.name\n\n\nclass UndefDescription(Description):\n    \"\"\"Simple container class for a preprocessor #undef directive.\"\"\"\n\n    def __init__(self, macro, src=None):\n  
      super(UndefDescription, self).__init__(src)\n        self.include_rule = \"if_needed\"\n\n        self.macro = macro\n\n    def casual_name(self):\n        return 'Undef \"%s\"' % self.macro.name\n\n    def py_name(self):\n        return \"#undef:%s\" % self.macro.name\n\n    def c_name(self):\n        return \"#undef %s\" % self.macro.name\n"
  },
  {
    "path": "ctypesgen/expressions.py",
    "content": "\"\"\"\nThe expressions module contains classes to represent an expression. The main\nclass is ExpressionNode. ExpressionNode's most useful method is py_string(),\nwhich returns a Python string representing that expression.\n\"\"\"\n\nimport warnings\nimport keyword\n\nfrom ctypesgen.ctypedescs import (\n    CtypesPointer,\n    CtypesSimple,\n    CtypesStruct,\n    CtypesType,\n)\n\n# Right now, the objects in this module are all oriented toward evaluation.\n# However, they don't have to be, since ctypes objects are mutable. For example,\n# shouldn't it be possible to translate the macro:\n#\n#   #define INCREMENT(x) ++x\n#\n# into Python? The resulting code should be:\n#\n#   def INCREMENT(x):\n#       x.value+=1\n#       return x.value\n#\n# On the other hand, this would be a challenge to write.\n\n\nclass EvaluationContext(object):\n    \"\"\"Interface for evaluating expression nodes.\"\"\"\n\n    def evaluate_identifier(self, name):\n        warnings.warn('Attempt to evaluate identifier \"%s\" failed' % name)\n        return 0\n\n    def evaluate_sizeof(self, object):\n        warnings.warn('Attempt to evaluate sizeof object \"%s\" failed' % str(object))\n        return 0\n\n    def evaluate_parameter(self, name):\n        warnings.warn('Attempt to evaluate parameter \"%s\" failed' % name)\n        return 0\n\n\nclass ExpressionNode(object):\n    def __init__(self):\n        self.errors = []\n\n    def error(self, message, cls=None):\n        self.errors.append((message, cls))\n\n    def __repr__(self):\n        try:\n            string = repr(self.py_string(True))\n        except ValueError:\n            string = \"<error in expression node>\"\n        return \"<%s: %s>\" % (type(self).__name__, string)\n\n    def visit(self, visitor):\n        for error, cls in self.errors:\n            visitor.visit_error(error, cls)\n\n\nclass ConstantExpressionNode(ExpressionNode):\n    def __init__(self, value, is_literal=False):\n        
ExpressionNode.__init__(self)\n        self.value = value\n        self.is_literal = is_literal\n\n    def evaluate(self, context):\n        return self.value\n\n    def py_string(self, can_be_ctype):\n        if self.is_literal:\n            return self.value\n        if self.value == float(\"inf\"):\n            return \"float('inf')\"\n        elif self.value == float(\"-inf\"):\n            return \"float('-inf')\"\n        return repr(self.value)\n\n\nclass IdentifierExpressionNode(ExpressionNode):\n    def __init__(self, name):\n        ExpressionNode.__init__(self)\n        self.name = name\n\n    def evaluate(self, context):\n        return context.evaluate_identifier(self.name)\n\n    def visit(self, visitor):\n        visitor.visit_identifier(self.name)\n        ExpressionNode.visit(self, visitor)\n\n    def py_string(self, can_be_ctype):\n        # Errors will be thrown in generated code if identifier evaluates\n        # to a ctypes object, and can_be_ctype is False.\n        return self.name\n\n\nclass ParameterExpressionNode(ExpressionNode):\n    def __init__(self, name):\n        ExpressionNode.__init__(self)\n        self.name = name\n\n    def evaluate(self, context):\n        return context.evaluate_parameter(self.name)\n\n    def visit(self, visitor):\n        ExpressionNode.visit(self, visitor)\n\n    def py_string(self, can_be_ctype):\n        # Errors will be thrown in generated code if parameter is\n        # a ctypes object, and can_be_ctype is False.\n        return self.name\n\n\nclass UnaryExpressionNode(ExpressionNode):\n    def __init__(self, name, op, format, child_can_be_ctype, child):\n        ExpressionNode.__init__(self)\n        self.name = name\n        self.op = op\n        self.format = format\n        self.child_can_be_ctype = child_can_be_ctype\n        self.child = child\n\n    def visit(self, visitor):\n        self.child.visit(visitor)\n        ExpressionNode.visit(self, visitor)\n\n    def evaluate(self, context):\n       
 if self.op:\n            return self.op(self.child.evaluate(context))\n        else:\n            raise ValueError('The C operator \"%s\" can\\'t be evaluated right now' % self.name)\n\n    def py_string(self, can_be_ctype):\n        return self.format % self.child.py_string(self.child_can_be_ctype and can_be_ctype)\n\n\nclass SizeOfExpressionNode(ExpressionNode):\n    def __init__(self, child):\n        ExpressionNode.__init__(self)\n        self.child = child\n\n    def visit(self, visitor):\n        self.child.visit(visitor)\n        ExpressionNode.visit(self, visitor)\n\n    def evaluate(self, context):\n        if isinstance(self.child, CtypesType):\n            return context.evaluate_sizeof(self.child)\n        else:\n            return context.evaluate_sizeof_object(self.child)\n\n    def py_string(self, can_be_ctype):\n        if isinstance(self.child, CtypesType):\n            return \"sizeof(%s)\" % self.child.py_string()\n        else:\n            return \"sizeof(%s)\" % self.child.py_string(True)\n\n\nclass BinaryExpressionNode(ExpressionNode):\n    def __init__(self, name, op, format, can_be_ctype, left, right):\n        ExpressionNode.__init__(self)\n        self.name = name\n        self.op = op\n        self.format = format\n        self.can_be_ctype = can_be_ctype\n        self.left = left\n        self.right = right\n\n    def visit(self, visitor):\n        self.left.visit(visitor)\n        self.right.visit(visitor)\n        ExpressionNode.visit(self, visitor)\n\n    def evaluate(self, context):\n        if self.op:\n            return self.op(self.left.evaluate(context), self.right.evaluate(context))\n        else:\n            raise ValueError('The C operator \"%s\" can\\'t be evaluated right now' % self.name)\n\n    def py_string(self, can_be_ctype):\n        return self.format % (\n            self.left.py_string(self.can_be_ctype[0] and can_be_ctype),\n            self.right.py_string(self.can_be_ctype[0] and can_be_ctype),\n        
)\n\n\nclass ConditionalExpressionNode(ExpressionNode):\n    def __init__(self, cond, yes, no):\n        ExpressionNode.__init__(self)\n        self.cond = cond\n        self.yes = yes\n        self.no = no\n\n    def visit(self, visitor):\n        self.cond.visit(visitor)\n        self.yes.visit(visitor)\n        self.no.visit(visitor)\n        ExpressionNode.visit(self, visitor)\n\n    def evaluate(self, context):\n        if self.cond.evaluate(context):\n            return self.yes.evaluate(context)\n        else:\n            return self.no.evaluate(context)\n\n    def py_string(self, can_be_ctype):\n        return \"%s and %s or %s\" % (\n            self.cond.py_string(True),\n            self.yes.py_string(can_be_ctype),\n            self.no.py_string(can_be_ctype),\n        )\n\n\nclass AttributeExpressionNode(ExpressionNode):\n    def __init__(self, op, format, base, attribute):\n        ExpressionNode.__init__(self)\n        self.op = op\n        self.format = format\n        self.base = base\n        self.attribute = attribute\n\n        # Attribute access will raise parse errors if you don't do this.\n        # Fortunately, the processor module does the same thing to\n        # the struct member name.\n        if self.attribute in keyword.kwlist:\n            self.attribute = \"_\" + self.attribute\n\n    def visit(self, visitor):\n        self.base.visit(visitor)\n        ExpressionNode.visit(self, visitor)\n\n    def evaluate(self, context):\n        return self.op(self.base.evaluate(context), self.attribute)\n\n    def py_string(self, can_be_ctype):\n        if can_be_ctype:\n            return self.format % (self.base.py_string(can_be_ctype), self.attribute)\n        else:\n            return \"(%s.value)\" % (\n                self.format % (self.base.py_string(can_be_ctype), self.attribute)\n            )\n\n\nclass CallExpressionNode(ExpressionNode):\n    def __init__(self, function, arguments):\n        ExpressionNode.__init__(self)\n        
self.function = function\n        self.arguments = arguments\n\n    def visit(self, visitor):\n        self.function.visit(visitor)\n        for arg in self.arguments:\n            arg.visit(visitor)\n        ExpressionNode.visit(self, visitor)\n\n    def evaluate(self, context):\n        arguments = [arg.evaluate(context) for arg in self.arguments]\n        return self.function.evaluate(context)(*arguments)\n\n    def py_string(self, can_be_ctype):\n        function = self.function.py_string(can_be_ctype)\n        arguments = [x.py_string(can_be_ctype) for x in self.arguments]\n        return \"(%s (%s))\" % (function, \", \".join(arguments))\n\n\nclass TypeCastExpressionNode(ExpressionNode):\n    \"\"\"\n    Type cast expressions as handled by ctypesgen.  There is a strong\n    possibility that this does not support all types of casts.\n    \"\"\"\n\n    def __init__(self, base, ctype):\n        ExpressionNode.__init__(self)\n        self.base = base\n        self.ctype = ctype\n\n    def visit(self, visitor):\n        self.base.visit(visitor)\n        self.ctype.visit(visitor)\n        ExpressionNode.visit(self, visitor)\n\n    def evaluate(self, context):\n        return self.base.evaluate(context)\n\n    def py_string(self, can_be_ctype):\n        if isinstance(self.ctype, CtypesPointer):\n            return \"cast({}, {})\".format(self.base.py_string(True), self.ctype.py_string())\n        elif isinstance(self.ctype, CtypesStruct):\n            raise TypeError(\n                \"conversion to non-scalar type ({}) requested from {}\".format(\n                    self.ctype, self.base.py_string(False)\n                )\n            )\n        else:\n            # In reality, this conversion should only really work if the types\n            # are scalar types.  
We won't work really hard to test if the types\n            # are  indeed scalar.\n            # To be backwards compatible, we always return literals for builtin types.\n            # We use a function to convert to integer for c_char types since\n            # c_char can take integer or byte types, but the others can *only*\n            # take non-char arguments.\n            # ord_if_char must be provided by preambles\n            if isinstance(self.ctype, CtypesSimple) and (\n                self.ctype.name,\n                self.ctype.signed,\n            ) == (\n                \"char\",\n                True,\n            ):\n                ord_if_char = \"\"\n            elif isinstance(self.ctype, CtypesSimple) and self.ctype.name == \"void\":\n                # This is a very simple type cast:  cast everything to (void)\n                # At least one macro from mingw does this\n                return \"None\"\n            else:\n                ord_if_char = \"ord_if_char\"\n\n            return \"({to} ({ord_if_char}({frm}))).value\".format(\n                to=self.ctype.py_string(),\n                ord_if_char=ord_if_char,\n                frm=self.base.py_string(False),\n            )\n\n\nclass UnsupportedExpressionNode(ExpressionNode):\n    def __init__(self, message):\n        ExpressionNode.__init__(self)\n        self.message = message\n        self.error(message, \"unsupported-type\")\n\n    def evaluate(self, context):\n        raise ValueError(\"Tried to evaluate an unsupported expression \" \"node: %s\" % self.message)\n\n    def __repr__(self):\n        return \"<UnsupportedExpressionNode>\"\n\n    def py_string(self, can_be_ctype):\n        raise ValueError(\"Called py_string() an unsupported expression \" \"node: %s\" % self.message)\n"
  },
  {
    "path": "ctypesgen/libraryloader.py",
    "content": "\"\"\"\nLoad libraries - appropriately for all our supported platforms\n\"\"\"\n# ----------------------------------------------------------------------------\n# Copyright (c) 2008 David James\n# Copyright (c) 2006-2008 Alex Holkner\n# All rights reserved.\n#\n# Redistribution and use in source and binary forms, with or without\n# modification, are permitted provided that the following conditions\n# are met:\n#\n#  * Redistributions of source code must retain the above copyright\n#    notice, this list of conditions and the following disclaimer.\n#  * Redistributions in binary form must reproduce the above copyright\n#    notice, this list of conditions and the following disclaimer in\n#    the documentation and/or other materials provided with the\n#    distribution.\n#  * Neither the name of pyglet nor the names of its\n#    contributors may be used to endorse or promote products\n#    derived from this software without specific prior written\n#    permission.\n#\n# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n# \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS\n# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE\n# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,\n# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,\n# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;\n# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER\n# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT\n# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN\n# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE\n# POSSIBILITY OF SUCH DAMAGE.\n# ----------------------------------------------------------------------------\n\nimport ctypes\nimport ctypes.util\nimport glob\nimport os.path\nimport platform\nimport re\nimport sys\n\n\ndef _environ_path(name):\n    \"\"\"Split an environment variable into a path-like list elements\"\"\"\n    if name in os.environ:\n        return os.environ[name].split(\":\")\n    return []\n\n\nclass LibraryLoader:\n    \"\"\"\n    A base class For loading of libraries ;-)\n    Subclasses load libraries for specific platforms.\n    \"\"\"\n\n    # library names formatted specifically for platforms\n    name_formats = [\"%s\"]\n\n    class Lookup:\n        \"\"\"Looking up calling conventions for a platform\"\"\"\n\n        mode = ctypes.DEFAULT_MODE\n\n        def __init__(self, path):\n            super(LibraryLoader.Lookup, self).__init__()\n            self.access = dict(cdecl=ctypes.CDLL(path, self.mode))\n\n        def get(self, name, calling_convention=\"cdecl\"):\n            \"\"\"Return the given name according to the selected calling convention\"\"\"\n            if calling_convention not in self.access:\n                raise LookupError(\n                    \"Unknown calling convention '{}' for function '{}'\".format(\n                        calling_convention, name\n                    )\n                )\n            return getattr(self.access[calling_convention], name)\n\n        def has(self, name, 
calling_convention=\"cdecl\"):\n            \"\"\"Return True if this given calling convention finds the given 'name'\"\"\"\n            if calling_convention not in self.access:\n                return False\n            return hasattr(self.access[calling_convention], name)\n\n        def __getattr__(self, name):\n            return getattr(self.access[\"cdecl\"], name)\n\n    def __init__(self):\n        self.other_dirs = []\n\n    def __call__(self, libname):\n        \"\"\"Given the name of a library, load it.\"\"\"\n        paths = self.getpaths(libname)\n\n        for path in paths:\n            # noinspection PyBroadException\n            try:\n                return self.Lookup(path)\n            except Exception:  # pylint: disable=broad-except\n                pass\n\n        raise ImportError(\"Could not load %s.\" % libname)\n\n    def getpaths(self, libname):\n        \"\"\"Return a list of paths where the library might be found.\"\"\"\n        if os.path.isabs(libname):\n            yield libname\n        else:\n            # search through a prioritized series of locations for the library\n\n            # we first search any specific directories identified by user\n            for dir_i in self.other_dirs:\n                for fmt in self.name_formats:\n                    # dir_i should be absolute already\n                    yield os.path.join(dir_i, fmt % libname)\n\n            # check if this code is even stored in a physical file\n            try:\n                this_file = __file__\n            except NameError:\n                this_file = None\n\n            # then we search the directory where the generated python interface is stored\n            if this_file is not None:\n                for fmt in self.name_formats:\n                    yield os.path.abspath(os.path.join(os.path.dirname(__file__), fmt % libname))\n\n            # now, use the ctypes tools to try to find the library\n            for fmt in self.name_formats:\n           
     path = ctypes.util.find_library(fmt % libname)\n                if path:\n                    yield path\n\n            # then we search all paths identified as platform-specific lib paths\n            for path in self.getplatformpaths(libname):\n                yield path\n\n            # Finally, we'll try the users current working directory\n            for fmt in self.name_formats:\n                yield os.path.abspath(os.path.join(os.path.curdir, fmt % libname))\n\n    def getplatformpaths(self, _libname):  # pylint: disable=no-self-use\n        \"\"\"Return all the library paths available in this platform\"\"\"\n        return []\n\n\n# Darwin (Mac OS X)\n\n\nclass DarwinLibraryLoader(LibraryLoader):\n    \"\"\"Library loader for MacOS\"\"\"\n\n    name_formats = [\n        \"lib%s.dylib\",\n        \"lib%s.so\",\n        \"lib%s.bundle\",\n        \"%s.dylib\",\n        \"%s.so\",\n        \"%s.bundle\",\n        \"%s\",\n    ]\n\n    class Lookup(LibraryLoader.Lookup):\n        \"\"\"\n        Looking up library files for this platform (Darwin aka MacOS)\n        \"\"\"\n\n        # Darwin requires dlopen to be called with mode RTLD_GLOBAL instead\n        # of the default RTLD_LOCAL.  
Without this, you end up with\n        # libraries not being loadable, resulting in \"Symbol not found\"\n        # errors\n        mode = ctypes.RTLD_GLOBAL\n\n    def getplatformpaths(self, libname):\n        if os.path.pathsep in libname:\n            names = [libname]\n        else:\n            names = [fmt % libname for fmt in self.name_formats]\n\n        for directory in self.getdirs(libname):\n            for name in names:\n                yield os.path.join(directory, name)\n\n    @staticmethod\n    def getdirs(libname):\n        \"\"\"Implements the dylib search as specified in Apple documentation:\n\n        http://developer.apple.com/documentation/DeveloperTools/Conceptual/\n            DynamicLibraries/Articles/DynamicLibraryUsageGuidelines.html\n\n        Before commencing the standard search, the method first checks\n        the bundle's ``Frameworks`` directory if the application is running\n        within a bundle (OS X .app).\n        \"\"\"\n\n        dyld_fallback_library_path = _environ_path(\"DYLD_FALLBACK_LIBRARY_PATH\")\n        if not dyld_fallback_library_path:\n            dyld_fallback_library_path = [\n                os.path.expanduser(\"~/lib\"),\n                \"/usr/local/lib\",\n                \"/usr/lib\",\n            ]\n\n        dirs = []\n\n        if \"/\" in libname:\n            dirs.extend(_environ_path(\"DYLD_LIBRARY_PATH\"))\n        else:\n            dirs.extend(_environ_path(\"LD_LIBRARY_PATH\"))\n            dirs.extend(_environ_path(\"DYLD_LIBRARY_PATH\"))\n            dirs.extend(_environ_path(\"LD_RUN_PATH\"))\n\n        if hasattr(sys, \"frozen\") and getattr(sys, \"frozen\") == \"macosx_app\":\n            dirs.append(os.path.join(os.environ[\"RESOURCEPATH\"], \"..\", \"Frameworks\"))\n\n        dirs.extend(dyld_fallback_library_path)\n\n        return dirs\n\n\n# Posix\n\n\nclass PosixLibraryLoader(LibraryLoader):\n    \"\"\"Library loader for POSIX-like systems (including Linux)\"\"\"\n\n    _ld_so_cache 
= None\n\n    _include = re.compile(r\"^\\s*include\\s+(?P<pattern>.*)\")\n\n    name_formats = [\"lib%s.so\", \"%s.so\", \"%s\"]\n\n    class _Directories(dict):\n        \"\"\"Deal with directories\"\"\"\n\n        def __init__(self):\n            dict.__init__(self)\n            self.order = 0\n\n        def add(self, directory):\n            \"\"\"Add a directory to our current set of directories\"\"\"\n            if len(directory) > 1:\n                directory = directory.rstrip(os.path.sep)\n            # only adds and updates order if exists and not already in set\n            if not os.path.exists(directory):\n                return\n            order = self.setdefault(directory, self.order)\n            if order == self.order:\n                self.order += 1\n\n        def extend(self, directories):\n            \"\"\"Add a list of directories to our set\"\"\"\n            for a_dir in directories:\n                self.add(a_dir)\n\n        def ordered(self):\n            \"\"\"Sort the list of directories\"\"\"\n            return (i[0] for i in sorted(self.items(), key=lambda d: d[1]))\n\n    def _get_ld_so_conf_dirs(self, conf, dirs):\n        \"\"\"\n        Recursive function to help parse all ld.so.conf files, including proper\n        handling of the `include` directive.\n        \"\"\"\n\n        try:\n            with open(conf) as fileobj:\n                for dirname in fileobj:\n                    dirname = dirname.strip()\n                    if not dirname:\n                        continue\n\n                    match = self._include.match(dirname)\n                    if not match:\n                        dirs.add(dirname)\n                    else:\n                        for dir2 in glob.glob(match.group(\"pattern\")):\n                            self._get_ld_so_conf_dirs(dir2, dirs)\n        except IOError:\n            pass\n\n    def _create_ld_so_cache(self):\n        # Recreate search path followed by ld.so.  
This is going to be\n        # slow to build, and incorrect (ld.so uses ld.so.cache, which may\n        # not be up-to-date).  Used only as fallback for distros without\n        # /sbin/ldconfig.\n        #\n        # We assume the DT_RPATH and DT_RUNPATH binary sections are omitted.\n\n        directories = self._Directories()\n        for name in (\n            \"LD_LIBRARY_PATH\",\n            \"SHLIB_PATH\",  # HP-UX\n            \"LIBPATH\",  # OS/2, AIX\n            \"LIBRARY_PATH\",  # BE/OS\n        ):\n            if name in os.environ:\n                directories.extend(os.environ[name].split(os.pathsep))\n\n        self._get_ld_so_conf_dirs(\"/etc/ld.so.conf\", directories)\n\n        bitage = platform.architecture()[0]\n\n        unix_lib_dirs_list = []\n        if bitage.startswith(\"64\"):\n            # prefer 64 bit if that is our arch\n            unix_lib_dirs_list += [\"/lib64\", \"/usr/lib64\"]\n\n        # must include standard libs, since those paths are also used by 64 bit\n        # installs\n        unix_lib_dirs_list += [\"/lib\", \"/usr/lib\"]\n        if sys.platform.startswith(\"linux\"):\n            # Try and support multiarch work in Ubuntu\n            # https://wiki.ubuntu.com/MultiarchSpec\n            if bitage.startswith(\"32\"):\n                # Assume Intel/AMD x86 compat\n                unix_lib_dirs_list += [\"/lib/i386-linux-gnu\", \"/usr/lib/i386-linux-gnu\"]\n            elif bitage.startswith(\"64\"):\n                # Assume Intel/AMD x86 compatible\n                unix_lib_dirs_list += [\n                    \"/lib/x86_64-linux-gnu\",\n                    \"/usr/lib/x86_64-linux-gnu\",\n                ]\n            else:\n                # guess...\n                unix_lib_dirs_list += glob.glob(\"/lib/*linux-gnu\")\n        directories.extend(unix_lib_dirs_list)\n\n        cache = {}\n        lib_re = re.compile(r\"lib(.*)\\.s[ol]\")\n        # ext_re = re.compile(r\"\\.s[ol]$\")\n        for our_dir in 
directories.ordered():\n            try:\n                for path in glob.glob(\"%s/*.s[ol]*\" % our_dir):\n                    file = os.path.basename(path)\n\n                    # Index by filename\n                    cache_i = cache.setdefault(file, set())\n                    cache_i.add(path)\n\n                    # Index by library name\n                    match = lib_re.match(file)\n                    if match:\n                        library = match.group(1)\n                        cache_i = cache.setdefault(library, set())\n                        cache_i.add(path)\n            except OSError:\n                pass\n\n        self._ld_so_cache = cache\n\n    def getplatformpaths(self, libname):\n        if self._ld_so_cache is None:\n            self._create_ld_so_cache()\n\n        result = self._ld_so_cache.get(libname, set())\n        for i in result:\n            # we iterate through all found paths for library, since we may have\n            # actually found multiple architectures or other library types that\n            # may not load\n            yield i\n\n\n# Windows\n\n\nclass WindowsLibraryLoader(LibraryLoader):\n    \"\"\"Library loader for Microsoft Windows\"\"\"\n\n    name_formats = [\"%s.dll\", \"lib%s.dll\", \"%slib.dll\", \"%s\"]\n\n    class Lookup(LibraryLoader.Lookup):\n        \"\"\"Lookup class for Windows libraries...\"\"\"\n\n        def __init__(self, path):\n            super(WindowsLibraryLoader.Lookup, self).__init__(path)\n            self.access[\"stdcall\"] = ctypes.windll.LoadLibrary(path)\n\n\n# Platform switching\n\n# If your value of sys.platform does not appear in this dict, please contact\n# the Ctypesgen maintainers.\n\nloaderclass = {\n    \"darwin\": DarwinLibraryLoader,\n    \"cygwin\": WindowsLibraryLoader,\n    \"win32\": WindowsLibraryLoader,\n    \"msys\": WindowsLibraryLoader,\n}\n\nload_library = loaderclass.get(sys.platform, PosixLibraryLoader)()\n\n\ndef add_library_search_dirs(other_dirs):\n    
\"\"\"\n    Add libraries to search paths.\n    If library paths are relative, convert them to absolute with respect to this\n    file's directory\n    \"\"\"\n    for path in other_dirs:\n        if not os.path.isabs(path):\n            path = os.path.abspath(path)\n        load_library.other_dirs.append(path)\n\n\ndel loaderclass\n"
  },
  {
    "path": "ctypesgen/messages.py",
    "content": "\"\"\"\nctypesgen.messages contains functions to display status, error, or warning\nmessages to the user. Warning and error messages are also associated\nwith a \"message class\", which is a string, which currently has no effect.\n\nError classes are:\n'usage' - there was something funny about the command-line parameters\n'cparser' - there was a syntax error in the header file\n'missing-library' - a library could not be loaded\n'macro' - a macro could not be translated to Python\n'unsupported-type' - there was a type in the header that ctypes cannot use, like\n    \"long double\".\n'other' - catchall.\n\nWarning classes are:\n'usage' - there was something funny about the command-line parameters\n'rename' - a description has been renamed to avoid a name conflict\n'other' - catchall.\n\"\"\"\n\nimport logging\n\n__all__ = [\"error_message\", \"warning_message\", \"status_message\"]\n\nlog = logging.getLogger(\"ctypesgen\")\nch = logging.StreamHandler()  # use stdio\nlogging_fmt_str = \"%(levelname)s: %(message)s\"\nformatter = logging.Formatter(logging_fmt_str)\nch.setFormatter(formatter)\nlog.addHandler(ch)\nlog.setLevel(logging.INFO)  # default level that ctypesgen was using with original version\n\n\ndef error_message(msg, cls=None):\n    log.error(\"%s\", msg)\n\n\ndef warning_message(msg, cls=None):\n    log.warning(\"%s\", msg)\n\n\ndef status_message(msg):\n    log.info(\"Status: %s\", msg)\n"
  },
  {
    "path": "ctypesgen/options.py",
    "content": "\"\"\"\nAll of the components of ctypegencore require an argument called \"options\".\nIn command-line usage, this would be an argparse.Namespace object. However,\nif ctypesgen is used as a standard Python module, constructing this object\nwould be a pain. So this module exists to provide a \"default\" options object\nfor convenience.\n\"\"\"\n\nimport argparse\nimport copy\n\ndefault_values = {\n    \"other_headers\": [],\n    \"modules\": [],\n    \"include_search_paths\": [],\n    \"compile_libdirs\": [],\n    \"runtime_libdirs\": [],\n    \"cpp\": \"gcc -E\",\n    \"allow_gnu_c\": False,\n    \"cpp_defines\": [],\n    \"cpp_undefines\": [],\n    \"save_preprocessed_headers\": None,\n    \"all_headers\": False,\n    \"builtin_symbols\": False,\n    \"include_symbols\": [],\n    \"exclude_symbols\": [],\n    \"show_all_errors\": False,\n    \"show_long_errors\": False,\n    \"show_macro_warnings\": True,\n    \"header_template\": None,\n    \"inserted_files\": [],\n    \"other_known_names\": [],\n    \"include_macros\": True,\n    \"include_undefs\": True,\n    \"libraries\": [],\n    \"strip_build_path\": None,\n    \"output_language\": \"py\",\n    \"no_stddef_types\": False,\n    \"no_gnu_types\": False,\n    \"no_python_types\": False,\n    \"debug_level\": 0,\n    \"strip_prefixes\": [],\n    \"embed_preamble\": True,\n    \"no_load_library\": False,\n}\n\n\ndef get_default_options():\n    return argparse.Namespace(**copy.deepcopy(default_values))\n"
  },
  {
    "path": "ctypesgen/parser/.gitignore",
    "content": "new_parsetab.py\nparser.out\n"
  },
  {
    "path": "ctypesgen/parser/__init__.py",
    "content": "\"\"\"\nThis package parses C header files and generates lists of functions, typedefs,\nvariables, structs, unions, enums, macros, and constants. This package knows\nnothing about the libraries themselves.\n\nThe public interface for this package is the function \"parse\". Use as follows:\n>>> descriptions = parse([\"inputfile1.h\",\"inputfile2.h\"], options)\nwhere \"options\" is an argparse.Namespace object.\n\nparse() returns a DescriptionCollection object. See ctypesgen.descriptions\nfor more information.\n\n\"\"\"\n\nfrom .datacollectingparser import DataCollectingParser\n\n\ndef parse(headers, options):\n    parser = DataCollectingParser(headers, options)\n    parser.parse()\n    return parser.data()\n\n\n__all__ = [\"parse\"]\n"
  },
  {
    "path": "ctypesgen/parser/cdeclarations.py",
    "content": "\"\"\"\nThis file contains classes that represent C declarations. cparser produces\ndeclarations in this format, and ctypesparser reformats them into a format that\nis not C-specific. The other modules don't need to touch these.\n\"\"\"\n\n__docformat__ = \"restructuredtext\"\n\n# --------------------------------------------------------------------------\n# C Object Model\n# --------------------------------------------------------------------------\n\n\nclass Declaration(object):\n    def __init__(self):\n        self.declarator = None\n        self.type = Type()\n        self.storage = None\n        self.attrib = Attrib()\n\n    def __repr__(self):\n        d = {\"declarator\": self.declarator, \"type\": self.type}\n        if self.storage:\n            d[\"storage\"] = self.storage\n        li = [\"%s=%r\" % (k, v) for k, v in d.items()]\n        return \"Declaration(%s)\" % \", \".join(li)\n\n\nclass Declarator(object):\n    pointer = None\n\n    def __init__(self):\n        self.identifier = None\n        self.initializer = None\n        self.array = None\n        self.parameters = None\n        self.bitfield = None\n        self.attrib = Attrib()\n\n    # make pointer read-only to catch mistakes early\n    pointer = property(lambda self: None)\n\n    def __repr__(self):\n        s = self.identifier or \"\"\n        if self.bitfield:\n            s += f\":{self.bitfield.value}\"\n        if self.array:\n            s += repr(self.array)\n        if self.initializer:\n            s += \" = %r\" % self.initializer\n        if self.parameters is not None:\n            s += \"(\" + \", \".join([repr(p) for p in self.parameters]) + \")\"\n        return s\n\n\nclass Pointer(Declarator):\n    pointer = None\n\n    def __init__(self):\n        super(Pointer, self).__init__()\n        self.qualifiers = []\n\n    def __repr__(self):\n        q = \"\"\n        if self.qualifiers:\n            q = \"<%s>\" % \" \".join(self.qualifiers)\n        return 
\"POINTER%s(%r)\" % (q, self.pointer) + super(Pointer, self).__repr__()\n\n\nclass Array(object):\n    def __init__(self):\n        self.size = None\n        self.array = None\n\n    def __repr__(self):\n        if self.size:\n            a = \"[%r]\" % self.size\n        else:\n            a = \"[]\"\n        if self.array:\n            return repr(self.array) + a\n        else:\n            return a\n\n\nclass Parameter(object):\n    def __init__(self):\n        self.type = Type()\n        self.storage = None\n        self.declarator = None\n        self.attrib = Attrib()\n\n    def __repr__(self):\n        d = {\"type\": self.type}\n        if self.declarator:\n            d[\"declarator\"] = self.declarator\n        if self.storage:\n            d[\"storage\"] = self.storage\n        li = [\"%s=%r\" % (k, v) for k, v in d.items()]\n        return \"Parameter(%s)\" % \", \".join(li)\n\n\nclass Type(object):\n    def __init__(self):\n        self.qualifiers = []\n        self.specifiers = []\n\n    def __repr__(self):\n        return \" \".join(self.qualifiers + [str(s) for s in self.specifiers])\n\n\n# These are used only internally.\n\n\nclass StorageClassSpecifier(str):\n    def __repr__(self):\n        return \"StorageClassSpecifier({})\".format(str(self))\n\n\nclass TypeSpecifier(str):\n    def __repr__(self):\n        return \"TypeSpecifier({})\".format(str(self))\n\n\nclass StructTypeSpecifier(object):\n    def __init__(self, is_union, attrib, tag, declarations):\n        self.is_union = is_union\n        self.attrib = attrib\n        self.tag = tag\n        self.declarations = declarations\n        self.filename = None\n        self.lineno = -1\n\n    def __repr__(self):\n        if self.is_union:\n            s = \"union\"\n        else:\n            s = \"struct\"\n        if self.attrib:\n            attrs = list()\n            for attr, val in self.attrib.items():\n                if val and type(val) == str:\n                    
attrs.append(\"{}({})\".format(attr, val))\n                elif val:\n                    attrs.append(attr)\n\n            s += \" __attribute__(({}))\".format(\",\".join(attrs))\n        if self.tag and type(self.tag) != int:\n            s += \" %s\" % self.tag\n        if self.declarations:\n            s += \" {%s}\" % \"; \".join([repr(d) for d in self.declarations])\n        return s\n\n\nclass EnumSpecifier(object):\n    def __init__(self, tag, enumerators, src=None):\n        self.tag = tag\n        self.enumerators = enumerators\n        self.filename = None\n        self.lineno = -1\n\n    def __repr__(self):\n        s = \"enum\"\n        if self.tag:\n            s += \" %s\" % self.tag\n        if self.enumerators:\n            s += \" {%s}\" % \", \".join([repr(e) for e in self.enumerators])\n        return s\n\n\nclass Enumerator(object):\n    def __init__(self, name, expression):\n        self.name = name\n        self.expression = expression\n\n    def __repr__(self):\n        s = self.name\n        if self.expression:\n            s += \" = %r\" % self.expression\n        return s\n\n\nclass TypeQualifier(str):\n    def __repr__(self):\n        return \"TypeQualifier({})\".format(str(self))\n\n\nclass PragmaPack(object):\n    DEFAULT = None\n\n    def __init__(self):\n        self.current = self.DEFAULT\n        self.stack = list()\n\n    def set_default(self):\n        self.current = self.DEFAULT\n\n    def push(self, id=None, value=None):\n        item = (id, self.current)\n        self.stack.append(item)\n\n        if value is not None:\n            self.current = value\n\n    def pop(self, id=None):\n        if not self.stack:\n            if id:\n                return (\n                    \"#pragma pack(pop, {id}) encountered without matching \"\n                    \"#pragma pack(push, {id})\".format(id=id),\n                )\n            else:\n                return \"#pragma pack(pop) encountered without matching #pragma 
pack(push)\"\n\n        item = None\n        err = None\n\n        if id is not None:\n            i = len(self.stack) - 1\n            while i >= 0 and self.stack[i][0] != id:\n                i -= 1\n\n            if i >= 0:\n                item = self.stack[i]\n                self.stack = self.stack[:i]\n            else:\n                err = (\n                    \"#pragma pack(pop, {id}) encountered without matching \"\n                    \"#pragma pack(push, {id}); popped last\".format(id=id)\n                )\n\n        if item is None:\n            item = self.stack.pop()\n\n        self.current = item[1]\n        return err\n\n\npragma_pack = PragmaPack()\n\n\nclass Attrib(dict):\n    def __init__(self, *a, **kw):\n        if pragma_pack.current:\n            super(Attrib, self).__init__(packed=True, aligned=[pragma_pack.current])\n            super(Attrib, self).update(*a, **kw)\n        else:\n            super(Attrib, self).__init__(*a, **kw)\n        self._unalias()\n\n    def __repr__(self):\n        return \"Attrib({})\".format(dict(self))\n\n    def update(self, *a, **kw):\n        super(Attrib, self).update(*a, **kw)\n        self._unalias()\n\n    def _unalias(self):\n        \"\"\"\n        Check for any attribute aliases and remove leading/trailing '__'\n\n        According to https://gcc.gnu.org/onlinedocs/gcc/Attribute-Syntax.html,\n        an attribute can also be preceded/followed by a double underscore\n        ('__').\n        \"\"\"\n\n        self.pop(None, None)  # remove dummy empty attribute\n\n        fixes = [attr for attr in self if attr.startswith(\"__\") and attr.endswith(\"__\")]\n        for attr in fixes:\n            self[attr[2 : (len(attr) - 2)]] = self.pop(attr)\n\n\ndef apply_specifiers(specifiers, declaration):\n    \"\"\"Apply specifiers to the declaration (declaration may be\n    a Parameter instead).\"\"\"\n    for s in specifiers:\n        if type(s) == StorageClassSpecifier:\n            if 
declaration.storage:\n                # Multiple storage classes, technically an error... ignore it\n                pass\n            declaration.storage = s\n        elif type(s) in (TypeSpecifier, StructTypeSpecifier, EnumSpecifier):\n            declaration.type.specifiers.append(s)\n        elif type(s) == TypeQualifier:\n            declaration.type.qualifiers.append(s)\n        elif type(s) == Attrib:\n            declaration.attrib.update(s)\n"
  },
  {
    "path": "ctypesgen/parser/cgrammar.py",
    "content": "#!/usr/bin/env python3\n\n\"\"\"This is a yacc grammar for C.\n\nDerived from ANSI C grammar:\n  * Lexicon: http://www.lysator.liu.se/c/ANSI-C-grammar-l.html\n             http://www.quut.com/c/ANSI-C-grammar-l-2011.html\n  * Grammar: http://www.lysator.liu.se/c/ANSI-C-grammar-y.html\n             http://www.quut.com/c/ANSI-C-grammar-y-2011.html\n\nReference is C99:\n  * http://www.open-std.org/JTC1/SC22/WG14/www/docs/n1124.pdf\n\nParts of C2X (C23) is included:\n  * http://www.open-std.org/jtc1/sc22/wg14/www/docs/n2731.pdf\n\"\"\"\n\n__docformat__ = \"restructuredtext\"\n\nif __name__ == \"__main__\":\n    # NOTE if this file is modified, run to generate a new parsetab.py\n    #   E.g.:\n    #       env PYTHONPATH=. python ctypesgen/parser/cgrammar.py\n    # new_parsetab.py is generated in the current directory and needs to be\n    # manually copied (after inspection) to ctypesgen/parser/parsetab.py\n    import sys\n    import os\n\n    sys.path.insert(0, os.path.join(os.path.pardir, os.path.pardir))\n    from ctypesgen.parser.cgrammar import main\n\n    main()\n    sys.exit()\n\nimport os.path\nimport sys\n\nfrom ctypesgen import expressions\nfrom ctypesgen.ctypedescs import anonymous_struct_tagnum\nfrom ctypesgen.parser import cdeclarations, yacc\n\n\nreserved_keyword_tokens = (\n    \"SIZEOF\", \"TYPEDEF\", \"EXTERN\", \"STATIC\", \"AUTO\", \"REGISTER\", \"INLINE\",\n    \"CONST\", \"RESTRICT\", \"VOLATILE\",\n    \"CHAR\", \"SHORT\", \"INT\", \"LONG\", \"SIGNED\", \"UNSIGNED\", \"FLOAT\", \"DOUBLE\",\n    \"VOID\", \"STRUCT\", \"UNION\", \"ENUM\",\n\n    \"CASE\", \"DEFAULT\", \"IF\", \"ELSE\", \"SWITCH\", \"WHILE\", \"DO\", \"FOR\", \"GOTO\",\n    \"CONTINUE\", \"BREAK\", \"RETURN\",\n)\n\nreserved_keyword_tokens_new = (\n    \"_BOOL\", \"_NORETURN\",\n    # \"_ALIGNAS\", \"_ALIGNOF\", \"_ATOMIC\", \"_COMPLEX\",\n    # \"_DECIMAL128\", \"_DECIMAL32\", \"_DECIMAL64\",\n    # \"_GENERIC\", \"_IMAGINARY\", \"_STATIC_ASSERT\", 
\"_THREAD_LOCAL\",\n)\n\nextra_keywords_with_alias = {\n    \"__asm__\": \"__ASM__\",\n    \"__attribute__\": \"__ATTRIBUTE__\",\n    \"__restrict\": \"RESTRICT\",\n    \"__inline__\": \"INLINE\",\n    \"__inline\": \"INLINE\",\n}\n\nkeyword_map = {}\nfor keyword in reserved_keyword_tokens:\n    keyword_map[keyword.lower()] = keyword\nfor keyword in reserved_keyword_tokens_new:\n    keyword_map[keyword[:2].upper() + keyword[2:].lower()] = keyword\n    keyword_map[keyword[1:].lower()] = keyword\nkeyword_map.update(extra_keywords_with_alias)\n\nkeywords = tuple(keyword_map.keys())\n\ntokens = reserved_keyword_tokens + reserved_keyword_tokens_new + (\n    # Identifier\n    \"IDENTIFIER\",\n\n    # Type identifiers\n    \"TYPE_NAME\",\n    # \"FUNC_NAME\",  \"TYPEDEF_NAME\",\n\n    # Constants\n    \"STRING_LITERAL\", \"CHARACTER_CONSTANT\",\n    # \"ENUMERATION_CONSTANT\",\n    \"I_CONST_HEX\", \"I_CONST_DEC\", \"I_CONST_OCT\", \"I_CONST_BIN\",\n    \"F_CONST_1\", \"F_CONST_2\", \"F_CONST_3\", \"F_CONST_4\", \"F_CONST_5\", \"F_CONST_6\",\n\n    # Operators\n    \"PLUS\", \"MINUS\", \"TIMES\", \"DIVIDE\", \"MOD\", \"AND\",\n    \"OR\", \"NOT\", \"XOR\", \"LNOT\", \"LT\", \"GT\", \"CONDOP\",\n    \"PTR_OP\", \"INC_OP\", \"DEC_OP\", \"LEFT_OP\", \"RIGHT_OP\",\n    \"LE_OP\", \"GE_OP\", \"EQ_OP\", \"NE_OP\", \"AND_OP\", \"OR_OP\",\n\n    # Assignment\n    \"MUL_ASSIGN\", \"DIV_ASSIGN\", \"MOD_ASSIGN\", \"ADD_ASSIGN\",\n    \"SUB_ASSIGN\", \"LEFT_ASSIGN\", \"RIGHT_ASSIGN\", \"AND_ASSIGN\",\n    \"XOR_ASSIGN\", \"OR_ASSIGN\", \"EQUALS\",\n\n    # Preprocessor\n    \"PP_DEFINE\", \"PP_DEFINE_MACRO_NAME\", \"PP_DEFINE_NAME\", \"PP_END_DEFINE\",\n    \"PP_IDENTIFIER_PASTE\", \"PP_MACRO_PARAM\", \"PP_STRINGIFY\", \"PP_UNDEFINE\",\n    # \"PP_NUMBER\",\n\n    # Pragma\n    \"PRAGMA\", \"PRAGMA_END\", \"PRAGMA_PACK\",\n\n    # Delimiters\n    \"PERIOD\", \"ELLIPSIS\", \"LPAREN\", \"RPAREN\", \"LBRACKET\",\n    \"RBRACKET\", \"LBRACE\", \"RBRACE\", \"COMMA\", \"SEMI\",\n    
\"COLON\",\n\n    \"__ASM__\", \"__ATTRIBUTE__\",\n)\n\n\nprecedence = ((\"nonassoc\", \"IF\"), (\"nonassoc\", \"ELSE\"))\n\n\ndef p_translation_unit(p):\n    \"\"\" translation_unit :\n                         | translation_unit external_declaration\n                         | translation_unit directive\n    \"\"\"\n    # Starting production.\n    # Allow empty production so that files with no declarations are still\n    #    valid.\n    # Intentionally empty\n\n\ndef p_identifier(p):\n    \"\"\" identifier : IDENTIFIER\n                   | IDENTIFIER PP_IDENTIFIER_PASTE identifier\n                   | PP_MACRO_PARAM PP_IDENTIFIER_PASTE identifier\n                   | IDENTIFIER PP_IDENTIFIER_PASTE PP_MACRO_PARAM\n                   | PP_MACRO_PARAM PP_IDENTIFIER_PASTE PP_MACRO_PARAM\n    \"\"\"\n    if len(p) == 2:\n        p[0] = expressions.IdentifierExpressionNode(p[1])\n    else:\n        # Should it be supported? It wouldn't be very hard to add support.\n        # Basically, it would involve a new ExpressionNode called\n        # an IdentifierPasteExpressionNode that took a list of strings and\n        # ParameterExpressionNodes. Then it would generate code like\n        # \"locals()['%s' + '%s' + ...]\" where %s was substituted with the\n        # elements of the list. 
I haven't supported it yet because I think\n        # it's unnecessary and a little too powerful.\n        p[0] = expressions.UnsupportedExpressionNode(\n            \"Identifier pasting is not supported by ctypesgen.\"\n        )\n\n\ndef p_constant_integer(p):\n    \"\"\" constant : I_CONST_HEX\n                 | I_CONST_DEC\n                 | I_CONST_OCT\n                 | I_CONST_BIN\n    \"\"\"\n    constant = p[1]\n    is_literal = True\n\n    if constant.isdigit():\n        is_literal = False\n        constant = int(p[1])\n\n    p[0] = expressions.ConstantExpressionNode(constant, is_literal=is_literal)\n\n\ndef p_constant_float(p):\n    \"\"\" constant : F_CONST_1\n                 | F_CONST_2\n                 | F_CONST_3\n                 | F_CONST_4\n                 | F_CONST_5\n                 | F_CONST_6\n    \"\"\"\n    p[0] = expressions.ConstantExpressionNode(p[1], is_literal=True)\n\n\ndef p_constant_character(p):\n    \"\"\" constant : CHARACTER_CONSTANT\n    \"\"\"\n    constant_char = p[1]\n\n    p[0] = expressions.ConstantExpressionNode(constant_char)\n\n\ndef p_string_literal(p):\n    \"\"\" string_literal : STRING_LITERAL\n    \"\"\"\n    p[0] = expressions.ConstantExpressionNode(p[1])\n\n\ndef p_multi_string_literal(p):\n    \"\"\" multi_string_literal : string_literal\n                             | macro_param\n                             | multi_string_literal string_literal\n                             | multi_string_literal macro_param\n    \"\"\"\n    if len(p) == 2:\n        p[0] = p[1]\n    else:\n        p[0] = expressions.BinaryExpressionNode(\n            \"string concatenation\", (lambda x, y: x + y), \"(%s + %s)\", (False, False), p[1], p[2]\n        )\n\n\ndef p_macro_param(p):\n    \"\"\" macro_param : PP_MACRO_PARAM\n                    | PP_STRINGIFY PP_MACRO_PARAM\n    \"\"\"\n    if len(p) == 2:\n        p[0] = expressions.ParameterExpressionNode(p[1])\n    else:\n        p[0] = 
expressions.ParameterExpressionNode(p[2])\n\n\ndef p_primary_expression(p):\n    \"\"\" primary_expression : identifier\n                           | constant\n                           | multi_string_literal\n                           | LPAREN expression RPAREN\n    \"\"\"\n    if p[1] == \"(\":\n        p[0] = p[2]\n    else:\n        p[0] = p[1]\n\n\ndef p_postfix_expression(p):\n    \"\"\" postfix_expression : primary_expression\n                           | postfix_expression LBRACKET expression RBRACKET\n                           | postfix_expression LPAREN RPAREN\n                           | postfix_expression LPAREN argument_expression_list RPAREN\n                           | postfix_expression PERIOD IDENTIFIER\n                           | postfix_expression PTR_OP IDENTIFIER\n                           | postfix_expression INC_OP\n                           | postfix_expression DEC_OP\n    \"\"\"\n\n    if len(p) == 2:\n        p[0] = p[1]\n\n    elif p[2] == \"[\":\n        p[0] = expressions.BinaryExpressionNode(\n            \"array access\", (lambda a, b: a[b]), \"(%s [%s])\", (True, False), p[1], p[3]\n        )\n\n    elif p[2] == \"(\":\n        if p[3] == \")\":\n            p[0] = expressions.CallExpressionNode(p[1], [])\n        else:\n            p[0] = expressions.CallExpressionNode(p[1], p[3])\n\n    elif p[2] == \".\":\n        p[0] = expressions.AttributeExpressionNode(\n            (lambda x, a: getattr(x, a)), \"(%s.%s)\", p[1], p[3]\n        )\n\n    elif p[2] == \"->\":\n        p[0] = expressions.AttributeExpressionNode(\n            (lambda x, a: getattr(x.contents, a)), \"(%s.contents.%s)\", p[1], p[3]\n        )\n\n    elif p[2] == \"++\":\n        p[0] = expressions.UnaryExpressionNode(\n            \"increment\", (lambda x: x + 1), \"(%s + 1)\", False, p[1]\n        )\n\n    elif p[2] == \"--\":\n        p[0] = expressions.UnaryExpressionNode(\n            \"decrement\", (lambda x: x - 1), \"(%s - 1)\", False, p[1]\n        
)\n\n\ndef p_argument_expression_list(p):\n    \"\"\" argument_expression_list : assignment_expression\n                                 | argument_expression_list COMMA assignment_expression\n                                 | type_name\n                                 | argument_expression_list COMMA type_name\n    \"\"\"\n    if len(p) == 4:\n        p[1].append(p[3])\n        p[0] = p[1]\n    else:\n        p[0] = [p[1]]\n\n\ndef p_asm_expression(p):\n    \"\"\" asm_expression : __ASM__ volatile_opt LPAREN string_literal RPAREN\n                       | __ASM__ volatile_opt LPAREN string_literal COLON str_opt_expr_pair_list RPAREN\n                       | __ASM__ volatile_opt LPAREN string_literal COLON str_opt_expr_pair_list COLON str_opt_expr_pair_list RPAREN\n                       | __ASM__ volatile_opt LPAREN string_literal COLON str_opt_expr_pair_list COLON str_opt_expr_pair_list COLON str_opt_expr_pair_list RPAREN\n    \"\"\"\n\n    # Definitely not ISO C, adapted from example ANTLR GCC parser at\n    #  http://www.antlr.org/grammar/cgram//grammars/GnuCParser.g\n    # but more lenient (expressions permitted in optional final part, when\n    # they shouldn't be -- avoids shift/reduce conflict with\n    # str_opt_expr_pair_list).\n\n    p[0] = expressions.UnsupportedExpressionNode(\"This node is ASM assembler.\")\n\n\ndef p_str_opt_expr_pair_list(p):\n    \"\"\" str_opt_expr_pair_list :\n                               | str_opt_expr_pair\n                               | str_opt_expr_pair_list COMMA str_opt_expr_pair\n    \"\"\"\n\n\ndef p_str_opt_expr_pair(p):\n    \"\"\" str_opt_expr_pair : string_literal\n                          | string_literal LPAREN expression RPAREN\n    \"\"\"\n\n\ndef p_volatile_opt(p):\n    \"\"\" volatile_opt :\n                     | VOLATILE\n    \"\"\"\n\n\nprefix_ops_dict = {\n    \"++\": (\"increment\", (lambda x: x + 1), \"(%s + 1)\", False),\n    \"--\": (\"decrement\", (lambda x: x - 1), \"(%s - 1)\", False),\n    
\"&\": (\"reference ('&')\", None, \"pointer(%s)\", True),\n    \"*\": (\"dereference ('*')\", None, \"(%s[0])\", True),\n    \"+\": (\"unary '+'\", (lambda x: x), \"%s\", True),\n    \"-\": (\"negation\", (lambda x: -x), \"(-%s)\", False),\n    \"~\": (\"inversion\", (lambda x: ~x), \"(~%s)\", False),\n    \"!\": (\"logical not\", (lambda x: not x), \"(not %s)\", True),\n}\n\n\ndef p_unary_expression(p):\n    \"\"\" unary_expression : postfix_expression\n                         | INC_OP unary_expression\n                         | DEC_OP unary_expression\n                         | unary_operator cast_expression\n                         | SIZEOF unary_expression\n                         | SIZEOF LPAREN type_name RPAREN\n                         | asm_expression\n    \"\"\"\n    if len(p) == 2:\n        p[0] = p[1]\n\n    elif p[1] == \"sizeof\":\n        if len(p) == 5:\n            p[0] = expressions.SizeOfExpressionNode(p[3])\n        else:\n            p[0] = expressions.SizeOfExpressionNode(p[2])\n\n    else:\n        name, op, format, can_be_ctype = prefix_ops_dict[p[1]]\n        p[0] = expressions.UnaryExpressionNode(name, op, format, can_be_ctype, p[2])\n\n\ndef p_unary_operator(p):\n    \"\"\" unary_operator : AND\n                       | TIMES\n                       | PLUS\n                       | MINUS\n                       | NOT\n                       | LNOT\n    \"\"\"\n    p[0] = p[1]\n\n\ndef p_cast_expression(p):\n    \"\"\" cast_expression : unary_expression\n                        | LPAREN type_name RPAREN cast_expression\n    \"\"\"\n    if len(p) == 2:\n        p[0] = p[1]\n    else:\n        p[0] = expressions.TypeCastExpressionNode(p[4], p[2])\n\n\nmult_ops_dict = {\n    \"*\": (\"multiplication\", (lambda x, y: x * y), \"(%s * %s)\"),\n    \"/\": (\"division\", (lambda x, y: x / y), \"(%s / %s)\"),\n    \"%\": (\"modulo\", (lambda x, y: x % y), \"(%s %% %s)\"),\n}\n\n\ndef p_multiplicative_expression(p):\n    \"\"\" 
multiplicative_expression : cast_expression\n                                  | multiplicative_expression TIMES cast_expression\n                                  | multiplicative_expression DIVIDE cast_expression\n                                  | multiplicative_expression MOD cast_expression\n    \"\"\"\n    if len(p) == 2:\n        p[0] = p[1]\n    else:\n        name, op, format = mult_ops_dict[p[2]]\n        p[0] = expressions.BinaryExpressionNode(name, op, format, (False, False), p[1], p[3])\n\n\nadd_ops_dict = {\n    \"+\": (\"addition\", (lambda x, y: x + y), \"(%s + %s)\"),\n    \"-\": (\"subtraction\", (lambda x, y: x - y), \"(%s - %s)\"),\n}\n\n\ndef p_additive_expression(p):\n    \"\"\" additive_expression : multiplicative_expression\n                            | additive_expression PLUS multiplicative_expression\n                            | additive_expression MINUS multiplicative_expression\n    \"\"\"\n    if len(p) == 2:\n        p[0] = p[1]\n    else:\n        name, op, format = add_ops_dict[p[2]]\n        p[0] = expressions.BinaryExpressionNode(name, op, format, (False, False), p[1], p[3])\n\n\nshift_ops_dict = {\n    \">>\": (\"right shift\", (lambda x, y: x >> y), \"(%s >> %s)\"),\n    \"<<\": (\"left shift\", (lambda x, y: x << y), \"(%s << %s)\"),\n}\n\n\ndef p_shift_expression(p):\n    \"\"\" shift_expression : additive_expression\n                         | shift_expression LEFT_OP additive_expression\n                         | shift_expression RIGHT_OP additive_expression\n    \"\"\"\n    if len(p) == 2:\n        p[0] = p[1]\n    else:\n        name, op, format = shift_ops_dict[p[2]]\n        p[0] = expressions.BinaryExpressionNode(name, op, format, (False, False), p[1], p[3])\n\n\nrel_ops_dict = {\n    \">\": (\"greater-than\", (lambda x, y: x > y), \"(%s > %s)\"),\n    \"<\": (\"less-than\", (lambda x, y: x < y), \"(%s < %s)\"),\n    \">=\": (\"greater-than-equal\", (lambda x, y: x >= y), \"(%s >= %s)\"),\n    \"<=\": 
(\"less-than-equal\", (lambda x, y: x <= y), \"(%s <= %s)\"),\n}\n\n\ndef p_relational_expression(p):\n    \"\"\" relational_expression : shift_expression\n                              | relational_expression LT shift_expression\n                              | relational_expression GT shift_expression\n                              | relational_expression LE_OP shift_expression\n                              | relational_expression GE_OP shift_expression\n    \"\"\"\n    if len(p) == 2:\n        p[0] = p[1]\n    else:\n        name, op, format = rel_ops_dict[p[2]]\n        p[0] = expressions.BinaryExpressionNode(name, op, format, (False, False), p[1], p[3])\n\n\nequality_ops_dict = {\n    \"==\": (\"equals\", (lambda x, y: x == y), \"(%s == %s)\"),\n    \"!=\": (\"not equals\", (lambda x, y: x != y), \"(%s != %s)\"),\n}\n\n\ndef p_equality_expression(p):\n    \"\"\" equality_expression : relational_expression\n                            | equality_expression EQ_OP relational_expression\n                            | equality_expression NE_OP relational_expression\n    \"\"\"\n    if len(p) == 2:\n        p[0] = p[1]\n    else:\n        name, op, format = equality_ops_dict[p[2]]\n        p[0] = expressions.BinaryExpressionNode(name, op, format, (False, False), p[1], p[3])\n\n\ndef p_and_expression(p):\n    \"\"\" and_expression : equality_expression\n                       | and_expression AND equality_expression\n    \"\"\"\n    if len(p) == 2:\n        p[0] = p[1]\n    else:\n        p[0] = expressions.BinaryExpressionNode(\n            \"bitwise and\", (lambda x, y: x & y), \"(%s & %s)\", (False, False), p[1], p[3]\n        )\n\n\ndef p_exclusive_or_expression(p):\n    \"\"\" exclusive_or_expression : and_expression\n                                | exclusive_or_expression XOR and_expression\n    \"\"\"\n    if len(p) == 2:\n        p[0] = p[1]\n    else:\n        p[0] = expressions.BinaryExpressionNode(\n            \"bitwise xor\", (lambda x, y: x ^ y), 
\"(%s ^ %s)\", (False, False), p[1], p[3]\n        )\n\n\ndef p_inclusive_or_expression(p):\n    \"\"\" inclusive_or_expression : exclusive_or_expression\n                                | inclusive_or_expression OR exclusive_or_expression\n    \"\"\"\n    if len(p) == 2:\n        p[0] = p[1]\n    else:\n        p[0] = expressions.BinaryExpressionNode(\n            \"bitwise or\", (lambda x, y: x | y), \"(%s | %s)\", (False, False), p[1], p[3]\n        )\n\n\ndef p_logical_and_expression(p):\n    \"\"\" logical_and_expression : inclusive_or_expression\n                               | logical_and_expression AND_OP inclusive_or_expression\n    \"\"\"\n    if len(p) == 2:\n        p[0] = p[1]\n    else:\n        p[0] = expressions.BinaryExpressionNode(\n            \"logical and\", (lambda x, y: x and y), \"(%s and %s)\", (True, True), p[1], p[3]\n        )\n\n\ndef p_logical_or_expression(p):\n    \"\"\" logical_or_expression : logical_and_expression\n                              | logical_or_expression OR_OP logical_and_expression\n    \"\"\"\n    if len(p) == 2:\n        p[0] = p[1]\n    else:\n        p[0] = expressions.BinaryExpressionNode(\n            \"logical and\", (lambda x, y: x or y), \"(%s or %s)\", (True, True), p[1], p[3]\n        )\n\n\ndef p_conditional_expression(p):\n    \"\"\" conditional_expression : logical_or_expression\n                               | logical_or_expression CONDOP expression COLON conditional_expression\n    \"\"\"\n    if len(p) == 2:\n        p[0] = p[1]\n    else:\n        p[0] = expressions.ConditionalExpressionNode(p[1], p[3], p[5])\n\n\nassign_ops_dict = {\n    \"*=\": (\"multiply\", (lambda x, y: x * y), \"(%s * %s)\"),\n    \"/=\": (\"divide\", (lambda x, y: x / y), \"(%s / %s)\"),\n    \"%=\": (\"modulus\", (lambda x, y: x % y), \"(%s % %s)\"),\n    \"+=\": (\"addition\", (lambda x, y: x + y), \"(%s + %s)\"),\n    \"-=\": (\"subtraction\", (lambda x, y: x - y), \"(%s - %s)\"),\n    \"<<=\": (\"left shift\", (lambda 
x, y: x << y), \"(%s << %s)\"),\n    \">>=\": (\"right shift\", (lambda x, y: x >> y), \"(%s >> %s)\"),\n    \"&=\": (\"bitwise and\", (lambda x, y: x & y), \"(%s & %s)\"),\n    \"^=\": (\"bitwise xor\", (lambda x, y: x ^ y), \"(%s ^ %s)\"),\n    \"|=\": (\"bitwise or\", (lambda x, y: x | y), \"(%s | %s)\"),\n}\n\n\ndef p_assignment_expression(p):\n    \"\"\" assignment_expression : conditional_expression\n                              | unary_expression assignment_operator assignment_expression\n    \"\"\"\n    if len(p) == 2:\n        p[0] = p[1]\n    else:\n        # In C, the value of (x*=3) is the same as (x*3). We support that here.\n        # However, we don't support the change in the value of x.\n        if p[2] == \"=\":\n            p[0] = p[3]\n        else:\n            name, op, format = assign_ops_dict[p[2]]\n            p[0] = expressions.BinaryExpressionNode(name, op, format, (True, True), p[1], p[3])\n\n\ndef p_assignment_operator(p):\n    \"\"\" assignment_operator : EQUALS\n                            | MUL_ASSIGN\n                            | DIV_ASSIGN\n                            | MOD_ASSIGN\n                            | ADD_ASSIGN\n                            | SUB_ASSIGN\n                            | LEFT_ASSIGN\n                            | RIGHT_ASSIGN\n                            | AND_ASSIGN\n                            | XOR_ASSIGN\n                            | OR_ASSIGN\n    \"\"\"\n    p[0] = p[1]\n\n\ndef p_expression(p):\n    \"\"\" expression : assignment_expression\n                   | expression COMMA assignment_expression\n    \"\"\"\n    p[0] = p[1]\n    # We don't need to support sequence expressions...\n\n\ndef p_constant_expression(p):\n    \"\"\" constant_expression : conditional_expression\n    \"\"\"\n    p[0] = p[1]\n\n\ndef p_declaration(p):\n    \"\"\" declaration : declaration_impl SEMI\n    \"\"\"\n    # The ';' must be here, not in 'declaration', as declaration needs to\n    # be executed before the ';' is 
shifted (otherwise the next lookahead will\n    # be read, which may be affected by this declaration if its a typedef.\n\n\ndef p_declaration_impl(p):\n    \"\"\" declaration_impl : declaration_specifier_list\n                         | declaration_specifier_list init_declarator_list\n    \"\"\"\n    declaration = cdeclarations.Declaration()\n    cdeclarations.apply_specifiers(p[1], declaration)\n\n    if len(p) == 2:\n        filename = p.slice[1].filename\n        lineno = p.slice[1].lineno\n        p.parser.cparser.impl_handle_declaration(declaration, filename, lineno)\n        return\n\n    filename = p.slice[2].filename\n    lineno = p.slice[2].lineno\n    for declarator in p[2]:\n        declaration.declarator = declarator\n        p.parser.cparser.impl_handle_declaration(declaration, filename, lineno)\n\n\ndef p_declaration_specifier_list(p):\n    \"\"\" declaration_specifier_list : gcc_attributes declaration_specifier gcc_attributes\n                                   | declaration_specifier_list declaration_specifier gcc_attributes\n    \"\"\"\n    if type(p[1]) == cdeclarations.Attrib:\n        p[0] = (p[1], p[2], p[3])\n        p.slice[0].filename = p.slice[2].filename\n        p.slice[0].lineno = p.slice[2].lineno\n    else:\n        p[0] = p[1] + (p[2], p[3])\n        p.slice[0].filename = p.slice[1].filename\n        p.slice[0].lineno = p.slice[1].lineno\n\n\ndef p_declaration_specifier(p):\n    \"\"\" declaration_specifier : storage_class_specifier\n                              | type_specifier\n                              | type_qualifier\n                              | function_specifier\n    \"\"\"\n    p[0] = p[1]\n\n\ndef p_init_declarator_list(p):\n    \"\"\" init_declarator_list : init_declarator\n                             | init_declarator_list COMMA init_declarator\n    \"\"\"\n    if len(p) > 2:\n        p[0] = p[1] + (p[3],)\n    else:\n        p[0] = (p[1],)\n\n\ndef p_init_declarator(p):\n    \"\"\" init_declarator : declarator 
gcc_attributes\n                        | declarator gcc_attributes EQUALS initializer\n    \"\"\"\n    p[0] = p[1]\n    p[0].attrib.update(p[2])\n    p.slice[0].filename = p.slice[1].filename\n    p.slice[0].lineno = p.slice[1].lineno\n    if len(p) > 3:\n        p[0].initializer = p[4]\n\n\ndef p_storage_class_specifier(p):\n    \"\"\" storage_class_specifier : TYPEDEF\n                                | EXTERN\n                                | STATIC\n                                | AUTO\n                                | REGISTER\n    \"\"\"\n    p[0] = cdeclarations.StorageClassSpecifier(p[1])\n\n\ndef p_type_specifier(p):\n    \"\"\" type_specifier : VOID\n                       | _BOOL\n                       | CHAR\n                       | SHORT\n                       | INT\n                       | LONG\n                       | FLOAT\n                       | DOUBLE\n                       | SIGNED\n                       | UNSIGNED\n                       | struct_or_union_specifier\n                       | enum_specifier\n                       | TYPE_NAME\n    \"\"\"\n    if type(p[1]) in (cdeclarations.StructTypeSpecifier, cdeclarations.EnumSpecifier):\n        p[0] = p[1]\n    else:\n        p[0] = cdeclarations.TypeSpecifier(p[1])\n\n\ndef p_struct_or_union_specifier(p):\n    \"\"\" struct_or_union_specifier : struct_or_union gcc_attributes IDENTIFIER LBRACE member_declaration_list RBRACE\n                                  | struct_or_union gcc_attributes TYPE_NAME LBRACE member_declaration_list RBRACE\n                                  | struct_or_union gcc_attributes LBRACE member_declaration_list RBRACE\n                                  | struct_or_union gcc_attributes IDENTIFIER\n                                  | struct_or_union gcc_attributes TYPE_NAME\n    \"\"\"\n    # format of grammar for gcc_attributes taken from c-parser.c in GCC source.\n    # The TYPE_NAME ones are dodgy, needed for Apple headers\n    # 
CoreServices.framework/Frameworks/CarbonCore.framework/Headers/Files.h.\n    # CoreServices.framework/Frameworks/OSServices.framework/Headers/Power.h\n    tag = None\n    decl = None\n\n    if len(p) == 4:  # struct [attributes] <id/typname>\n        tag = p[3]\n    elif p[3] == \"{\":\n        tag, decl = anonymous_struct_tagnum(), p[4]\n    else:\n        tag, decl = p[3], p[5]\n\n    p[0] = cdeclarations.StructTypeSpecifier(p[1], p[2], tag, decl)\n\n    p.slice[0].filename = p.slice[1].filename\n    p.slice[0].lineno = p.slice[1].lineno\n    p[0].filename = p.slice[1].filename\n    p[0].lineno = p.slice[1].lineno\n\n\ndef p_struct_or_union(p):\n    \"\"\" struct_or_union : STRUCT\n                        | UNION\n    \"\"\"\n    p[0] = p[1] == \"union\"\n\n\ndef p_gcc_attributes(p):\n    \"\"\" gcc_attributes :\n                       | gcc_attributes gcc_attribute\n    \"\"\"\n    # Allow empty production on attributes (take from c-parser.c in GCC source)\n    if len(p) == 1:\n        p[0] = cdeclarations.Attrib()\n    else:\n        p[0] = p[1]\n        p[0].update(p[2])\n\n\ndef p_gcc_attribute(p):\n    \"\"\" gcc_attribute : __ATTRIBUTE__ LPAREN LPAREN gcc_attrib_list RPAREN RPAREN\n    \"\"\"\n    p[0] = cdeclarations.Attrib()\n    p[0].update(p[4])\n\n\ndef p_gcc_attrib_list(p):\n    \"\"\" gcc_attrib_list : gcc_attrib\n                        | gcc_attrib_list COMMA gcc_attrib\n    \"\"\"\n    if len(p) == 2:\n        p[0] = (p[1],)\n    else:\n        p[0] = p[1] + (p[3],)\n\n\ndef p_gcc_attrib(p):\n    \"\"\" gcc_attrib :\n                   | IDENTIFIER\n                   | IDENTIFIER LPAREN argument_expression_list RPAREN\n    \"\"\"\n    if len(p) == 1:\n        p[0] = (None, None)\n    elif len(p) == 2:\n        p[0] = (p[1], True)\n    elif len(p) == 5:\n        p[0] = (p[1], p[3])\n    else:\n        raise RuntimeError(\"Should never reach this part of the grammar\")\n\n\ndef p_member_declaration_list(p):\n    \"\"\" member_declaration_list : 
member_declaration\n                                | member_declaration_list member_declaration\n    \"\"\"\n    if len(p) == 2:\n        p[0] = p[1]\n    else:\n        p[0] = p[1] + p[2]\n\n\ndef p_member_declaration(p):\n    \"\"\" member_declaration : specifier_qualifier_list member_declarator_list SEMI\n                           | specifier_qualifier_list SEMI\n    \"\"\"\n    # p[0] returned is a tuple, to handle multiple declarators in one\n    # declaration.\n    r = ()\n    if len(p) >= 4:\n        for declarator in p[2]:\n            declaration = cdeclarations.Declaration()\n            cdeclarations.apply_specifiers(p[1], declaration)\n            declaration.declarator = declarator\n            r += (declaration,)\n    else:\n        # anonymous field (C11/GCC extension)\n        declaration = cdeclarations.Declaration()\n        cdeclarations.apply_specifiers(p[1], declaration)\n        r = (declaration,)\n\n    p[0] = r\n\n\ndef p_specifier_qualifier_list(p):\n    \"\"\" specifier_qualifier_list : gcc_attributes specifier_qualifier gcc_attributes\n                                 | specifier_qualifier_list specifier_qualifier gcc_attributes\n    \"\"\"\n    if type(p[1]) == cdeclarations.Attrib:\n        p[0] = (p[1], p[2], p[3])\n    else:\n        p[0] = p[1] + (p[2], p[3])\n\n\ndef p_specifier_qualifier(p):\n    \"\"\" specifier_qualifier : type_specifier\n                            | type_qualifier\n    \"\"\"\n    p[0] = p[1]\n\n\ndef p_member_declarator_list(p):\n    \"\"\" member_declarator_list : member_declarator\n                               | member_declarator_list COMMA member_declarator\n    \"\"\"\n    if len(p) == 2:\n        p[0] = (p[1],)\n    else:\n        p[0] = p[1] + (p[3],)\n\n\ndef p_member_declarator(p):\n    \"\"\" member_declarator : declarator gcc_attributes\n                          | COLON constant_expression gcc_attributes\n                          | declarator COLON constant_expression gcc_attributes\n    
\"\"\"\n    if p[1] == \":\":\n        p[0] = cdeclarations.Declarator()\n        p[0].bitfield = p[2]\n    else:\n        p[0] = p[1]\n        # Bitfield support\n        if p[2] == \":\":\n            p[0].bitfield = p[3]\n\n    p[0].attrib.update(p[len(p) - 1])\n\n\ndef p_enum_specifier(p):\n    \"\"\" enum_specifier : ENUM LBRACE enumerator_list RBRACE\n                       | ENUM IDENTIFIER LBRACE enumerator_list RBRACE\n                       | ENUM IDENTIFIER\n    \"\"\"\n    if len(p) == 5:\n        p[0] = cdeclarations.EnumSpecifier(None, p[3])\n    elif len(p) == 6:\n        p[0] = cdeclarations.EnumSpecifier(p[2], p[4])\n    else:\n        p[0] = cdeclarations.EnumSpecifier(p[2], ())\n\n    p[0].filename = p.slice[0].filename\n    p[0].lineno = p.slice[0].lineno\n\n\ndef p_enumerator_list(p):\n    \"\"\" enumerator_list : enumerator_list_iso\n                        | enumerator_list_iso COMMA\n    \"\"\"\n    # Apple headers sometimes have trailing ',' after enumerants, which is\n    # not ISO C.\n    p[0] = p[1]\n\n\ndef p_enumerator_list_iso(p):\n    \"\"\" enumerator_list_iso : enumerator\n                            | enumerator_list_iso COMMA enumerator\n    \"\"\"\n    if len(p) == 2:\n        p[0] = (p[1],)\n    else:\n        p[0] = p[1] + (p[3],)\n\n\ndef p_enumerator(p):\n    \"\"\" enumerator : IDENTIFIER\n                   | IDENTIFIER EQUALS constant_expression\n    \"\"\"\n    if len(p) == 2:\n        p[0] = cdeclarations.Enumerator(p[1], None)\n    else:\n        p[0] = cdeclarations.Enumerator(p[1], p[3])\n\n\ndef p_type_qualifier(p):\n    \"\"\" type_qualifier : CONST\n                       | VOLATILE\n                       | RESTRICT\n    \"\"\"\n    p[0] = cdeclarations.TypeQualifier(p[1])\n\n\ndef p_function_specifier(p):\n    \"\"\" function_specifier : INLINE\n                           | _NORETURN\n    \"\"\"\n\n\ndef p_declarator(p):\n    \"\"\" declarator : pointer direct_declarator\n                   | direct_declarator\n 
   \"\"\"\n    if len(p) > 2:\n        p[0] = p[1]\n        ptr = p[1]\n        while ptr.pointer:\n            ptr = ptr.pointer\n        ptr.pointer = p[2]\n        p[2].attrib.update(p[1].attrib)\n    else:\n        p[0] = p[1]\n\n\ndef p_direct_declarator(p):\n    \"\"\" direct_declarator : IDENTIFIER\n                          | LPAREN gcc_attributes declarator RPAREN\n                          | direct_declarator LBRACKET constant_expression RBRACKET\n                          | direct_declarator LBRACKET RBRACKET\n                          | direct_declarator LPAREN parameter_type_list RPAREN\n                          | direct_declarator LPAREN identifier_list RPAREN\n                          | direct_declarator LPAREN RPAREN\n    \"\"\"\n    if isinstance(p[1], cdeclarations.Declarator):\n        p[0] = p[1]\n        if p[2] == \"[\":\n            a = cdeclarations.Array()\n            a.array = p[0].array\n            p[0].array = a\n            if p[3] != \"]\":\n                a.size = p[3]\n        else:\n            if p[3] == \")\":\n                p[0].parameters = ()\n            else:\n                p[0].parameters = p[3]\n    elif p[1] == \"(\":\n        p[0] = p[3]\n        p[3].attrib.update(p[2])\n    else:\n        p[0] = cdeclarations.Declarator()\n        p[0].identifier = p[1]\n\n    # Check parameters for (void) and simplify to empty tuple.\n    if p[0].parameters and len(p[0].parameters) == 1:\n        param = p[0].parameters[0]\n        if param.type.specifiers == [\"void\"] and not param.declarator:\n            p[0].parameters = ()\n\n\ndef p_pointer(p):\n    \"\"\" pointer : TIMES\n                | TIMES type_qualifier_list\n                | TIMES pointer\n                | TIMES type_qualifier_list pointer\n    \"\"\"\n    if len(p) == 2:\n        p[0] = cdeclarations.Pointer()\n    elif len(p) == 3 and isinstance(p[2], cdeclarations.Pointer):\n        p[0] = cdeclarations.Pointer()\n        p[0].pointer = p[2]\n        
p[0].attrib.update(p[2].attrib)\n    else:\n        p[0] = cdeclarations.Pointer()\n        for tq in p[2]:\n            if isinstance(tq, cdeclarations.Attrib):\n                p[0].attrib.update(tq)\n            else:\n                p[0].qualifiers += (tq,)\n\n        if len(p) == 4:\n            p[0].pointer = p[3]\n            p[0].attrib.update(p[3].attrib)\n\n\ndef p_type_qualifier_list(p):\n    \"\"\" type_qualifier_list : type_qualifier\n                            | gcc_attribute\n                            | type_qualifier_list type_qualifier\n                            | type_qualifier_list gcc_attribute\n    \"\"\"\n    if len(p) > 2:\n        p[0] = p[1] + (p[2],)\n    else:\n        p[0] = (p[1],)\n\n\ndef p_parameter_type_list(p):\n    \"\"\" parameter_type_list : parameter_list\n                            | parameter_list COMMA ELLIPSIS\n    \"\"\"\n    if len(p) > 2:\n        p[0] = p[1] + (p[3],)\n    else:\n        p[0] = p[1]\n\n\ndef p_parameter_list(p):\n    \"\"\" parameter_list : parameter_declaration\n                       | parameter_list COMMA parameter_declaration\n    \"\"\"\n    if len(p) > 2:\n        p[0] = p[1] + (p[3],)\n    else:\n        p[0] = (p[1],)\n\n\ndef p_parameter_declaration(p):\n    \"\"\" parameter_declaration : declaration_specifier_list declarator gcc_attributes\n                              | declaration_specifier_list abstract_declarator\n                              | declaration_specifier_list\n    \"\"\"\n    p[0] = cdeclarations.Parameter()\n    specs = p[1]\n\n    if len(p) == 4:\n        # add the attributes as a final specifier\n        specs += (p[3],)\n        p[0].declarator = p[2]\n    elif len(p) == 3:\n        p[0].declarator = p[2]\n\n    cdeclarations.apply_specifiers(specs, p[0])\n\n\ndef p_identifier_list(p):\n    \"\"\" identifier_list : IDENTIFIER\n                        | identifier_list COMMA IDENTIFIER\n    \"\"\"\n    param = cdeclarations.Parameter()\n    param.declarator = 
cdeclarations.Declarator()\n    if len(p) > 2:\n        param.declarator.identifier = p[3]\n        p[0] = p[1] + (param,)\n    else:\n        param.declarator.identifier = p[1]\n        p[0] = (param,)\n\n\ndef p_type_name(p):\n    \"\"\" type_name : specifier_qualifier_list\n                  | specifier_qualifier_list abstract_declarator\n    \"\"\"\n    typ = p[1]\n    if len(p) == 3:\n        declarator = p[2]\n    else:\n        declarator = None\n\n    declaration = cdeclarations.Declaration()\n    declaration.declarator = declarator\n    cdeclarations.apply_specifiers(typ, declaration)\n    ctype = p.parser.cparser.get_ctypes_type(declaration.type, declaration.declarator)\n    p[0] = ctype\n\n\ndef p_abstract_declarator(p):\n    \"\"\" abstract_declarator : pointer\n                            | direct_abstract_declarator         gcc_attributes\n                            | pointer direct_abstract_declarator gcc_attributes\n    \"\"\"\n    if len(p) == 2:\n        p[0] = p[1]\n        ptr = p[0]\n        while ptr.pointer:\n            ptr = ptr.pointer\n        # Only if doesn't already terminate in a declarator\n        if type(ptr) == cdeclarations.Pointer:\n            ptr.pointer = cdeclarations.Declarator()\n            ptr.pointer.attrib.update(p[1].attrib)\n        else:\n            ptr.attrib.update(p[1].attrib)\n    elif len(p) == 3:\n        p[0] = p[1]\n        p[1].attrib.update(p[2])\n    else:\n        p[0] = p[1]\n        ptr = p[0]\n        while ptr.pointer:\n            ptr = ptr.pointer\n        ptr.pointer = p[2]\n        p[2].attrib.update(p[1].attrib)\n        p[2].attrib.update(p[3])\n\n\ndef p_direct_abstract_declarator(p):\n    \"\"\" direct_abstract_declarator : LPAREN gcc_attributes abstract_declarator RPAREN\n                                   | LBRACKET RBRACKET\n                                   | LBRACKET constant_expression RBRACKET\n                                   | direct_abstract_declarator LBRACKET RBRACKET\n       
                            | direct_abstract_declarator LBRACKET constant_expression RBRACKET\n                                   | LPAREN RPAREN\n                                   | LPAREN parameter_type_list RPAREN\n                                   | direct_abstract_declarator LPAREN RPAREN\n                                   | direct_abstract_declarator LPAREN parameter_type_list RPAREN\n    \"\"\"\n    if p[1] == \"(\" and isinstance(p[3], cdeclarations.Declarator):\n        p[0] = p[3]\n        p[3].attrib.update(p[2])\n    else:\n        if isinstance(p[1], cdeclarations.Declarator):\n            p[0] = p[1]\n            if p[2] == \"[\":\n                a = cdeclarations.Array()\n                a.array = p[0].array\n                p[0].array = a\n                if p[3] != \"]\":\n                    p[0].array.size = p[3]\n            elif p[2] == \"(\":\n                if p[3] == \")\":\n                    p[0].parameters = ()\n                else:\n                    p[0].parameters = p[3]\n        else:\n            p[0] = cdeclarations.Declarator()\n            if p[1] == \"[\":\n                p[0].array = cdeclarations.Array()\n                if p[2] != \"]\":\n                    p[0].array.size = p[2]\n            elif p[1] == \"(\":\n                if p[2] == \")\":\n                    p[0].parameters = ()\n                else:\n                    p[0].parameters = p[2]\n\n    # Check parameters for (void) and simplify to empty tuple.\n    if p[0].parameters and len(p[0].parameters) == 1:\n        param = p[0].parameters[0]\n        if param.type.specifiers == [\"void\"] and not param.declarator:\n            p[0].parameters = ()\n\n\ndef p_initializer(p):\n    \"\"\" initializer : assignment_expression\n                    | LBRACE initializer_list RBRACE\n                    | LBRACE initializer_list COMMA RBRACE\n    \"\"\"\n\n\ndef p_initializer_list(p):\n    \"\"\" initializer_list : initializer\n                         | 
initializer_list COMMA initializer\n    \"\"\"\n\n\ndef p_statement(p):\n    \"\"\" statement : labeled_statement\n                  | compound_statement\n                  | expression_statement\n                  | selection_statement\n                  | iteration_statement\n                  | jump_statement\n    \"\"\"\n\n\ndef p_labeled_statement(p):\n    \"\"\" labeled_statement : IDENTIFIER COLON statement\n                          | CASE constant_expression COLON statement\n                          | DEFAULT COLON statement\n    \"\"\"\n\n\ndef p_compound_statement(p):\n    \"\"\" compound_statement : LBRACE RBRACE\n                           | LBRACE statement_list RBRACE\n                           | LBRACE declaration_list RBRACE\n                           | LBRACE declaration_list statement_list RBRACE\n    \"\"\"\n\n\ndef p_compound_statement_error(p):\n    \"\"\" compound_statement : LBRACE error RBRACE\n    \"\"\"\n    # Error resynchronisation catch-all\n\n\ndef p_declaration_list(p):\n    \"\"\" declaration_list : declaration\n                         | declaration_list declaration\n    \"\"\"\n\n\ndef p_statement_list(p):\n    \"\"\" statement_list : statement\n                       | statement_list statement\n    \"\"\"\n\n\ndef p_expression_statement(p):\n    \"\"\" expression_statement : SEMI\n                             | expression SEMI\n    \"\"\"\n\n\ndef p_expression_statement_error(p):\n    \"\"\" expression_statement : error SEMI\n    \"\"\"\n    # Error resynchronisation catch-all\n\n\ndef p_selection_statement(p):\n    \"\"\" selection_statement : IF LPAREN expression RPAREN statement %prec IF\n                            | IF LPAREN expression RPAREN statement ELSE statement\n                            | SWITCH LPAREN expression RPAREN statement\n    \"\"\"\n\n\ndef p_iteration_statement(p):\n    \"\"\" iteration_statement : WHILE LPAREN expression RPAREN statement\n                            | DO statement WHILE LPAREN 
expression RPAREN SEMI\n                            | FOR LPAREN expression_statement expression_statement RPAREN statement\n                            | FOR LPAREN expression_statement expression_statement expression RPAREN statement\n    \"\"\"\n\n\ndef p_jump_statement(p):\n    \"\"\" jump_statement : GOTO IDENTIFIER SEMI\n                       | CONTINUE SEMI\n                       | BREAK SEMI\n                       | RETURN SEMI\n                       | RETURN expression SEMI\n    \"\"\"\n\n\ndef p_external_declaration(p):\n    \"\"\" external_declaration : declaration\n                             | function_definition\n    \"\"\"\n    # Intentionally empty\n\n\ndef p_function_definition(p):\n    \"\"\" function_definition : declaration_specifier_list declarator declaration_list compound_statement\n                            | declaration_specifier_list declarator compound_statement\n                            | declarator declaration_list compound_statement\n                            | declarator compound_statement\n    \"\"\"\n    # No impl of function defs\n\n\ndef p_directive(p):\n    \"\"\" directive : define\n                  | undefine\n                  | pragma\n    \"\"\"\n\n\ndef p_define(p):\n    \"\"\" define : PP_DEFINE PP_DEFINE_NAME PP_END_DEFINE\n               | PP_DEFINE PP_DEFINE_NAME type_name PP_END_DEFINE\n               | PP_DEFINE PP_DEFINE_NAME constant_expression PP_END_DEFINE\n               | PP_DEFINE PP_DEFINE_MACRO_NAME LPAREN RPAREN PP_END_DEFINE\n               | PP_DEFINE PP_DEFINE_MACRO_NAME LPAREN RPAREN constant_expression PP_END_DEFINE\n               | PP_DEFINE PP_DEFINE_MACRO_NAME LPAREN macro_parameter_list RPAREN PP_END_DEFINE\n               | PP_DEFINE PP_DEFINE_MACRO_NAME LPAREN macro_parameter_list RPAREN constant_expression PP_END_DEFINE\n    \"\"\"\n    filename = p.slice[1].filename\n    lineno = p.slice[1].lineno\n\n    if p[3] != \"(\":\n        if len(p) == 4:\n            
p.parser.cparser.handle_define_constant(p[2], None, filename, lineno)\n        else:\n            p.parser.cparser.handle_define_constant(p[2], p[3], filename, lineno)\n    else:\n        if p[4] == \")\":\n            params = []\n            if len(p) == 6:\n                expr = None\n            elif len(p) == 7:\n                expr = p[5]\n        else:\n            params = p[4]\n            if len(p) == 7:\n                expr = None\n            elif len(p) == 8:\n                expr = p[6]\n\n        filename = p.slice[1].filename\n        lineno = p.slice[1].lineno\n\n        p.parser.cparser.handle_define_macro(p[2], params, expr, filename, lineno)\n\n\ndef p_define_error(p):\n    \"\"\" define : PP_DEFINE error PP_END_DEFINE\n    \"\"\"\n    lexer = p[2].lexer\n    clexdata = lexer.tokens\n    start = end = p[2].clexpos\n    while clexdata[start].type != \"PP_DEFINE\":\n        start -= 1\n    while clexdata[end].type != \"PP_END_DEFINE\":\n        end += 1\n\n    name = clexdata[start + 1].value\n    if clexdata[start + 1].type == \"PP_DEFINE_NAME\":\n        params = None\n        contents = [t.value for t in clexdata[start + 2 : end]]\n    else:\n        end_of_param_list = start\n        while clexdata[end_of_param_list].value != \")\" and end_of_param_list < end:\n            end_of_param_list += 1\n        params = [t.value for t in clexdata[start + 3 : end_of_param_list] if t.value != \",\"]\n        contents = [t.value for t in clexdata[end_of_param_list + 1 : end]]\n\n    filename = p.slice[1].filename\n    lineno = p.slice[1].lineno\n\n    p[2].lexer.cparser.handle_define_unparseable(name, params, contents, filename, lineno)\n\n\ndef p_undefine(p):\n    \"\"\" undefine : PP_UNDEFINE PP_DEFINE_NAME PP_END_DEFINE\n    \"\"\"\n\n    filename = p.slice[1].filename\n    lineno = p.slice[1].lineno\n\n    macro = expressions.IdentifierExpressionNode(p[2])\n    p.parser.cparser.handle_undefine(macro, filename, lineno)\n\n\ndef 
p_macro_parameter_list(p):\n    \"\"\" macro_parameter_list : PP_MACRO_PARAM\n                             | macro_parameter_list COMMA PP_MACRO_PARAM\n    \"\"\"\n    if len(p) == 2:\n        p[0] = [p[1]]\n    else:\n        p[1].append(p[3])\n        p[0] = p[1]\n\n\ndef p_error(t):\n    if t.lexer.in_define:\n        # p_define_error will generate an error message.\n        pass\n    else:\n        if t.type == \"$end\":\n            t.parser.cparser.handle_error(\"Syntax error at end of file.\", t.filename, 0)\n        else:\n            t.lexer.cparser.handle_error(\"Syntax error at %r\" % t.value, t.filename, t.lineno)\n    # Don't alter lexer: default behaviour is to pass error production\n    # up until it hits the catch-all at declaration, at which point\n    # parsing continues (synchronisation).\n\n\ndef p_pragma(p):\n    \"\"\" pragma : pragma_pack\n               | PRAGMA pragma_directive_list PRAGMA_END\n    \"\"\"\n\n\ndef p_pragma_pack(p):\n    \"\"\" pragma_pack : PRAGMA PRAGMA_PACK LPAREN RPAREN PRAGMA_END\n                    | PRAGMA PRAGMA_PACK LPAREN constant RPAREN PRAGMA_END\n                    | PRAGMA PRAGMA_PACK LPAREN pragma_pack_stack_args RPAREN PRAGMA_END\n    \"\"\"\n\n    err = None\n    if len(p) == 6:\n        cdeclarations.pragma_pack.set_default()\n    elif isinstance(p[4], tuple):\n        op, id, n = p[4]\n        if op == \"push\":\n            err = cdeclarations.pragma_pack.push(id, n)\n        elif op == \"pop\":\n            err = cdeclarations.pragma_pack.pop(id)\n        else:\n            err = \"Syntax error for #pragma pack at {}:{}\".format(\n                p.slice[1].filename, p.slice[1].lineno\n            )\n    else:\n        cdeclarations.pragma_pack.current = p[4]\n\n    if err:\n        p.lexer.cparser.handle_error(err, p.slice[1].filename, p.slice[1].lineno)\n\n\ndef p_pragma_pack_stack_args(p):\n    \"\"\" pragma_pack_stack_args : IDENTIFIER\n                               | IDENTIFIER COMMA IDENTIFIER\n 
                              | IDENTIFIER COMMA IDENTIFIER COMMA constant\n                               | IDENTIFIER COMMA constant COMMA IDENTIFIER\n                               | IDENTIFIER COMMA constant\n    \"\"\"\n    op, id, n = p[1], None, None\n\n    if len(p) > 2:\n        if isinstance(p[3], expressions.ConstantExpressionNode):\n            n = p[3].value\n\n            if len(p) > 4:\n                id = p[5]\n        else:\n            id = p[3]\n\n            if len(p) > 4:\n                n = p[5].value\n\n    p[0] = (op, id, n)\n\n\ndef p_pragma_directive_list(p):\n    \"\"\" pragma_directive_list : pragma_directive\n                              | pragma_directive_list pragma_directive\n    \"\"\"\n    if len(p) == 3:\n        p[0] = p[1] + (p[2],)\n    else:\n        p[0] = (p[1],)\n\n\ndef p_pragma_directive(p):\n    \"\"\" pragma_directive : IDENTIFIER\n                         | string_literal\n    \"\"\"\n    p[0] = p[1]\n\n\ndef main():\n    yacc.yacc(tabmodule=\"new_parsetab\")\n"
  },
  {
    "path": "ctypesgen/parser/cparser.py",
    "content": "\"\"\"\nParse a C source file.\n\nTo use, subclass CParser and override its handle_* methods.  Then instantiate\nthe class with a string to parse.\n\"\"\"\n\n__docformat__ = \"restructuredtext\"\n\nimport os.path\nimport sys\n\nfrom ctypesgen.parser import cgrammar, preprocessor, yacc\n\n# --------------------------------------------------------------------------\n# Lexer\n# --------------------------------------------------------------------------\n\n\nclass CLexer(object):\n    def __init__(self, cparser):\n        self.cparser = cparser\n        self.type_names = set()\n        self.in_define = False\n        self.lineno = -1\n        self.lexpos = -1\n\n    def input(self, tokens):\n        self.tokens = tokens\n        self.pos = 0\n\n    def token(self):\n        while self.pos < len(self.tokens):\n            t = self.tokens[self.pos]\n\n            self.pos += 1\n\n            if not t:\n                break\n\n            if t.type == \"PP_DEFINE\":\n                self.in_define = True\n            elif t.type == \"PP_END_DEFINE\":\n                self.in_define = False\n\n            # Transform PP tokens into C tokens\n            elif t.type == \"IDENTIFIER\" and t.value in cgrammar.keywords:\n                t.type = cgrammar.keyword_map[t.value]\n            elif t.type == \"IDENTIFIER\" and t.value in self.type_names:\n                if self.pos < 2 or self.tokens[self.pos - 2].type not in (\n                    \"VOID\",\n                    \"_BOOL\",\n                    \"CHAR\",\n                    \"SHORT\",\n                    \"INT\",\n                    \"LONG\",\n                    \"FLOAT\",\n                    \"DOUBLE\",\n                    \"SIGNED\",\n                    \"UNSIGNED\",\n                    \"ENUM\",\n                    \"STRUCT\",\n                    \"UNION\",\n                    \"TYPE_NAME\",\n                ):\n                    t.type = \"TYPE_NAME\"\n\n            t.lexer = self\n  
          t.clexpos = self.pos - 1\n\n            return t\n        return None\n\n\n# --------------------------------------------------------------------------\n# Parser\n# --------------------------------------------------------------------------\n\n\nclass CParser(object):\n    \"\"\"Parse a C source file.\n\n    Subclass and override the handle_* methods.  Call `parse` with a string\n    to parse.\n    \"\"\"\n\n    def __init__(self, options):\n        super(CParser, self).__init__()\n        self.preprocessor_parser = preprocessor.PreprocessorParser(options, self)\n        self.parser = yacc.yacc(\n            method=\"LALR\",\n            debug=False,\n            module=cgrammar,\n            write_tables=True,\n            outputdir=os.path.dirname(__file__),\n            optimize=True,\n        )\n\n        self.parser.errorfunc = cgrammar.p_error\n        self.parser.cparser = self\n\n        self.lexer = CLexer(self)\n        if not options.no_stddef_types:\n            self.lexer.type_names.add(\"wchar_t\")\n            self.lexer.type_names.add(\"ptrdiff_t\")\n            self.lexer.type_names.add(\"size_t\")\n        if not options.no_gnu_types:\n            self.lexer.type_names.add(\"__builtin_va_list\")\n        if sys.platform == \"win32\" and not options.no_python_types:\n            self.lexer.type_names.add(\"__int64\")\n\n    def parse(self, filename, debug=False):\n        \"\"\"Parse a file.\n\n        If `debug` is True, parsing state is dumped to stdout.\n        \"\"\"\n\n        self.handle_status(\"Preprocessing %s\" % filename)\n        self.preprocessor_parser.parse(filename)\n        self.lexer.input(self.preprocessor_parser.output)\n        self.handle_status(\"Parsing %s\" % filename)\n        self.parser.parse(lexer=self.lexer, debug=debug, tracking=True)\n\n    # ----------------------------------------------------------------------\n    # Parser interface.  
Override these methods in your subclass.\n    # ----------------------------------------------------------------------\n\n    def handle_error(self, message, filename, lineno):\n        \"\"\"A parse error occurred.\n\n        The default implementation prints `lineno` and `message` to stderr.\n        The parser will try to recover from errors by synchronising at the\n        next semicolon.\n        \"\"\"\n        sys.stderr.write(\"%s:%s %s\\n\" % (filename, lineno, message))\n\n    def handle_pp_error(self, message):\n        \"\"\"The C preprocessor emitted an error.\n\n        The default implementation prints the error to stderr. If processing\n        can continue, it will.\n        \"\"\"\n        sys.stderr.write(\"Preprocessor: {}\\n\".format(message))\n\n    def handle_status(self, message):\n        \"\"\"Progress information.\n\n        The default implementationg prints message to stderr.\n        \"\"\"\n        sys.stderr.write(\"{}\\n\".format(message))\n\n    def handle_define(self, name, params, value, filename, lineno):\n        \"\"\"#define `name` `value`\n        or #define `name`(`params`) `value`\n\n        name is a string\n        params is None or a list of strings\n        value is a ...?\n        \"\"\"\n\n    def handle_define_constant(self, name, value, filename, lineno):\n        \"\"\"#define `name` `value`\n\n        name is a string\n        value is an ExpressionNode or None\n        \"\"\"\n\n    def handle_define_macro(self, name, params, value, filename, lineno):\n        \"\"\"#define `name`(`params`) `value`\n\n        name is a string\n        params is a list of strings\n        value is an ExpressionNode or None\n        \"\"\"\n\n    def handle_undefine(self, name, filename, lineno):\n        \"\"\"#undef `name`\n\n        name is a string\n        \"\"\"\n\n    def impl_handle_declaration(self, declaration, filename, lineno):\n        \"\"\"Internal method that calls `handle_declaration`.  
This method\n        also adds any new type definitions to the lexer's list of valid type\n        names, which affects the parsing of subsequent declarations.\n        \"\"\"\n        if declaration.storage == \"typedef\":\n            declarator = declaration.declarator\n            if not declarator:\n                # XXX TEMPORARY while struct etc not filled\n                return\n            while declarator.pointer:\n                declarator = declarator.pointer\n            self.lexer.type_names.add(declarator.identifier)\n        self.handle_declaration(declaration, filename, lineno)\n\n    def handle_declaration(self, declaration, filename, lineno):\n        \"\"\"A declaration was encountered.\n\n        `declaration` is an instance of Declaration.  Where a declaration has\n        multiple initialisers, each is returned as a separate declaration.\n        \"\"\"\n        pass\n\n\nclass DebugCParser(CParser):\n    \"\"\"A convenience class that prints each invocation of a handle_* method to\n    stdout.\n    \"\"\"\n\n    def handle_define(self, name, value, filename, lineno):\n        print(\"#define name=%r, value=%r\" % (name, value))\n\n    def handle_define_constant(self, name, value, filename, lineno):\n        print(\"#define constant name=%r, value=%r\" % (name, value))\n\n    def handle_declaration(self, declaration, filename, lineno):\n        print(declaration)\n\n    def get_ctypes_type(self, typ, declarator):\n        return typ\n\n    def handle_define_unparseable(self, name, params, value, filename, lineno):\n        if params:\n            original_string = \"#define %s(%s) %s\" % (name, \",\".join(params), \" \".join(value))\n        else:\n            original_string = \"#define %s %s\" % (name, \" \".join(value))\n        print(original_string)\n\n\nif __name__ == \"__main__\":\n    DebugCParser().parse(sys.argv[1], debug=True)\n"
  },
  {
    "path": "ctypesgen/parser/ctypesparser.py",
    "content": "\"\"\"\nctypesgen.parser.ctypesparser contains a class, CtypesParser, which is a\nsubclass of ctypesgen.parser.cparser.CParser. CtypesParser overrides the\nhandle_declaration() method of CParser. It turns the low-level type declarations\nproduced by CParser into CtypesType instances and breaks the parser's general\ndeclarations into function, variable, typedef, constant, and type descriptions.\n\"\"\"\n\n__docformat__ = \"restructuredtext\"\n\n__all__ = [\"CtypesParser\"]\n\nfrom ctypesgen.ctypedescs import (\n    CtypesArray,\n    CtypesBitfield,\n    CtypesEnum,\n    CtypesFunction,\n    CtypesPointer,\n    CtypesSimple,\n    CtypesSpecial,\n    CtypesStruct,\n    CtypesTypedef,\n    ctypes_type_map,\n    ctypes_type_map_python_builtin,\n    remove_function_pointer,\n)\nfrom ctypesgen.expressions import (\n    BinaryExpressionNode,\n    ConstantExpressionNode,\n    IdentifierExpressionNode,\n)\nfrom ctypesgen.parser.cdeclarations import (\n    Attrib,\n    EnumSpecifier,\n    Pointer,\n    StructTypeSpecifier,\n)\nfrom ctypesgen.parser.cparser import CParser\n\n\ndef make_enum_from_specifier(specifier):\n    tag = specifier.tag\n\n    enumerators = []\n    last_name = None\n    for e in specifier.enumerators:\n        if e.expression:\n            value = e.expression\n        else:\n            if last_name:\n                value = BinaryExpressionNode(\n                    \"addition\",\n                    (lambda x, y: x + y),\n                    \"(%s + %s)\",\n                    (False, False),\n                    IdentifierExpressionNode(last_name),\n                    ConstantExpressionNode(1),\n                )\n            else:\n                value = ConstantExpressionNode(0)\n\n        enumerators.append((e.name, value))\n        last_name = e.name\n\n    return CtypesEnum(tag, enumerators, src=(specifier.filename, specifier.lineno))\n\n\ndef get_decl_id(decl):\n    \"\"\"Return the identifier of a given declarator\"\"\"\n    
while isinstance(decl, Pointer):\n        decl = decl.pointer\n    p_name = \"\"\n    if decl is not None and decl.identifier is not None:\n        p_name = decl.identifier\n    return p_name\n\n\nclass CtypesParser(CParser):\n    \"\"\"Parse a C file for declarations that can be used by ctypes.\n\n    Subclass and override the handle_ctypes_* methods.\n    \"\"\"\n\n    def __init__(self, options):\n        super(CtypesParser, self).__init__(options)\n        self.type_map = ctypes_type_map\n        if not options.no_python_types:\n            self.type_map.update(ctypes_type_map_python_builtin)\n\n    def make_struct_from_specifier(self, specifier):\n        variety = {True: \"union\", False: \"struct\"}[specifier.is_union]\n        tag = specifier.tag\n\n        if specifier.declarations:\n            members = []\n            for declaration in specifier.declarations:\n                t = self.get_ctypes_type(\n                    declaration.type, declaration.declarator, check_qualifiers=True\n                )\n                declarator = declaration.declarator\n                if declarator is None:\n                    # Anonymous field in nested union/struct (C11/GCC).\n                    name = None\n                else:\n                    while declarator.pointer:\n                        declarator = declarator.pointer\n                    name = declarator.identifier\n                members.append((name, remove_function_pointer(t)))\n        else:\n            members = None\n\n        return CtypesStruct(\n            tag, specifier.attrib, variety, members, src=(specifier.filename, specifier.lineno)\n        )\n\n    def get_ctypes_type(self, typ, declarator, check_qualifiers=False):\n        signed = True\n        typename = \"int\"\n        longs = 0\n        t = None\n\n        for specifier in typ.specifiers:\n            if isinstance(specifier, StructTypeSpecifier):\n                t = self.make_struct_from_specifier(specifier)\n         
   elif isinstance(specifier, EnumSpecifier):\n                t = make_enum_from_specifier(specifier)\n            elif specifier == \"signed\":\n                signed = True\n            elif specifier == \"unsigned\":\n                signed = False\n            elif specifier == \"long\":\n                longs += 1\n            elif specifier == \"short\":\n                longs = -1\n            else:\n                typename = str(specifier)\n\n        if not t:\n            # It is a numeric type of some sort\n            if (typename, signed, longs) in self.type_map:\n                t = CtypesSimple(typename, signed, longs)\n\n            elif signed and not longs:\n                t = CtypesTypedef(typename)\n\n            else:\n                name = \" \".join(typ.specifiers)\n                if typename in [x[0] for x in self.type_map.keys()]:\n                    # It's an unsupported variant of a builtin type\n                    error = 'Ctypes does not support the type \"%s\".' 
% name\n                else:\n                    error = (\n                        \"Ctypes does not support adding additional \"\n                        'specifiers to typedefs, such as \"%s\"' % name\n                    )\n                t = CtypesTypedef(name)\n                t.error(error, cls=\"unsupported-type\")\n\n            if declarator and declarator.bitfield:\n                t = CtypesBitfield(t, declarator.bitfield)\n\n        qualifiers = []\n        qualifiers.extend(typ.qualifiers)\n        while declarator and declarator.pointer:\n            if declarator.parameters is not None:\n                variadic = \"...\" in declarator.parameters\n\n                params = []\n                for param in declarator.parameters:\n                    if param == \"...\":\n                        break\n                    param_name = get_decl_id(param.declarator)\n                    ct = self.get_ctypes_type(param.type, param.declarator)\n                    ct.identifier = param_name\n                    params.append(ct)\n                t = CtypesFunction(t, params, variadic)\n\n            a = declarator.array\n            while a:\n                t = CtypesArray(t, a.size)\n                a = a.array\n\n            qualifiers.extend(declarator.qualifiers)\n\n            t = CtypesPointer(t, tuple(typ.qualifiers) + tuple(declarator.qualifiers))\n\n            declarator = declarator.pointer\n\n        if declarator and declarator.parameters is not None:\n            variadic = \"...\" in declarator.parameters\n\n            params = []\n            for param in declarator.parameters:\n                if param == \"...\":\n                    break\n                param_name = get_decl_id(param.declarator)\n                ct = self.get_ctypes_type(param.type, param.declarator)\n                ct.identifier = param_name\n                params.append(ct)\n            t = CtypesFunction(t, params, variadic, declarator.attrib)\n\n        if 
declarator:\n            a = declarator.array\n            while a:\n                t = CtypesArray(t, a.size)\n                a = a.array\n\n        if (\n            isinstance(t, CtypesPointer)\n            and isinstance(t.destination, CtypesSimple)\n            and t.destination.name == \"char\"\n            and t.destination.signed\n        ):\n            t = CtypesSpecial(\"String\")\n\n        return t\n\n    def handle_declaration(self, declaration, filename, lineno):\n        t = self.get_ctypes_type(declaration.type, declaration.declarator)\n\n        if type(t) in (CtypesStruct, CtypesEnum):\n            self.handle_ctypes_new_type(remove_function_pointer(t), filename, lineno)\n\n        declarator = declaration.declarator\n        if declarator is None:\n            # XXX TEMPORARY while struct with no typedef not filled in\n            return\n        while declarator.pointer:\n            declarator = declarator.pointer\n        name = declarator.identifier\n        if declaration.storage == \"typedef\":\n            self.handle_ctypes_typedef(name, remove_function_pointer(t), filename, lineno)\n        elif type(t) == CtypesFunction:\n            attrib = Attrib(t.attrib)\n            attrib.update(declaration.attrib)\n            self.handle_ctypes_function(\n                name, t.restype, t.argtypes, t.errcheck, t.variadic, attrib, filename, lineno\n            )\n        elif declaration.storage != \"static\":\n            self.handle_ctypes_variable(name, t, filename, lineno)\n\n    # ctypes parser interface.  Override these methods in your subclass.\n\n    def handle_ctypes_new_type(self, ctype, filename, lineno):\n        pass\n\n    def handle_ctypes_typedef(self, name, ctype, filename, lineno):\n        pass\n\n    def handle_ctypes_function(\n        self, name, restype, argtypes, errcheck, variadic, attrib, filename, lineno\n    ):\n        pass\n\n    def handle_ctypes_variable(self, name, ctype, filename, lineno):\n        pass\n"
  },
  {
    "path": "ctypesgen/parser/datacollectingparser.py",
    "content": "\"\"\"\nDataCollectingParser subclasses ctypesparser.CtypesParser and builds Description\nobjects from the CtypesType objects and other information from CtypesParser.\nAfter parsing is complete, a DescriptionCollection object can be retrieved by\ncalling DataCollectingParser.data().\n\"\"\"\n\nimport os\nfrom tempfile import mkstemp\n\nfrom ctypesgen.ctypedescs import CtypesEnum, CtypesType, CtypesTypeVisitor\nfrom ctypesgen.descriptions import (\n    ConstantDescription,\n    DescriptionCollection,\n    EnumDescription,\n    FunctionDescription,\n    MacroDescription,\n    StructDescription,\n    TypedefDescription,\n    UndefDescription,\n    VariableDescription,\n)\nfrom ctypesgen.expressions import ConstantExpressionNode\nfrom ctypesgen.messages import error_message, status_message\nfrom ctypesgen.parser import ctypesparser\n\n\nclass DataCollectingParser(ctypesparser.CtypesParser, CtypesTypeVisitor):\n    \"\"\"Main class for the Parser component. Steps for use:\n    p=DataCollectingParser(names_of_header_files,options)\n    p.parse()\n    data=p.data() #A dictionary of constants, enums, structs, functions, etc.\n    \"\"\"\n\n    def __init__(self, headers, options):\n        super(DataCollectingParser, self).__init__(options)\n        self.headers = headers\n        self.options = options\n\n        self.constants = []\n        self.typedefs = []\n        self.structs = []\n        self.enums = []\n        self.functions = []\n        self.variables = []\n        self.macros = []\n\n        self.all = []\n        self.output_order = []\n\n        # NULL is a useful macro to have defined\n        null = ConstantExpressionNode(None)\n        nullmacro = ConstantDescription(\"NULL\", null, (\"<built-in>\", 1))\n        self.constants.append(nullmacro)\n        self.all.append(nullmacro)\n        self.output_order.append((\"constant\", nullmacro))\n\n        # A list of tuples describing macros; saved to be processed after\n        # everything 
else has been parsed\n        self.saved_macros = []\n        # A set of structs that are already known\n        self.already_seen_structs = set()\n        # A dict of structs that have only been seen in opaque form\n        self.already_seen_opaque_structs = {}\n        # A set of enums that are already known\n        self.already_seen_enums = set()\n        # A dict of enums that have only been seen in opaque form\n        self.already_seen_opaque_enums = {}\n\n    def parse(self):\n        fd, fname = mkstemp(suffix=\".h\")\n        with os.fdopen(fd, \"w\") as f:\n            for header in self.options.other_headers:\n                f.write(\"#include <%s>\\n\" % header)\n            for header in self.headers:\n                f.write('#include \"%s\"\\n' % os.path.abspath(header))\n            f.flush()\n        try:\n            super(DataCollectingParser, self).parse(fname, self.options.debug_level)\n        finally:\n            os.unlink(fname)\n\n        for name, params, expr, (filename, lineno) in self.saved_macros:\n            self.handle_macro(name, params, expr, filename, lineno)\n\n    def handle_define_constant(self, name, expr, filename, lineno):\n        # Called by CParser\n        # Save to handle later\n        self.saved_macros.append((name, None, expr, (filename, lineno)))\n\n    def handle_define_unparseable(self, name, params, value, filename, lineno):\n        # Called by CParser\n        if params:\n            original_string = \"#define %s(%s) %s\" % (name, \",\".join(params), \" \".join(value))\n        else:\n            original_string = \"#define %s %s\" % (name, \" \".join(value))\n        macro = MacroDescription(name, params, None, src=(filename, lineno))\n        macro.error('Could not parse macro \"%s\"' % original_string, cls=\"macro\")\n        macro.original_string = original_string\n        self.macros.append(macro)\n        self.all.append(macro)\n        self.output_order.append((\"macro\", macro))\n\n    def 
handle_define_macro(self, name, params, expr, filename, lineno):\n        # Called by CParser\n        # Save to handle later\n        self.saved_macros.append((name, params, expr, (filename, lineno)))\n\n    def handle_undefine(self, macro, filename, lineno):\n        # save to handle later to get order correct\n        self.saved_macros.append((\"#undef\", None, macro, (filename, lineno)))\n\n    def handle_ctypes_typedef(self, name, ctype, filename, lineno):\n        # Called by CtypesParser\n        ctype.visit(self)\n\n        typedef = TypedefDescription(name, ctype, src=(filename, repr(lineno)))\n\n        self.typedefs.append(typedef)\n        self.all.append(typedef)\n        self.output_order.append((\"typedef\", typedef))\n\n    def handle_ctypes_new_type(self, ctype, filename, lineno):\n        # Called by CtypesParser\n        if isinstance(ctype, CtypesEnum):\n            self.handle_enum(ctype, filename, lineno)\n        else:\n            self.handle_struct(ctype, filename, lineno)\n\n    def handle_ctypes_function(\n        self, name, restype, argtypes, errcheck, variadic, attrib, filename, lineno\n    ):\n        # Called by CtypesParser\n        restype.visit(self)\n        for argtype in argtypes:\n            argtype.visit(self)\n\n        function = FunctionDescription(\n            name, restype, argtypes, errcheck, variadic, attrib, src=(filename, repr(lineno))\n        )\n\n        self.functions.append(function)\n        self.all.append(function)\n        self.output_order.append((\"function\", function))\n\n    def handle_ctypes_variable(self, name, ctype, filename, lineno):\n        # Called by CtypesParser\n        ctype.visit(self)\n\n        variable = VariableDescription(name, ctype, src=(filename, repr(lineno)))\n\n        self.variables.append(variable)\n        self.all.append(variable)\n        self.output_order.append((\"variable\", variable))\n\n    def handle_struct(self, ctypestruct, filename, lineno):\n        # Called from 
within DataCollectingParser\n\n        # When we find an opaque struct, we make a StructDescription for it\n        # and record it in self.already_seen_opaque_structs. If we later\n        # find a transparent struct with the same tag, we fill in the\n        # opaque struct with the information from the transparent struct and\n        # move the opaque struct to the end of the struct list.\n\n        name = \"%s %s\" % (ctypestruct.variety, ctypestruct.tag)\n\n        if name in self.already_seen_structs:\n            return\n\n        if ctypestruct.opaque:\n            if name not in self.already_seen_opaque_structs:\n                struct = StructDescription(\n                    ctypestruct.tag,\n                    ctypestruct.attrib,\n                    ctypestruct.variety,\n                    None,  # No members\n                    True,  # Opaque\n                    ctypestruct,\n                    src=(filename, str(lineno)),\n                )\n\n                self.already_seen_opaque_structs[name] = struct\n                self.structs.append(struct)\n                self.all.append(struct)\n                self.output_order.append((\"struct\", struct))\n\n        else:\n            for membername, ctype in ctypestruct.members:\n                ctype.visit(self)\n\n            if name in self.already_seen_opaque_structs:\n                # Fill in older version\n                struct = self.already_seen_opaque_structs[name]\n                struct.opaque = False\n                struct.members = ctypestruct.members\n                struct.ctype = ctypestruct\n                struct.src = ctypestruct.src\n\n                self.output_order.append((\"struct-body\", struct))\n\n                del self.already_seen_opaque_structs[name]\n\n            else:\n                struct = StructDescription(\n                    ctypestruct.tag,\n                    ctypestruct.attrib,\n                    ctypestruct.variety,\n                    
ctypestruct.members,\n                    False,  # Not opaque\n                    src=(filename, str(lineno)),\n                    ctype=ctypestruct,\n                )\n                self.structs.append(struct)\n                self.all.append(struct)\n                self.output_order.append((\"struct\", struct))\n                self.output_order.append((\"struct-body\", struct))\n\n            self.already_seen_structs.add(name)\n\n    def handle_enum(self, ctypeenum, filename, lineno):\n        # Called from within DataCollectingParser.\n\n        # Process for handling opaque enums is the same as process for opaque\n        # structs. See handle_struct() for more details.\n\n        tag = ctypeenum.tag\n        if tag in self.already_seen_enums:\n            return\n\n        if ctypeenum.opaque:\n            if tag not in self.already_seen_opaque_enums:\n                enum = EnumDescription(ctypeenum.tag, None, ctypeenum, src=(filename, str(lineno)))\n                enum.opaque = True\n\n                self.already_seen_opaque_enums[tag] = enum\n                self.enums.append(enum)\n                self.all.append(enum)\n                self.output_order.append((\"enum\", enum))\n\n        else:\n            if tag in self.already_seen_opaque_enums:\n                # Fill in older opaque version\n                enum = self.already_seen_opaque_enums[tag]\n                enum.opaque = False\n                enum.ctype = ctypeenum\n                enum.src = ctypeenum.src\n                enum.members = ctypeenum.enumerators\n\n                del self.already_seen_opaque_enums[tag]\n\n            else:\n                enum = EnumDescription(\n                    ctypeenum.tag,\n                    ctypeenum.enumerators,\n                    src=(filename, str(lineno)),\n                    ctype=ctypeenum,\n                )\n                enum.opaque = False\n\n                self.enums.append(enum)\n                self.all.append(enum)\n  
              self.output_order.append((\"enum\", enum))\n\n            self.already_seen_enums.add(tag)\n\n            for enumname, expr in ctypeenum.enumerators:\n                constant = ConstantDescription(enumname, expr, src=(filename, lineno))\n\n                self.constants.append(constant)\n                self.all.append(constant)\n                self.output_order.append((\"constant\", constant))\n\n    def handle_macro(self, name, params, expr, filename, lineno):\n        # Called from within DataCollectingParser\n        src = (filename, lineno)\n\n        if expr is None:\n            expr = ConstantExpressionNode(True)\n            constant = ConstantDescription(name, expr, src)\n            self.constants.append(constant)\n            self.all.append(constant)\n            return\n\n        expr.visit(self)\n\n        if isinstance(expr, CtypesType):\n            if params:\n                macro = MacroDescription(name, \"\", src)\n                macro.error(\n                    \"%s has parameters but evaluates to a type. 
\"\n                    \"Ctypesgen does not support it.\" % macro.casual_name(),\n                    cls=\"macro\",\n                )\n                self.macros.append(macro)\n                self.all.append(macro)\n                self.output_order.append((\"macro\", macro))\n\n            else:\n                typedef = TypedefDescription(name, expr, src)\n                self.typedefs.append(typedef)\n                self.all.append(typedef)\n                self.output_order.append((\"typedef\", typedef))\n\n        elif name == \"#undef\":\n            undef = UndefDescription(expr, src)\n            self.all.append(undef)\n            self.output_order.append((\"undef\", undef))\n        else:\n            macro = MacroDescription(name, params, expr, src)\n            self.macros.append(macro)\n            self.all.append(macro)\n            self.output_order.append((\"macro\", macro))\n\n        # Macros could possibly contain things like __FILE__, __LINE__, etc...\n        # This could be supported, but it would be a lot of work. 
It would\n        # probably also bloat the Preamble considerably.\n\n    def handle_error(self, message, filename, lineno):\n        # Called by CParser\n        error_message(\"%s:%d: %s\" % (filename, lineno, message), cls=\"cparser\")\n\n    def handle_pp_error(self, message):\n        # Called by PreprocessorParser\n        error_message(\"%s: %s\" % (self.options.cpp, message), cls=\"cparser\")\n\n    def handle_status(self, message):\n        # Called by CParser\n        status_message(message)\n\n    def visit_struct(self, struct):\n        self.handle_struct(struct, struct.src[0], struct.src[1])\n\n    def visit_enum(self, enum):\n        self.handle_enum(enum, enum.src[0], enum.src[1])\n\n    def data(self):\n        return DescriptionCollection(\n            self.constants,\n            self.typedefs,\n            self.structs,\n            self.enums,\n            self.functions,\n            self.variables,\n            self.macros,\n            self.all,\n            self.output_order,\n        )\n"
  },
  {
    "path": "ctypesgen/parser/lex.py",
    "content": "# -----------------------------------------------------------------------------\n# ply: lex.py\n#\n# Copyright (C) 2001-2018\n# David M. Beazley (Dabeaz LLC)\n# All rights reserved.\n#\n# Redistribution and use in source and binary forms, with or without\n# modification, are permitted provided that the following conditions are\n# met:\n#\n# * Redistributions of source code must retain the above copyright notice,\n#   this list of conditions and the following disclaimer.\n# * Redistributions in binary form must reproduce the above copyright notice,\n#   this list of conditions and the following disclaimer in the documentation\n#   and/or other materials provided with the distribution.\n# * Neither the name of the David Beazley or Dabeaz LLC may be used to\n#   endorse or promote products derived from this software without\n#  specific prior written permission.\n#\n# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n# \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n# A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT\n# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n# -----------------------------------------------------------------------------\n\n__version__    = '3.11'\n__tabversion__ = '3.10'\n\nimport re\nimport sys\nimport types\nimport copy\nimport os\nimport inspect\n\n# This tuple contains known string types\ntry:\n    # Python 2.6\n    StringTypes = (types.StringType, types.UnicodeType)\nexcept AttributeError:\n    # Python 3.0\n    StringTypes = (str, bytes)\n\n# This regular expression is used to match valid token names\n_is_identifier = re.compile(r'^[a-zA-Z0-9_]+$')\n\n# Exception thrown when invalid token encountered and no default error\n# handler is defined.\nclass LexError(Exception):\n    def __init__(self, message, s):\n        self.args = (message,)\n        self.text = s\n\n\n# Token class.  
This class is used to represent the tokens produced.\nclass LexToken(object):\n    def __str__(self):\n        return 'LexToken(%s,%r,%d,%d)' % (self.type, self.value, self.lineno, self.lexpos)\n\n    def __repr__(self):\n        return str(self)\n\n\n# This object is a stand-in for a logging object created by the\n# logging module.\n\nclass PlyLogger(object):\n    def __init__(self, f):\n        self.f = f\n\n    def critical(self, msg, *args, **kwargs):\n        self.f.write((msg % args) + '\\n')\n\n    def warning(self, msg, *args, **kwargs):\n        self.f.write('WARNING: ' + (msg % args) + '\\n')\n\n    def error(self, msg, *args, **kwargs):\n        self.f.write('ERROR: ' + (msg % args) + '\\n')\n\n    info = critical\n    debug = critical\n\n\n# Null logger is used when no output is generated. Does nothing.\nclass NullLogger(object):\n    def __getattribute__(self, name):\n        return self\n\n    def __call__(self, *args, **kwargs):\n        return self\n\n\n# -----------------------------------------------------------------------------\n#                        === Lexing Engine ===\n#\n# The following Lexer class implements the lexer runtime.   There are only\n# a few public methods and attributes:\n#\n#    input()          -  Store a new string in the lexer\n#    token()          -  Get the next token\n#    clone()          -  Clone the lexer\n#\n#    lineno           -  Current line number\n#    lexpos           -  Current position in the input string\n# -----------------------------------------------------------------------------\n\nclass Lexer:\n    def __init__(self):\n        self.lexre = None             # Master regular expression. 
This is a list of\n                                      # tuples (re, findex) where re is a compiled\n                                      # regular expression and findex is a list\n                                      # mapping regex group numbers to rules\n        self.lexretext = None         # Current regular expression strings\n        self.lexstatere = {}          # Dictionary mapping lexer states to master regexs\n        self.lexstateretext = {}      # Dictionary mapping lexer states to regex strings\n        self.lexstaterenames = {}     # Dictionary mapping lexer states to symbol names\n        self.lexstate = 'INITIAL'     # Current lexer state\n        self.lexstatestack = []       # Stack of lexer states\n        self.lexstateinfo = None      # State information\n        self.lexstateignore = {}      # Dictionary of ignored characters for each state\n        self.lexstateerrorf = {}      # Dictionary of error functions for each state\n        self.lexstateeoff = {}        # Dictionary of eof functions for each state\n        self.lexreflags = 0           # Optional re compile flags\n        self.lexdata = None           # Actual input data (as a string)\n        self.lexpos = 0               # Current position in input text\n        self.lexlen = 0               # Length of the input text\n        self.lexerrorf = None         # Error rule (if any)\n        self.lexeoff = None           # EOF rule (if any)\n        self.lextokens = None         # List of valid tokens\n        self.lexignore = ''           # Ignored characters\n        self.lexliterals = ''         # Literal characters that can be passed through\n        self.lexmodule = None         # Module\n        self.lineno = 1               # Current line number\n        self.lexoptimize = False      # Optimized mode\n\n    def clone(self, object=None):\n        c = copy.copy(self)\n\n        # If the object parameter has been supplied, it means we are attaching the\n        # lexer to a new 
object.  In this case, we have to rebind all methods in\n        # the lexstatere and lexstateerrorf tables.\n\n        if object:\n            newtab = {}\n            for key, ritem in self.lexstatere.items():\n                newre = []\n                for cre, findex in ritem:\n                    newfindex = []\n                    for f in findex:\n                        if not f or not f[0]:\n                            newfindex.append(f)\n                            continue\n                        newfindex.append((getattr(object, f[0].__name__), f[1]))\n                newre.append((cre, newfindex))\n                newtab[key] = newre\n            c.lexstatere = newtab\n            c.lexstateerrorf = {}\n            for key, ef in self.lexstateerrorf.items():\n                c.lexstateerrorf[key] = getattr(object, ef.__name__)\n            c.lexmodule = object\n        return c\n\n    # ------------------------------------------------------------\n    # writetab() - Write lexer information to a table file\n    # ------------------------------------------------------------\n    def writetab(self, lextab, outputdir=''):\n        if isinstance(lextab, types.ModuleType):\n            raise IOError(\"Won't overwrite existing lextab module\")\n        basetabmodule = lextab.split('.')[-1]\n        filename = os.path.join(outputdir, basetabmodule) + '.py'\n        with open(filename, 'w') as tf:\n            tf.write('# %s.py. This file automatically created by PLY (version %s). 
Don\\'t edit!\\n' % (basetabmodule, __version__))\n            tf.write('_tabversion   = %s\\n' % repr(__tabversion__))\n            tf.write('_lextokens    = set(%s)\\n' % repr(tuple(sorted(self.lextokens))))\n            tf.write('_lexreflags   = %s\\n' % repr(int(self.lexreflags)))\n            tf.write('_lexliterals  = %s\\n' % repr(self.lexliterals))\n            tf.write('_lexstateinfo = %s\\n' % repr(self.lexstateinfo))\n\n            # Rewrite the lexstatere table, replacing function objects with function names\n            tabre = {}\n            for statename, lre in self.lexstatere.items():\n                titem = []\n                for (pat, func), retext, renames in zip(lre, self.lexstateretext[statename], self.lexstaterenames[statename]):\n                    titem.append((retext, _funcs_to_names(func, renames)))\n                tabre[statename] = titem\n\n            tf.write('_lexstatere   = %s\\n' % repr(tabre))\n            tf.write('_lexstateignore = %s\\n' % repr(self.lexstateignore))\n\n            taberr = {}\n            for statename, ef in self.lexstateerrorf.items():\n                taberr[statename] = ef.__name__ if ef else None\n            tf.write('_lexstateerrorf = %s\\n' % repr(taberr))\n\n            tabeof = {}\n            for statename, ef in self.lexstateeoff.items():\n                tabeof[statename] = ef.__name__ if ef else None\n            tf.write('_lexstateeoff = %s\\n' % repr(tabeof))\n\n    # ------------------------------------------------------------\n    # readtab() - Read lexer information from a tab file\n    # ------------------------------------------------------------\n    def readtab(self, tabfile, fdict):\n        if isinstance(tabfile, types.ModuleType):\n            lextab = tabfile\n        else:\n            exec('import %s' % tabfile)\n            lextab = sys.modules[tabfile]\n\n        if getattr(lextab, '_tabversion', '0.0') != __tabversion__:\n            raise ImportError('Inconsistent PLY 
version')\n\n        self.lextokens      = lextab._lextokens\n        self.lexreflags     = lextab._lexreflags\n        self.lexliterals    = lextab._lexliterals\n        self.lextokens_all  = self.lextokens | set(self.lexliterals)\n        self.lexstateinfo   = lextab._lexstateinfo\n        self.lexstateignore = lextab._lexstateignore\n        self.lexstatere     = {}\n        self.lexstateretext = {}\n        for statename, lre in lextab._lexstatere.items():\n            titem = []\n            txtitem = []\n            for pat, func_name in lre:\n                titem.append((re.compile(pat, lextab._lexreflags), _names_to_funcs(func_name, fdict)))\n\n            self.lexstatere[statename] = titem\n            self.lexstateretext[statename] = txtitem\n\n        self.lexstateerrorf = {}\n        for statename, ef in lextab._lexstateerrorf.items():\n            self.lexstateerrorf[statename] = fdict[ef]\n\n        self.lexstateeoff = {}\n        for statename, ef in lextab._lexstateeoff.items():\n            self.lexstateeoff[statename] = fdict[ef]\n\n        self.begin('INITIAL')\n\n    # ------------------------------------------------------------\n    # input() - Push a new string into the lexer\n    # ------------------------------------------------------------\n    def input(self, s):\n        # Pull off the first character to see if s looks like a string\n        c = s[:1]\n        if not isinstance(c, StringTypes):\n            raise ValueError('Expected a string')\n        self.lexdata = s\n        self.lexpos = 0\n        self.lexlen = len(s)\n\n    # ------------------------------------------------------------\n    # begin() - Changes the lexing state\n    # ------------------------------------------------------------\n    def begin(self, state):\n        if state not in self.lexstatere:\n            raise ValueError('Undefined state')\n        self.lexre = self.lexstatere[state]\n        self.lexretext = self.lexstateretext[state]\n        self.lexignore 
= self.lexstateignore.get(state, '')\n        self.lexerrorf = self.lexstateerrorf.get(state, None)\n        self.lexeoff = self.lexstateeoff.get(state, None)\n        self.lexstate = state\n\n    # ------------------------------------------------------------\n    # push_state() - Changes the lexing state and saves old on stack\n    # ------------------------------------------------------------\n    def push_state(self, state):\n        self.lexstatestack.append(self.lexstate)\n        self.begin(state)\n\n    # ------------------------------------------------------------\n    # pop_state() - Restores the previous state\n    # ------------------------------------------------------------\n    def pop_state(self):\n        self.begin(self.lexstatestack.pop())\n\n    # ------------------------------------------------------------\n    # current_state() - Returns the current lexing state\n    # ------------------------------------------------------------\n    def current_state(self):\n        return self.lexstate\n\n    # ------------------------------------------------------------\n    # skip() - Skip ahead n characters\n    # ------------------------------------------------------------\n    def skip(self, n):\n        self.lexpos += n\n\n    # ------------------------------------------------------------\n    # opttoken() - Return the next token from the Lexer\n    #\n    # Note: This function has been carefully implemented to be as fast\n    # as possible.  
Don't make changes unless you really know what\n    # you are doing\n    # ------------------------------------------------------------\n    def token(self):\n        # Make local copies of frequently referenced attributes\n        lexpos    = self.lexpos\n        lexlen    = self.lexlen\n        lexignore = self.lexignore\n        lexdata   = self.lexdata\n\n        while lexpos < lexlen:\n            # This code provides some short-circuit code for whitespace, tabs, and other ignored characters\n            if lexdata[lexpos] in lexignore:\n                lexpos += 1\n                continue\n\n            # Look for a regular expression match\n            for lexre, lexindexfunc in self.lexre:\n                m = lexre.match(lexdata, lexpos)\n                if not m:\n                    continue\n\n                # Create a token for return\n                tok = LexToken()\n                tok.value = m.group()\n                tok.lineno = self.lineno\n                tok.lexpos = lexpos\n\n                i = m.lastindex\n                func, tok.type = lexindexfunc[i]\n\n                if not func:\n                    # If no token type was set, it's an ignored token\n                    if tok.type:\n                        self.lexpos = m.end()\n                        return tok\n                    else:\n                        lexpos = m.end()\n                        break\n\n                lexpos = m.end()\n\n                # If token is processed by a function, call it\n\n                tok.lexer = self      # Set additional attributes useful in token rules\n                self.lexmatch = m\n                self.lexpos = lexpos\n\n                newtok = func(tok)\n\n                # Every function must return a token, if nothing, we just move to next token\n                if not newtok:\n                    lexpos    = self.lexpos         # This is here in case user has updated lexpos.\n                    lexignore = self.lexignore 
     # This is here in case there was a state change\n                    break\n\n                # Verify type of the token.  If not in the token map, raise an error\n                if not self.lexoptimize:\n                    if newtok.type not in self.lextokens_all:\n                        raise LexError(\"%s:%d: Rule '%s' returned an unknown token type '%s'\" % (\n                            func.__code__.co_filename, func.__code__.co_firstlineno,\n                            func.__name__, newtok.type), lexdata[lexpos:])\n\n                return newtok\n            else:\n                # No match, see if in literals\n                if lexdata[lexpos] in self.lexliterals:\n                    tok = LexToken()\n                    tok.value = lexdata[lexpos]\n                    tok.lineno = self.lineno\n                    tok.type = tok.value\n                    tok.lexpos = lexpos\n                    self.lexpos = lexpos + 1\n                    return tok\n\n                # No match. Call t_error() if defined.\n                if self.lexerrorf:\n                    tok = LexToken()\n                    tok.value = self.lexdata[lexpos:]\n                    tok.lineno = self.lineno\n                    tok.type = 'error'\n                    tok.lexer = self\n                    tok.lexpos = lexpos\n                    self.lexpos = lexpos\n                    newtok = self.lexerrorf(tok)\n                    if lexpos == self.lexpos:\n                        # Error method didn't change text position at all. This is an error.\n                        raise LexError(\"Scanning error. 
Illegal character '%s'\" % (lexdata[lexpos]), lexdata[lexpos:])\n                    lexpos = self.lexpos\n                    if not newtok:\n                        continue\n                    return newtok\n\n                self.lexpos = lexpos\n                raise LexError(\"Illegal character '%s' at index %d\" % (lexdata[lexpos], lexpos), lexdata[lexpos:])\n\n        if self.lexeoff:\n            tok = LexToken()\n            tok.type = 'eof'\n            tok.value = ''\n            tok.lineno = self.lineno\n            tok.lexpos = lexpos\n            tok.lexer = self\n            self.lexpos = lexpos\n            newtok = self.lexeoff(tok)\n            return newtok\n\n        self.lexpos = lexpos + 1\n        if self.lexdata is None:\n            raise RuntimeError('No input string given with input()')\n        return None\n\n    # Iterator interface\n    def __iter__(self):\n        return self\n\n    def next(self):\n        t = self.token()\n        if t is None:\n            raise StopIteration\n        return t\n\n    __next__ = next\n\n# -----------------------------------------------------------------------------\n#                           ==== Lex Builder ===\n#\n# The functions and classes below are used to collect lexing information\n# and build a Lexer object from it.\n# -----------------------------------------------------------------------------\n\n# -----------------------------------------------------------------------------\n# _get_regex(func)\n#\n# Returns the regular expression assigned to a function either as a doc string\n# or as a .regex attribute attached by the @TOKEN decorator.\n# -----------------------------------------------------------------------------\ndef _get_regex(func):\n    return getattr(func, 'regex', func.__doc__)\n\n# -----------------------------------------------------------------------------\n# get_caller_module_dict()\n#\n# This function returns a dictionary containing all of the symbols defined within\n# a 
caller further down the call stack.  This is used to get the environment\n# associated with the yacc() call if none was provided.\n# -----------------------------------------------------------------------------\ndef get_caller_module_dict(levels):\n    f = sys._getframe(levels)\n    ldict = f.f_globals.copy()\n    if f.f_globals != f.f_locals:\n        ldict.update(f.f_locals)\n    return ldict\n\n# -----------------------------------------------------------------------------\n# _funcs_to_names()\n#\n# Given a list of regular expression functions, this converts it to a list\n# suitable for output to a table file\n# -----------------------------------------------------------------------------\ndef _funcs_to_names(funclist, namelist):\n    result = []\n    for f, name in zip(funclist, namelist):\n        if f and f[0]:\n            result.append((name, f[1]))\n        else:\n            result.append(f)\n    return result\n\n# -----------------------------------------------------------------------------\n# _names_to_funcs()\n#\n# Given a list of regular expression function names, this converts it back to\n# functions.\n# -----------------------------------------------------------------------------\ndef _names_to_funcs(namelist, fdict):\n    result = []\n    for n in namelist:\n        if n and n[0]:\n            result.append((fdict[n[0]], n[1]))\n        else:\n            result.append(n)\n    return result\n\n# -----------------------------------------------------------------------------\n# _form_master_re()\n#\n# This function takes a list of all of the regex components and attempts to\n# form the master regular expression.  
Given limitations in the Python re\n# module, it may be necessary to break the master regex into separate expressions.\n# -----------------------------------------------------------------------------\ndef _form_master_re(relist, reflags, ldict, toknames):\n    if not relist:\n        return []\n    regex = '|'.join(relist)\n    try:\n        lexre = re.compile(regex, reflags)\n\n        # Build the index to function map for the matching engine\n        lexindexfunc = [None] * (max(lexre.groupindex.values()) + 1)\n        lexindexnames = lexindexfunc[:]\n\n        for f, i in lexre.groupindex.items():\n            handle = ldict.get(f, None)\n            if type(handle) in (types.FunctionType, types.MethodType):\n                lexindexfunc[i] = (handle, toknames[f])\n                lexindexnames[i] = f\n            elif handle is not None:\n                lexindexnames[i] = f\n                if f.find('ignore_') > 0:\n                    lexindexfunc[i] = (None, None)\n                else:\n                    lexindexfunc[i] = (None, toknames[f])\n\n        return [(lexre, lexindexfunc)], [regex], [lexindexnames]\n    except Exception:\n        m = int(len(relist)/2)\n        if m == 0:\n            m = 1\n        llist, lre, lnames = _form_master_re(relist[:m], reflags, ldict, toknames)\n        rlist, rre, rnames = _form_master_re(relist[m:], reflags, ldict, toknames)\n        return (llist+rlist), (lre+rre), (lnames+rnames)\n\n# -----------------------------------------------------------------------------\n# def _statetoken(s,names)\n#\n# Given a declaration name s of the form \"t_\" and a dictionary whose keys are\n# state names, this function returns a tuple (states,tokenname) where states\n# is a tuple of state names and tokenname is the name of the token.  
For example,\n# calling this with s = \"t_foo_bar_SPAM\" might return (('foo','bar'),'SPAM')\n# -----------------------------------------------------------------------------\ndef _statetoken(s, names):\n    parts = s.split('_')\n    for i, part in enumerate(parts[1:], 1):\n        if part not in names and part != 'ANY':\n            break\n\n    if i > 1:\n        states = tuple(parts[1:i])\n    else:\n        states = ('INITIAL',)\n\n    if 'ANY' in states:\n        states = tuple(names)\n\n    tokenname = '_'.join(parts[i:])\n    return (states, tokenname)\n\n\n# -----------------------------------------------------------------------------\n# LexerReflect()\n#\n# This class represents information needed to build a lexer as extracted from a\n# user's input file.\n# -----------------------------------------------------------------------------\nclass LexerReflect(object):\n    def __init__(self, ldict, log=None, reflags=0):\n        self.ldict      = ldict\n        self.error_func = None\n        self.tokens     = []\n        self.reflags    = reflags\n        self.stateinfo  = {'INITIAL': 'inclusive'}\n        self.modules    = set()\n        self.error      = False\n        self.log        = PlyLogger(sys.stderr) if log is None else log\n\n    # Get all of the basic information\n    def get_all(self):\n        self.get_tokens()\n        self.get_literals()\n        self.get_states()\n        self.get_rules()\n\n    # Validate all of the information\n    def validate_all(self):\n        self.validate_tokens()\n        self.validate_literals()\n        self.validate_rules()\n        return self.error\n\n    # Get the tokens map\n    def get_tokens(self):\n        tokens = self.ldict.get('tokens', None)\n        if not tokens:\n            self.log.error('No token list is defined')\n            self.error = True\n            return\n\n        if not isinstance(tokens, (list, tuple)):\n            self.log.error('tokens must be a list or tuple')\n            
self.error = True\n            return\n\n        if not tokens:\n            self.log.error('tokens is empty')\n            self.error = True\n            return\n\n        self.tokens = tokens\n\n    # Validate the tokens\n    def validate_tokens(self):\n        terminals = {}\n        for n in self.tokens:\n            if not _is_identifier.match(n):\n                self.log.error(\"Bad token name '%s'\", n)\n                self.error = True\n            if n in terminals:\n                self.log.warning(\"Token '%s' multiply defined\", n)\n            terminals[n] = 1\n\n    # Get the literals specifier\n    def get_literals(self):\n        self.literals = self.ldict.get('literals', '')\n        if not self.literals:\n            self.literals = ''\n\n    # Validate literals\n    def validate_literals(self):\n        try:\n            for c in self.literals:\n                if not isinstance(c, StringTypes) or len(c) > 1:\n                    self.log.error('Invalid literal %s. Must be a single character', repr(c))\n                    self.error = True\n\n        except TypeError:\n            self.log.error('Invalid literals specification. literals must be a sequence of characters')\n            self.error = True\n\n    def get_states(self):\n        self.states = self.ldict.get('states', None)\n        # Build statemap\n        if self.states:\n            if not isinstance(self.states, (tuple, list)):\n                self.log.error('states must be defined as a tuple or list')\n                self.error = True\n            else:\n                for s in self.states:\n                    if not isinstance(s, tuple) or len(s) != 2:\n                        self.log.error(\"Invalid state specifier %s. 
Must be a tuple (statename,'exclusive|inclusive')\", repr(s))\n                        self.error = True\n                        continue\n                    name, statetype = s\n                    if not isinstance(name, StringTypes):\n                        self.log.error('State name %s must be a string', repr(name))\n                        self.error = True\n                        continue\n                    if not (statetype == 'inclusive' or statetype == 'exclusive'):\n                        self.log.error(\"State type for state %s must be 'inclusive' or 'exclusive'\", name)\n                        self.error = True\n                        continue\n                    if name in self.stateinfo:\n                        self.log.error(\"State '%s' already defined\", name)\n                        self.error = True\n                        continue\n                    self.stateinfo[name] = statetype\n\n    # Get all of the symbols with a t_ prefix and sort them into various\n    # categories (functions, strings, error functions, and ignore characters)\n\n    def get_rules(self):\n        tsymbols = [f for f in self.ldict if f[:2] == 't_']\n\n        # Now build up a list of functions and a list of strings\n        self.toknames = {}        # Mapping of symbols to token names\n        self.funcsym  = {}        # Symbols defined as functions\n        self.strsym   = {}        # Symbols defined as strings\n        self.ignore   = {}        # Ignore strings by state\n        self.errorf   = {}        # Error functions by state\n        self.eoff     = {}        # EOF functions by state\n\n        for s in self.stateinfo:\n            self.funcsym[s] = []\n            self.strsym[s] = []\n\n        if len(tsymbols) == 0:\n            self.log.error('No rules of the form t_rulename are defined')\n            self.error = True\n            return\n\n        for f in tsymbols:\n            t = self.ldict[f]\n            states, tokname = _statetoken(f, 
self.stateinfo)\n            self.toknames[f] = tokname\n\n            if hasattr(t, '__call__'):\n                if tokname == 'error':\n                    for s in states:\n                        self.errorf[s] = t\n                elif tokname == 'eof':\n                    for s in states:\n                        self.eoff[s] = t\n                elif tokname == 'ignore':\n                    line = t.__code__.co_firstlineno\n                    file = t.__code__.co_filename\n                    self.log.error(\"%s:%d: Rule '%s' must be defined as a string\", file, line, t.__name__)\n                    self.error = True\n                else:\n                    for s in states:\n                        self.funcsym[s].append((f, t))\n            elif isinstance(t, StringTypes):\n                if tokname == 'ignore':\n                    for s in states:\n                        self.ignore[s] = t\n                    if '\\\\' in t:\n                        self.log.warning(\"%s contains a literal backslash '\\\\'\", f)\n\n                elif tokname == 'error':\n                    self.log.error(\"Rule '%s' must be defined as a function\", f)\n                    self.error = True\n                else:\n                    for s in states:\n                        self.strsym[s].append((f, t))\n            else:\n                self.log.error('%s not defined as a function or string', f)\n                self.error = True\n\n        # Sort the functions by line number\n        for f in self.funcsym.values():\n            f.sort(key=lambda x: x[1].__code__.co_firstlineno)\n\n        # Sort the strings by regular expression length\n        for s in self.strsym.values():\n            s.sort(key=lambda x: len(x[1]), reverse=True)\n\n    # Validate all of the t_rules collected\n    def validate_rules(self):\n        for state in self.stateinfo:\n            # Validate all rules defined by functions\n\n            for fname, f in self.funcsym[state]:\n   
             line = f.__code__.co_firstlineno\n                file = f.__code__.co_filename\n                module = inspect.getmodule(f)\n                self.modules.add(module)\n\n                tokname = self.toknames[fname]\n                if isinstance(f, types.MethodType):\n                    reqargs = 2\n                else:\n                    reqargs = 1\n                nargs = f.__code__.co_argcount\n                if nargs > reqargs:\n                    self.log.error(\"%s:%d: Rule '%s' has too many arguments\", file, line, f.__name__)\n                    self.error = True\n                    continue\n\n                if nargs < reqargs:\n                    self.log.error(\"%s:%d: Rule '%s' requires an argument\", file, line, f.__name__)\n                    self.error = True\n                    continue\n\n                if not _get_regex(f):\n                    self.log.error(\"%s:%d: No regular expression defined for rule '%s'\", file, line, f.__name__)\n                    self.error = True\n                    continue\n\n                try:\n                    c = re.compile('(?P<%s>%s)' % (fname, _get_regex(f)), self.reflags)\n                    if c.match(''):\n                        self.log.error(\"%s:%d: Regular expression for rule '%s' matches empty string\", file, line, f.__name__)\n                        self.error = True\n                except re.error as e:\n                    self.log.error(\"%s:%d: Invalid regular expression for rule '%s'. %s\", file, line, f.__name__, e)\n                    if '#' in _get_regex(f):\n                        self.log.error(\"%s:%d. 
Make sure '#' in rule '%s' is escaped with '\\\\#'\", file, line, f.__name__)\n                    self.error = True\n\n            # Validate all rules defined by strings\n            for name, r in self.strsym[state]:\n                tokname = self.toknames[name]\n                if tokname == 'error':\n                    self.log.error(\"Rule '%s' must be defined as a function\", name)\n                    self.error = True\n                    continue\n\n                if tokname not in self.tokens and tokname.find('ignore_') < 0:\n                    self.log.error(\"Rule '%s' defined for an unspecified token %s\", name, tokname)\n                    self.error = True\n                    continue\n\n                try:\n                    c = re.compile('(?P<%s>%s)' % (name, r), self.reflags)\n                    if (c.match('')):\n                        self.log.error(\"Regular expression for rule '%s' matches empty string\", name)\n                        self.error = True\n                except re.error as e:\n                    self.log.error(\"Invalid regular expression for rule '%s'. 
%s\", name, e)\n                    if '#' in r:\n                        self.log.error(\"Make sure '#' in rule '%s' is escaped with '\\\\#'\", name)\n                    self.error = True\n\n            if not self.funcsym[state] and not self.strsym[state]:\n                self.log.error(\"No rules defined for state '%s'\", state)\n                self.error = True\n\n            # Validate the error function\n            efunc = self.errorf.get(state, None)\n            if efunc:\n                f = efunc\n                line = f.__code__.co_firstlineno\n                file = f.__code__.co_filename\n                module = inspect.getmodule(f)\n                self.modules.add(module)\n\n                if isinstance(f, types.MethodType):\n                    reqargs = 2\n                else:\n                    reqargs = 1\n                nargs = f.__code__.co_argcount\n                if nargs > reqargs:\n                    self.log.error(\"%s:%d: Rule '%s' has too many arguments\", file, line, f.__name__)\n                    self.error = True\n\n                if nargs < reqargs:\n                    self.log.error(\"%s:%d: Rule '%s' requires an argument\", file, line, f.__name__)\n                    self.error = True\n\n        for module in self.modules:\n            self.validate_module(module)\n\n    # -----------------------------------------------------------------------------\n    # validate_module()\n    #\n    # This checks to see if there are duplicated t_rulename() functions or strings\n    # in the parser input file.  
This is done using a simple regular expression\n    # match on each line in the source code of the given module.\n    # -----------------------------------------------------------------------------\n\n    def validate_module(self, module):\n        try:\n            lines, linen = inspect.getsourcelines(module)\n        except IOError:\n            return\n\n        fre = re.compile(r'\\s*def\\s+(t_[a-zA-Z_0-9]*)\\(')\n        sre = re.compile(r'\\s*(t_[a-zA-Z_0-9]*)\\s*=')\n\n        counthash = {}\n        linen += 1\n        for line in lines:\n            m = fre.match(line)\n            if not m:\n                m = sre.match(line)\n            if m:\n                name = m.group(1)\n                prev = counthash.get(name)\n                if not prev:\n                    counthash[name] = linen\n                else:\n                    filename = inspect.getsourcefile(module)\n                    self.log.error('%s:%d: Rule %s redefined. Previously defined on line %d', filename, linen, name, prev)\n                    self.error = True\n            linen += 1\n\n# -----------------------------------------------------------------------------\n# lex(module)\n#\n# Build all of the regular expression rules from definitions in the supplied module\n# -----------------------------------------------------------------------------\ndef lex(module=None, object=None, debug=False, optimize=False, lextab='lextab',\n        reflags=int(re.VERBOSE), nowarn=False, outputdir=None, debuglog=None, errorlog=None, cls=Lexer):  # <mod NL>\n\n    if lextab is None:\n        lextab = 'lextab'\n\n    global lexer\n\n    ldict = None\n    stateinfo  = {'INITIAL': 'inclusive'}\n    lexobj = cls()  # <mod NL>\n    lexobj.lexoptimize = optimize\n    global token, input\n\n    if errorlog is None:\n        errorlog = PlyLogger(sys.stderr)\n\n    if debug:\n        if debuglog is None:\n            debuglog = PlyLogger(sys.stderr)\n\n    # Get the module dictionary used for the 
lexer\n    if object:\n        module = object\n\n    # Get the module dictionary used for the parser\n    if module:\n        _items = [(k, getattr(module, k)) for k in dir(module)]\n        ldict = dict(_items)\n        # If no __file__ attribute is available, try to obtain it from the __module__ instead\n        if '__file__' not in ldict:\n            ldict['__file__'] = sys.modules[ldict['__module__']].__file__\n    else:\n        ldict = get_caller_module_dict(2)\n\n    # Determine if the module is package of a package or not.\n    # If so, fix the tabmodule setting so that tables load correctly\n    pkg = ldict.get('__package__')\n    if pkg and isinstance(lextab, str):\n        if '.' not in lextab:\n            lextab = pkg + '.' + lextab\n\n    # Collect parser information from the dictionary\n    linfo = LexerReflect(ldict, log=errorlog, reflags=reflags)\n    linfo.get_all()\n    if not optimize:\n        if linfo.validate_all():\n            raise SyntaxError(\"Can't build lexer\")\n\n    if optimize and lextab:\n        try:\n            lexobj.readtab(lextab, ldict)\n            token = lexobj.token\n            input = lexobj.input\n            lexer = lexobj\n            return lexobj\n\n        except ImportError:\n            pass\n\n    # Dump some basic debugging information\n    if debug:\n        debuglog.info('lex: tokens   = %r', linfo.tokens)\n        debuglog.info('lex: literals = %r', linfo.literals)\n        debuglog.info('lex: states   = %r', linfo.stateinfo)\n\n    # Build a dictionary of valid token names\n    lexobj.lextokens = set()\n    for n in linfo.tokens:\n        lexobj.lextokens.add(n)\n\n    # Get literals specification\n    if isinstance(linfo.literals, (list, tuple)):\n        lexobj.lexliterals = type(linfo.literals[0])().join(linfo.literals)\n    else:\n        lexobj.lexliterals = linfo.literals\n\n    lexobj.lextokens_all = lexobj.lextokens | set(lexobj.lexliterals)\n\n    # Get the stateinfo dictionary\n    stateinfo 
= linfo.stateinfo\n\n    regexs = {}\n    # Build the master regular expressions\n    for state in stateinfo:\n        regex_list = []\n\n        # Add rules defined by functions first\n        for fname, f in linfo.funcsym[state]:\n            regex_list.append('(?P<%s>%s)' % (fname, _get_regex(f)))\n            if debug:\n                debuglog.info(\"lex: Adding rule %s -> '%s' (state '%s')\", fname, _get_regex(f), state)\n\n        # Now add all of the simple rules\n        for name, r in linfo.strsym[state]:\n            regex_list.append('(?P<%s>%s)' % (name, r))\n            if debug:\n                debuglog.info(\"lex: Adding rule %s -> '%s' (state '%s')\", name, r, state)\n\n        regexs[state] = regex_list\n\n    # Build the master regular expressions\n\n    if debug:\n        debuglog.info('lex: ==== MASTER REGEXS FOLLOW ====')\n\n    for state in regexs:\n        lexre, re_text, re_names = _form_master_re(regexs[state], reflags, ldict, linfo.toknames)\n        lexobj.lexstatere[state] = lexre\n        lexobj.lexstateretext[state] = re_text\n        lexobj.lexstaterenames[state] = re_names\n        if debug:\n            for i, text in enumerate(re_text):\n                debuglog.info(\"lex: state '%s' : regex[%d] = '%s'\", state, i, text)\n\n    # For inclusive states, we need to add the regular expressions from the INITIAL state\n    for state, stype in stateinfo.items():\n        if state != 'INITIAL' and stype == 'inclusive':\n            lexobj.lexstatere[state].extend(lexobj.lexstatere['INITIAL'])\n            lexobj.lexstateretext[state].extend(lexobj.lexstateretext['INITIAL'])\n            lexobj.lexstaterenames[state].extend(lexobj.lexstaterenames['INITIAL'])\n\n    lexobj.lexstateinfo = stateinfo\n    lexobj.lexre = lexobj.lexstatere['INITIAL']\n    lexobj.lexretext = lexobj.lexstateretext['INITIAL']\n    lexobj.lexreflags = reflags\n\n    # Set up ignore variables\n    lexobj.lexstateignore = linfo.ignore\n    lexobj.lexignore = 
lexobj.lexstateignore.get('INITIAL', '')\n\n    # Set up error functions\n    lexobj.lexstateerrorf = linfo.errorf\n    lexobj.lexerrorf = linfo.errorf.get('INITIAL', None)\n    if not lexobj.lexerrorf:\n        errorlog.warning('No t_error rule is defined')\n\n    # Set up eof functions\n    lexobj.lexstateeoff = linfo.eoff\n    lexobj.lexeoff = linfo.eoff.get('INITIAL', None)\n\n    # Check state information for ignore and error rules\n    for s, stype in stateinfo.items():\n        if stype == 'exclusive':\n            if s not in linfo.errorf:\n                errorlog.warning(\"No error rule is defined for exclusive state '%s'\", s)\n            if s not in linfo.ignore and lexobj.lexignore:\n                errorlog.warning(\"No ignore rule is defined for exclusive state '%s'\", s)\n        elif stype == 'inclusive':\n            if s not in linfo.errorf:\n                linfo.errorf[s] = linfo.errorf.get('INITIAL', None)\n            if s not in linfo.ignore:\n                linfo.ignore[s] = linfo.ignore.get('INITIAL', '')\n\n    # Create global versions of the token() and input() functions\n    token = lexobj.token\n    input = lexobj.input\n    lexer = lexobj\n\n    # If in optimize mode, we write the lextab\n    if lextab and optimize:\n        if outputdir is None:\n            # If no output directory is set, the location of the output files\n            # is determined according to the following rules:\n            #     - If lextab specifies a package, files go into that package directory\n            #     - Otherwise, files go in the same directory as the specifying module\n            if isinstance(lextab, types.ModuleType):\n                srcfile = lextab.__file__\n            else:\n                if '.' 
not in lextab:\n                    srcfile = ldict['__file__']\n                else:\n                    parts = lextab.split('.')\n                    pkgname = '.'.join(parts[:-1])\n                    exec('import %s' % pkgname)\n                    srcfile = getattr(sys.modules[pkgname], '__file__', '')\n            outputdir = os.path.dirname(srcfile)\n        try:\n            lexobj.writetab(lextab, outputdir)\n            if lextab in sys.modules:\n                del sys.modules[lextab]\n        except IOError as e:\n            errorlog.warning(\"Couldn't write lextab module %r. %s\" % (lextab, e))\n\n    return lexobj\n\n# -----------------------------------------------------------------------------\n# runmain()\n#\n# This runs the lexer as a main program\n# -----------------------------------------------------------------------------\n\ndef runmain(lexer=None, data=None):\n    if not data:\n        try:\n            filename = sys.argv[1]\n            f = open(filename)\n            data = f.read()\n            f.close()\n        except IndexError:\n            sys.stdout.write('Reading from standard input (type EOF to end):\\n')\n            data = sys.stdin.read()\n\n    if lexer:\n        _input = lexer.input\n    else:\n        _input = input\n    _input(data)\n    if lexer:\n        _token = lexer.token\n    else:\n        _token = token\n\n    while True:\n        tok = _token()\n        if not tok:\n            break\n        sys.stdout.write('(%s,%r,%d,%d)\\n' % (tok.type, tok.value, tok.lineno, tok.lexpos))\n\n# -----------------------------------------------------------------------------\n# @TOKEN(regex)\n#\n# This decorator function can be used to set the regex expression on a function\n# when its docstring might need to be set in an alternative way\n# -----------------------------------------------------------------------------\n\ndef TOKEN(r):\n    def set_regex(f):\n        if hasattr(r, '__call__'):\n            f.regex = 
_get_regex(r)\n        else:\n            f.regex = r\n        return f\n    return set_regex\n\n# Alternative spelling of the TOKEN decorator\nToken = TOKEN\n"
  },
  {
    "path": "ctypesgen/parser/lextab.py",
    "content": "# lextab.py. This file automatically created by PLY (version 3.11). Don't edit!\n_tabversion   = '3.10'\n_lextokens    = set(('ADD_ASSIGN', 'AND', 'AND_ASSIGN', 'AND_OP', 'AUTO', 'BREAK', 'CASE', 'CHAR', 'CHARACTER_CONSTANT', 'COLON', 'COMMA', 'CONDOP', 'CONST', 'CONTINUE', 'DEC_OP', 'DEFAULT', 'DIVIDE', 'DIV_ASSIGN', 'DO', 'DOUBLE', 'ELLIPSIS', 'ELSE', 'ENUM', 'EQUALS', 'EQ_OP', 'EXTERN', 'FLOAT', 'FOR', 'F_CONST_1', 'F_CONST_2', 'F_CONST_3', 'F_CONST_4', 'F_CONST_5', 'F_CONST_6', 'GE_OP', 'GOTO', 'GT', 'IDENTIFIER', 'IF', 'INC_OP', 'INT', 'I_CONST_BIN', 'I_CONST_DEC', 'I_CONST_HEX', 'I_CONST_OCT', 'LBRACE', 'LBRACKET', 'LEFT_ASSIGN', 'LEFT_OP', 'LE_OP', 'LNOT', 'LONG', 'LPAREN', 'LT', 'MINUS', 'MOD', 'MOD_ASSIGN', 'MUL_ASSIGN', 'NE_OP', 'NOT', 'OR', 'OR_ASSIGN', 'OR_OP', 'PERIOD', 'PLUS', 'PP_DEFINE', 'PP_DEFINE_MACRO_NAME', 'PP_DEFINE_NAME', 'PP_END_DEFINE', 'PP_IDENTIFIER_PASTE', 'PP_MACRO_PARAM', 'PP_STRINGIFY', 'PP_UNDEFINE', 'PRAGMA', 'PRAGMA_END', 'PRAGMA_PACK', 'PTR_OP', 'RBRACE', 'RBRACKET', 'REGISTER', 'RESTRICT', 'RETURN', 'RIGHT_ASSIGN', 'RIGHT_OP', 'RPAREN', 'SEMI', 'SHORT', 'SIGNED', 'SIZEOF', 'STATIC', 'STRING_LITERAL', 'STRUCT', 'SUB_ASSIGN', 'SWITCH', 'TIMES', 'TYPEDEF', 'TYPE_NAME', 'UNION', 'UNSIGNED', 'VOID', 'VOLATILE', 'WHILE', 'XOR', 'XOR_ASSIGN', '_BOOL', '__ASM__', '__ATTRIBUTE__'))\n_lexreflags   = 64\n_lexliterals  = ''\n_lexstateinfo = {'INITIAL': 'inclusive', 'DEFINE': 'exclusive', 'PRAGMA': 'exclusive'}\n_lexstatere   = {'INITIAL': [('(?P<t_ANY_directive>\\\\#\\\\s+(?P<lineno>\\\\d+)\\\\s+\"(?P<filename>[^\"]+)\"[ \\\\d]*\\\\n)|(?P<t_ANY_f_const_1>(?P<sig>[0-9]+)(?P<exp>([Ee][+-]?[0-9]+))(f|F|l|L)?)', [None, ('t_ANY_directive', 'directive'), None, None, ('t_ANY_f_const_1', 'f_const_1'), None, None]), ('(?P<t_ANY_f_const_2>(?P<sig>[0-9]*\\\\.[0-9]+)(?P<exp>([Ee][+-]?[0-9]+)?)(f|F|l|L)?)', [None, ('t_ANY_f_const_2', 'f_const_2'), None, None]), 
('(?P<t_ANY_f_const_3>(?P<sig>[0-9]+\\\\.)(?P<exp>([Ee][+-]?[0-9]+)?)(f|F|l|L)?)', [None, ('t_ANY_f_const_3', 'f_const_3'), None, None]), ('(?P<t_ANY_f_const_4>(?P<hex>0[xX][a-fA-F0-9]+([Pp][+-]?[0-9]+))(f|F|l|L)?)', [None, ('t_ANY_f_const_4', 'f_const_4'), None]), ('(?P<t_ANY_f_const_5>(?P<hex>0[xX][a-fA-F0-9]*\\\\.[a-fA-F0-9]+([Pp][+-]?[0-9]+))(f|F|l|L)?)', [None, ('t_ANY_f_const_5', 'f_const_5'), None]), ('(?P<t_ANY_f_const_6>(?P<hex>0[xX][a-fA-F0-9]+\\\\.([Pp][+-]?[0-9]+))(f|F|l|L)?)|(?P<t_ANY_i_const_bin>(?P<p1>0[bB][0-1]*)(((u|U)(ll|LL|l|L)?)|((ll|LL|l|L)(u|U)?))?)', [None, ('t_ANY_f_const_6', 'f_const_6'), None, None, None, ('t_ANY_i_const_bin', 'i_const_bin'), None]), ('(?P<t_ANY_i_const_hex>(?P<p1>0[xX][a-fA-F0-9]+)(((u|U)(ll|LL|l|L)?)|((ll|LL|l|L)(u|U)?))?)', [None, ('t_ANY_i_const_hex', 'i_const_hex'), None]), ('(?P<t_ANY_i_const_dec>(?P<p1>[1-9][0-9]*)(((u|U)(ll|LL|l|L)?)|((ll|LL|l|L)(u|U)?))?)', [None, ('t_ANY_i_const_dec', 'i_const_dec'), None]), ('(?P<t_ANY_i_const_oct>0(?P<p1>[0-7]*)(((u|U)(ll|LL|l|L)?)|((ll|LL|l|L)(u|U)?))?)', [None, ('t_ANY_i_const_oct', 'i_const_oct'), None]), (\"(?P<t_ANY_character_constant>(u8|u|U|L)?'(?P<p1>\\\\\\\\.|[^\\\\\\\\'])+')\", [None, ('t_ANY_character_constant', 'character_constant'), None, None]), ('(?P<t_ANY_string_literal>\"([^\"\\\\\\\\\\\\n]|(\\\\\\\\[0-9a-zA-Z._~!=&\\\\^\\\\-\\\\\\\\?\\'\"]))*\")|(?P<t_INITIAL_identifier>[a-zA-Z_][a-zA-Z_0-9]*)|(?P<t_INITIAL_newline>\\\\n)|(?P<t_INITIAL_pp_undefine>\\\\#undef)', [None, ('t_ANY_string_literal', 'string_literal'), None, None, ('t_INITIAL_identifier', 'identifier'), ('t_INITIAL_newline', 'newline'), ('t_INITIAL_pp_undefine', 'pp_undefine')]), 
('(?P<t_INITIAL_pp_define>\\\\#define)|(?P<t_INITIAL_pragma>\\\\#pragma)|(?P<t_ANY_ELLIPSIS>\\\\.\\\\.\\\\.)|(?P<t_ANY_INC_OP>\\\\+\\\\+)|(?P<t_ANY_OR_OP>\\\\|\\\\|)|(?P<t_ANY_ADD_ASSIGN>\\\\+=)|(?P<t_ANY_LEFT_ASSIGN><<=)|(?P<t_ANY_MUL_ASSIGN>\\\\*=)|(?P<t_ANY_OR_ASSIGN>\\\\|=)|(?P<t_ANY_RIGHT_ASSIGN>>>=)|(?P<t_ANY_XOR_ASSIGN>\\\\^=)|(?P<t_ANY_AND_ASSIGN>&=)|(?P<t_ANY_AND_OP>&&)|(?P<t_ANY_CONDOP>\\\\?)|(?P<t_ANY_DEC_OP>--)|(?P<t_ANY_DIV_ASSIGN>/=)', [None, ('t_INITIAL_pp_define', 'pp_define'), ('t_INITIAL_pragma', 'pragma'), (None, 'ELLIPSIS'), (None, 'INC_OP'), (None, 'OR_OP'), (None, 'ADD_ASSIGN'), (None, 'LEFT_ASSIGN'), (None, 'MUL_ASSIGN'), (None, 'OR_ASSIGN'), (None, 'RIGHT_ASSIGN'), (None, 'XOR_ASSIGN'), (None, 'AND_ASSIGN'), (None, 'AND_OP'), (None, 'CONDOP'), (None, 'DEC_OP'), (None, 'DIV_ASSIGN')]), ('(?P<t_ANY_EQ_OP>==)|(?P<t_ANY_GE_OP>>=)|(?P<t_ANY_LBRACE>\\\\{)|(?P<t_ANY_LBRACKET>\\\\[)|(?P<t_ANY_LEFT_OP><<)|(?P<t_ANY_LE_OP><=)|(?P<t_ANY_LPAREN>\\\\()|(?P<t_ANY_MOD_ASSIGN>%=)|(?P<t_ANY_NE_OP>!=)|(?P<t_ANY_OR>\\\\|)|(?P<t_ANY_PERIOD>\\\\.)|(?P<t_ANY_PLUS>\\\\+)|(?P<t_ANY_PTR_OP>->)|(?P<t_ANY_RBRACE>\\\\})|(?P<t_ANY_RBRACKET>\\\\])|(?P<t_ANY_RIGHT_OP>>>)|(?P<t_ANY_RPAREN>\\\\))|(?P<t_ANY_SUB_ASSIGN>-=)|(?P<t_ANY_TIMES>\\\\*)|(?P<t_ANY_XOR>\\\\^)|(?P<t_ANY_AND>&)|(?P<t_ANY_COLON>:)|(?P<t_ANY_COMMA>,)|(?P<t_ANY_DIVIDE>/)|(?P<t_ANY_EQUALS>=)|(?P<t_ANY_GT>>)|(?P<t_ANY_LNOT>!)|(?P<t_ANY_LT><)|(?P<t_ANY_MINUS>-)|(?P<t_ANY_MOD>%)|(?P<t_ANY_NOT>~)|(?P<t_ANY_SEMI>;)', [None, (None, 'EQ_OP'), (None, 'GE_OP'), (None, 'LBRACE'), (None, 'LBRACKET'), (None, 'LEFT_OP'), (None, 'LE_OP'), (None, 'LPAREN'), (None, 'MOD_ASSIGN'), (None, 'NE_OP'), (None, 'OR'), (None, 'PERIOD'), (None, 'PLUS'), (None, 'PTR_OP'), (None, 'RBRACE'), (None, 'RBRACKET'), (None, 'RIGHT_OP'), (None, 'RPAREN'), (None, 'SUB_ASSIGN'), (None, 'TIMES'), (None, 'XOR'), (None, 'AND'), (None, 'COLON'), (None, 'COMMA'), (None, 'DIVIDE'), (None, 'EQUALS'), (None, 'GT'), (None, 'LNOT'), (None, 'LT'), (None, 
'MINUS'), (None, 'MOD'), (None, 'NOT'), (None, 'SEMI')])], 'DEFINE': [('(?P<t_ANY_directive>\\\\#\\\\s+(?P<lineno>\\\\d+)\\\\s+\"(?P<filename>[^\"]+)\"[ \\\\d]*\\\\n)', [None, ('t_ANY_directive', 'directive'), None, None]), ('(?P<t_ANY_f_const_1>(?P<sig>[0-9]+)(?P<exp>([Ee][+-]?[0-9]+))(f|F|l|L)?)', [None, ('t_ANY_f_const_1', 'f_const_1'), None, None]), ('(?P<t_ANY_f_const_2>(?P<sig>[0-9]*\\\\.[0-9]+)(?P<exp>([Ee][+-]?[0-9]+)?)(f|F|l|L)?)', [None, ('t_ANY_f_const_2', 'f_const_2'), None, None]), ('(?P<t_ANY_f_const_3>(?P<sig>[0-9]+\\\\.)(?P<exp>([Ee][+-]?[0-9]+)?)(f|F|l|L)?)|(?P<t_ANY_f_const_4>(?P<hex>0[xX][a-fA-F0-9]+([Pp][+-]?[0-9]+))(f|F|l|L)?)', [None, ('t_ANY_f_const_3', 'f_const_3'), None, None, None, None, ('t_ANY_f_const_4', 'f_const_4'), None]), ('(?P<t_ANY_f_const_5>(?P<hex>0[xX][a-fA-F0-9]*\\\\.[a-fA-F0-9]+([Pp][+-]?[0-9]+))(f|F|l|L)?)', [None, ('t_ANY_f_const_5', 'f_const_5'), None]), ('(?P<t_ANY_f_const_6>(?P<hex>0[xX][a-fA-F0-9]+\\\\.([Pp][+-]?[0-9]+))(f|F|l|L)?)', [None, ('t_ANY_f_const_6', 'f_const_6'), None]), ('(?P<t_ANY_i_const_bin>(?P<p1>0[bB][0-1]*)(((u|U)(ll|LL|l|L)?)|((ll|LL|l|L)(u|U)?))?)', [None, ('t_ANY_i_const_bin', 'i_const_bin'), None]), ('(?P<t_ANY_i_const_hex>(?P<p1>0[xX][a-fA-F0-9]+)(((u|U)(ll|LL|l|L)?)|((ll|LL|l|L)(u|U)?))?)', [None, ('t_ANY_i_const_hex', 'i_const_hex'), None]), ('(?P<t_ANY_i_const_dec>(?P<p1>[1-9][0-9]*)(((u|U)(ll|LL|l|L)?)|((ll|LL|l|L)(u|U)?))?)', [None, ('t_ANY_i_const_dec', 'i_const_dec'), None]), ('(?P<t_ANY_i_const_oct>0(?P<p1>[0-7]*)(((u|U)(ll|LL|l|L)?)|((ll|LL|l|L)(u|U)?))?)', [None, ('t_ANY_i_const_oct', 'i_const_oct'), None]), ('(?P<t_ANY_character_constant>(u8|u|U|L)?\\'(?P<p1>\\\\\\\\.|[^\\\\\\\\\\'])+\\')|(?P<t_ANY_string_literal>\"([^\"\\\\\\\\\\\\n]|(\\\\\\\\[0-9a-zA-Z._~!=&\\\\^\\\\-\\\\\\\\?\\'\"]))*\")|(?P<t_DEFINE_identifier>[a-zA-Z_][a-zA-Z_0-9]*)|(?P<t_DEFINE_newline>\\\\n)', [None, ('t_ANY_character_constant', 'character_constant'), None, None, ('t_ANY_string_literal', 'string_literal'), None, 
None, ('t_DEFINE_identifier', 'identifier'), ('t_DEFINE_newline', 'newline')]), ('(?P<t_DEFINE_pp_param_op>(\\\\#\\\\#)|(\\\\#))|(?P<t_ANY_ELLIPSIS>\\\\.\\\\.\\\\.)|(?P<t_ANY_INC_OP>\\\\+\\\\+)|(?P<t_ANY_OR_OP>\\\\|\\\\|)|(?P<t_ANY_ADD_ASSIGN>\\\\+=)|(?P<t_ANY_LEFT_ASSIGN><<=)|(?P<t_ANY_MUL_ASSIGN>\\\\*=)|(?P<t_ANY_OR_ASSIGN>\\\\|=)|(?P<t_ANY_RIGHT_ASSIGN>>>=)|(?P<t_ANY_XOR_ASSIGN>\\\\^=)|(?P<t_ANY_AND_ASSIGN>&=)|(?P<t_ANY_AND_OP>&&)|(?P<t_ANY_CONDOP>\\\\?)|(?P<t_ANY_DEC_OP>--)|(?P<t_ANY_DIV_ASSIGN>/=)|(?P<t_ANY_EQ_OP>==)', [None, ('t_DEFINE_pp_param_op', 'pp_param_op'), None, None, (None, 'ELLIPSIS'), (None, 'INC_OP'), (None, 'OR_OP'), (None, 'ADD_ASSIGN'), (None, 'LEFT_ASSIGN'), (None, 'MUL_ASSIGN'), (None, 'OR_ASSIGN'), (None, 'RIGHT_ASSIGN'), (None, 'XOR_ASSIGN'), (None, 'AND_ASSIGN'), (None, 'AND_OP'), (None, 'CONDOP'), (None, 'DEC_OP'), (None, 'DIV_ASSIGN'), (None, 'EQ_OP')]), ('(?P<t_ANY_GE_OP>>=)|(?P<t_ANY_LBRACE>\\\\{)|(?P<t_ANY_LBRACKET>\\\\[)|(?P<t_ANY_LEFT_OP><<)|(?P<t_ANY_LE_OP><=)|(?P<t_ANY_LPAREN>\\\\()|(?P<t_ANY_MOD_ASSIGN>%=)|(?P<t_ANY_NE_OP>!=)|(?P<t_ANY_OR>\\\\|)|(?P<t_ANY_PERIOD>\\\\.)|(?P<t_ANY_PLUS>\\\\+)|(?P<t_ANY_PTR_OP>->)|(?P<t_ANY_RBRACE>\\\\})|(?P<t_ANY_RBRACKET>\\\\])|(?P<t_ANY_RIGHT_OP>>>)|(?P<t_ANY_RPAREN>\\\\))|(?P<t_ANY_SUB_ASSIGN>-=)|(?P<t_ANY_TIMES>\\\\*)|(?P<t_ANY_XOR>\\\\^)|(?P<t_ANY_AND>&)|(?P<t_ANY_COLON>:)|(?P<t_ANY_COMMA>,)|(?P<t_ANY_DIVIDE>/)|(?P<t_ANY_EQUALS>=)|(?P<t_ANY_GT>>)|(?P<t_ANY_LNOT>!)|(?P<t_ANY_LT><)|(?P<t_ANY_MINUS>-)|(?P<t_ANY_MOD>%)|(?P<t_ANY_NOT>~)|(?P<t_ANY_SEMI>;)', [None, (None, 'GE_OP'), (None, 'LBRACE'), (None, 'LBRACKET'), (None, 'LEFT_OP'), (None, 'LE_OP'), (None, 'LPAREN'), (None, 'MOD_ASSIGN'), (None, 'NE_OP'), (None, 'OR'), (None, 'PERIOD'), (None, 'PLUS'), (None, 'PTR_OP'), (None, 'RBRACE'), (None, 'RBRACKET'), (None, 'RIGHT_OP'), (None, 'RPAREN'), (None, 'SUB_ASSIGN'), (None, 'TIMES'), (None, 'XOR'), (None, 'AND'), (None, 'COLON'), (None, 'COMMA'), (None, 'DIVIDE'), (None, 'EQUALS'), (None, 'GT'), 
(None, 'LNOT'), (None, 'LT'), (None, 'MINUS'), (None, 'MOD'), (None, 'NOT'), (None, 'SEMI')])], 'PRAGMA': [('(?P<t_ANY_directive>\\\\#\\\\s+(?P<lineno>\\\\d+)\\\\s+\"(?P<filename>[^\"]+)\"[ \\\\d]*\\\\n)', [None, ('t_ANY_directive', 'directive'), None, None]), ('(?P<t_ANY_f_const_1>(?P<sig>[0-9]+)(?P<exp>([Ee][+-]?[0-9]+))(f|F|l|L)?)', [None, ('t_ANY_f_const_1', 'f_const_1'), None, None]), ('(?P<t_ANY_f_const_2>(?P<sig>[0-9]*\\\\.[0-9]+)(?P<exp>([Ee][+-]?[0-9]+)?)(f|F|l|L)?)', [None, ('t_ANY_f_const_2', 'f_const_2'), None, None]), ('(?P<t_ANY_f_const_3>(?P<sig>[0-9]+\\\\.)(?P<exp>([Ee][+-]?[0-9]+)?)(f|F|l|L)?)|(?P<t_ANY_f_const_4>(?P<hex>0[xX][a-fA-F0-9]+([Pp][+-]?[0-9]+))(f|F|l|L)?)', [None, ('t_ANY_f_const_3', 'f_const_3'), None, None, None, None, ('t_ANY_f_const_4', 'f_const_4'), None]), ('(?P<t_ANY_f_const_5>(?P<hex>0[xX][a-fA-F0-9]*\\\\.[a-fA-F0-9]+([Pp][+-]?[0-9]+))(f|F|l|L)?)', [None, ('t_ANY_f_const_5', 'f_const_5'), None]), ('(?P<t_ANY_f_const_6>(?P<hex>0[xX][a-fA-F0-9]+\\\\.([Pp][+-]?[0-9]+))(f|F|l|L)?)', [None, ('t_ANY_f_const_6', 'f_const_6'), None]), ('(?P<t_ANY_i_const_bin>(?P<p1>0[bB][0-1]*)(((u|U)(ll|LL|l|L)?)|((ll|LL|l|L)(u|U)?))?)', [None, ('t_ANY_i_const_bin', 'i_const_bin'), None]), ('(?P<t_ANY_i_const_hex>(?P<p1>0[xX][a-fA-F0-9]+)(((u|U)(ll|LL|l|L)?)|((ll|LL|l|L)(u|U)?))?)', [None, ('t_ANY_i_const_hex', 'i_const_hex'), None]), ('(?P<t_ANY_i_const_dec>(?P<p1>[1-9][0-9]*)(((u|U)(ll|LL|l|L)?)|((ll|LL|l|L)(u|U)?))?)', [None, ('t_ANY_i_const_dec', 'i_const_dec'), None]), ('(?P<t_ANY_i_const_oct>0(?P<p1>[0-7]*)(((u|U)(ll|LL|l|L)?)|((ll|LL|l|L)(u|U)?))?)', [None, ('t_ANY_i_const_oct', 'i_const_oct'), None]), ('(?P<t_ANY_character_constant>(u8|u|U|L)?\\'(?P<p1>\\\\\\\\.|[^\\\\\\\\\\'])+\\')|(?P<t_ANY_string_literal>\"([^\"\\\\\\\\\\\\n]|(\\\\\\\\[0-9a-zA-Z._~!=&\\\\^\\\\-\\\\\\\\?\\'\"]))*\")|(?P<t_PRAGMA_pack>pack)|(?P<t_PRAGMA_newline>\\\\n)', [None, ('t_ANY_character_constant', 'character_constant'), None, None, ('t_ANY_string_literal', 
'string_literal'), None, None, ('t_PRAGMA_pack', 'pack'), ('t_PRAGMA_newline', 'newline')]), ('(?P<t_PRAGMA_identifier>[a-zA-Z_][a-zA-Z_0-9]*)|(?P<t_ANY_ELLIPSIS>\\\\.\\\\.\\\\.)|(?P<t_ANY_INC_OP>\\\\+\\\\+)|(?P<t_ANY_OR_OP>\\\\|\\\\|)|(?P<t_ANY_ADD_ASSIGN>\\\\+=)|(?P<t_ANY_LEFT_ASSIGN><<=)|(?P<t_ANY_MUL_ASSIGN>\\\\*=)|(?P<t_ANY_OR_ASSIGN>\\\\|=)|(?P<t_ANY_RIGHT_ASSIGN>>>=)|(?P<t_ANY_XOR_ASSIGN>\\\\^=)|(?P<t_ANY_AND_ASSIGN>&=)|(?P<t_ANY_AND_OP>&&)|(?P<t_ANY_CONDOP>\\\\?)|(?P<t_ANY_DEC_OP>--)|(?P<t_ANY_DIV_ASSIGN>/=)|(?P<t_ANY_EQ_OP>==)', [None, ('t_PRAGMA_identifier', 'identifier'), (None, 'ELLIPSIS'), (None, 'INC_OP'), (None, 'OR_OP'), (None, 'ADD_ASSIGN'), (None, 'LEFT_ASSIGN'), (None, 'MUL_ASSIGN'), (None, 'OR_ASSIGN'), (None, 'RIGHT_ASSIGN'), (None, 'XOR_ASSIGN'), (None, 'AND_ASSIGN'), (None, 'AND_OP'), (None, 'CONDOP'), (None, 'DEC_OP'), (None, 'DIV_ASSIGN'), (None, 'EQ_OP')]), ('(?P<t_ANY_GE_OP>>=)|(?P<t_ANY_LBRACE>\\\\{)|(?P<t_ANY_LBRACKET>\\\\[)|(?P<t_ANY_LEFT_OP><<)|(?P<t_ANY_LE_OP><=)|(?P<t_ANY_LPAREN>\\\\()|(?P<t_ANY_MOD_ASSIGN>%=)|(?P<t_ANY_NE_OP>!=)|(?P<t_ANY_OR>\\\\|)|(?P<t_ANY_PERIOD>\\\\.)|(?P<t_ANY_PLUS>\\\\+)|(?P<t_ANY_PTR_OP>->)|(?P<t_ANY_RBRACE>\\\\})|(?P<t_ANY_RBRACKET>\\\\])|(?P<t_ANY_RIGHT_OP>>>)|(?P<t_ANY_RPAREN>\\\\))|(?P<t_ANY_SUB_ASSIGN>-=)|(?P<t_ANY_TIMES>\\\\*)|(?P<t_ANY_XOR>\\\\^)|(?P<t_ANY_AND>&)|(?P<t_ANY_COLON>:)|(?P<t_ANY_COMMA>,)|(?P<t_ANY_DIVIDE>/)|(?P<t_ANY_EQUALS>=)|(?P<t_ANY_GT>>)|(?P<t_ANY_LNOT>!)|(?P<t_ANY_LT><)|(?P<t_ANY_MINUS>-)|(?P<t_ANY_MOD>%)|(?P<t_ANY_NOT>~)|(?P<t_ANY_SEMI>;)', [None, (None, 'GE_OP'), (None, 'LBRACE'), (None, 'LBRACKET'), (None, 'LEFT_OP'), (None, 'LE_OP'), (None, 'LPAREN'), (None, 'MOD_ASSIGN'), (None, 'NE_OP'), (None, 'OR'), (None, 'PERIOD'), (None, 'PLUS'), (None, 'PTR_OP'), (None, 'RBRACE'), (None, 'RBRACKET'), (None, 'RIGHT_OP'), (None, 'RPAREN'), (None, 'SUB_ASSIGN'), (None, 'TIMES'), (None, 'XOR'), (None, 'AND'), (None, 'COLON'), (None, 'COMMA'), (None, 'DIVIDE'), (None, 'EQUALS'), (None, 'GT'), 
(None, 'LNOT'), (None, 'LT'), (None, 'MINUS'), (None, 'MOD'), (None, 'NOT'), (None, 'SEMI')])]}\n_lexstateignore = {'INITIAL': ' \\t\\x0b\\x0c\\r', 'DEFINE': ' \\t\\x0b\\x0c\\r', 'PRAGMA': ' \\t\\x0b\\x0c\\r'}\n_lexstateerrorf = {'DEFINE': 't_DEFINE_error', 'INITIAL': 't_INITIAL_error', 'PRAGMA': 't_PRAGMA_error'}\n_lexstateeoff = {}\n"
  },
  {
    "path": "ctypesgen/parser/parsetab.py",
    "content": "\n# new_parsetab.py\n# This file is automatically generated. Do not edit.\n# pylint: disable=W,C,R\n_tabversion = '3.10'\n\n_lr_method = 'LALR'\n\n_lr_signature = 'nonassocIFnonassocELSEADD_ASSIGN AND AND_ASSIGN AND_OP AUTO BREAK CASE CHAR CHARACTER_CONSTANT COLON COMMA CONDOP CONST CONTINUE DEC_OP DEFAULT DIVIDE DIV_ASSIGN DO DOUBLE ELLIPSIS ELSE ENUM EQUALS EQ_OP EXTERN FLOAT FOR F_CONST_1 F_CONST_2 F_CONST_3 F_CONST_4 F_CONST_5 F_CONST_6 GE_OP GOTO GT IDENTIFIER IF INC_OP INLINE INT I_CONST_BIN I_CONST_DEC I_CONST_HEX I_CONST_OCT LBRACE LBRACKET LEFT_ASSIGN LEFT_OP LE_OP LNOT LONG LPAREN LT MINUS MOD MOD_ASSIGN MUL_ASSIGN NE_OP NOT OR OR_ASSIGN OR_OP PERIOD PLUS PP_DEFINE PP_DEFINE_MACRO_NAME PP_DEFINE_NAME PP_END_DEFINE PP_IDENTIFIER_PASTE PP_MACRO_PARAM PP_STRINGIFY PP_UNDEFINE PRAGMA PRAGMA_END PRAGMA_PACK PTR_OP RBRACE RBRACKET REGISTER RESTRICT RETURN RIGHT_ASSIGN RIGHT_OP RPAREN SEMI SHORT SIGNED SIZEOF STATIC STRING_LITERAL STRUCT SUB_ASSIGN SWITCH TIMES TYPEDEF TYPE_NAME UNION UNSIGNED VOID VOLATILE WHILE XOR XOR_ASSIGN _BOOL _NORETURN __ASM__ __ATTRIBUTE__ translation_unit :\\n                         | translation_unit external_declaration\\n                         | translation_unit directive\\n     identifier : IDENTIFIER\\n                   | IDENTIFIER PP_IDENTIFIER_PASTE identifier\\n                   | PP_MACRO_PARAM PP_IDENTIFIER_PASTE identifier\\n                   | IDENTIFIER PP_IDENTIFIER_PASTE PP_MACRO_PARAM\\n                   | PP_MACRO_PARAM PP_IDENTIFIER_PASTE PP_MACRO_PARAM\\n     constant : I_CONST_HEX\\n                 | I_CONST_DEC\\n                 | I_CONST_OCT\\n                 | I_CONST_BIN\\n     constant : F_CONST_1\\n                 | F_CONST_2\\n                 | F_CONST_3\\n                 | F_CONST_4\\n                 | F_CONST_5\\n                 | F_CONST_6\\n     constant : CHARACTER_CONSTANT\\n     string_literal : STRING_LITERAL\\n     multi_string_literal : string_literal\\n               
              | macro_param\\n                             | multi_string_literal string_literal\\n                             | multi_string_literal macro_param\\n     macro_param : PP_MACRO_PARAM\\n                    | PP_STRINGIFY PP_MACRO_PARAM\\n     primary_expression : identifier\\n                           | constant\\n                           | multi_string_literal\\n                           | LPAREN expression RPAREN\\n     postfix_expression : primary_expression\\n                           | postfix_expression LBRACKET expression RBRACKET\\n                           | postfix_expression LPAREN RPAREN\\n                           | postfix_expression LPAREN argument_expression_list RPAREN\\n                           | postfix_expression PERIOD IDENTIFIER\\n                           | postfix_expression PTR_OP IDENTIFIER\\n                           | postfix_expression INC_OP\\n                           | postfix_expression DEC_OP\\n     argument_expression_list : assignment_expression\\n                                 | argument_expression_list COMMA assignment_expression\\n                                 | type_name\\n                                 | argument_expression_list COMMA type_name\\n     asm_expression : __ASM__ volatile_opt LPAREN string_literal RPAREN\\n                       | __ASM__ volatile_opt LPAREN string_literal COLON str_opt_expr_pair_list RPAREN\\n                       | __ASM__ volatile_opt LPAREN string_literal COLON str_opt_expr_pair_list COLON str_opt_expr_pair_list RPAREN\\n                       | __ASM__ volatile_opt LPAREN string_literal COLON str_opt_expr_pair_list COLON str_opt_expr_pair_list COLON str_opt_expr_pair_list RPAREN\\n     str_opt_expr_pair_list :\\n                               | str_opt_expr_pair\\n                               | str_opt_expr_pair_list COMMA str_opt_expr_pair\\n     str_opt_expr_pair : string_literal\\n                          | string_literal LPAREN expression RPAREN\\n  
   volatile_opt :\\n                     | VOLATILE\\n     unary_expression : postfix_expression\\n                         | INC_OP unary_expression\\n                         | DEC_OP unary_expression\\n                         | unary_operator cast_expression\\n                         | SIZEOF unary_expression\\n                         | SIZEOF LPAREN type_name RPAREN\\n                         | asm_expression\\n     unary_operator : AND\\n                       | TIMES\\n                       | PLUS\\n                       | MINUS\\n                       | NOT\\n                       | LNOT\\n     cast_expression : unary_expression\\n                        | LPAREN type_name RPAREN cast_expression\\n     multiplicative_expression : cast_expression\\n                                  | multiplicative_expression TIMES cast_expression\\n                                  | multiplicative_expression DIVIDE cast_expression\\n                                  | multiplicative_expression MOD cast_expression\\n     additive_expression : multiplicative_expression\\n                            | additive_expression PLUS multiplicative_expression\\n                            | additive_expression MINUS multiplicative_expression\\n     shift_expression : additive_expression\\n                         | shift_expression LEFT_OP additive_expression\\n                         | shift_expression RIGHT_OP additive_expression\\n     relational_expression : shift_expression\\n                              | relational_expression LT shift_expression\\n                              | relational_expression GT shift_expression\\n                              | relational_expression LE_OP shift_expression\\n                              | relational_expression GE_OP shift_expression\\n     equality_expression : relational_expression\\n                            | equality_expression EQ_OP relational_expression\\n                            | equality_expression NE_OP 
relational_expression\\n     and_expression : equality_expression\\n                       | and_expression AND equality_expression\\n     exclusive_or_expression : and_expression\\n                                | exclusive_or_expression XOR and_expression\\n     inclusive_or_expression : exclusive_or_expression\\n                                | inclusive_or_expression OR exclusive_or_expression\\n     logical_and_expression : inclusive_or_expression\\n                               | logical_and_expression AND_OP inclusive_or_expression\\n     logical_or_expression : logical_and_expression\\n                              | logical_or_expression OR_OP logical_and_expression\\n     conditional_expression : logical_or_expression\\n                               | logical_or_expression CONDOP expression COLON conditional_expression\\n     assignment_expression : conditional_expression\\n                              | unary_expression assignment_operator assignment_expression\\n     assignment_operator : EQUALS\\n                            | MUL_ASSIGN\\n                            | DIV_ASSIGN\\n                            | MOD_ASSIGN\\n                            | ADD_ASSIGN\\n                            | SUB_ASSIGN\\n                            | LEFT_ASSIGN\\n                            | RIGHT_ASSIGN\\n                            | AND_ASSIGN\\n                            | XOR_ASSIGN\\n                            | OR_ASSIGN\\n     expression : assignment_expression\\n                   | expression COMMA assignment_expression\\n     constant_expression : conditional_expression\\n     declaration : declaration_impl SEMI\\n     declaration_impl : declaration_specifier_list\\n                         | declaration_specifier_list init_declarator_list\\n     declaration_specifier_list : gcc_attributes declaration_specifier gcc_attributes\\n                                   | declaration_specifier_list declaration_specifier gcc_attributes\\n     
declaration_specifier : storage_class_specifier\\n                              | type_specifier\\n                              | type_qualifier\\n                              | function_specifier\\n     init_declarator_list : init_declarator\\n                             | init_declarator_list COMMA init_declarator\\n     init_declarator : declarator gcc_attributes\\n                        | declarator gcc_attributes EQUALS initializer\\n     storage_class_specifier : TYPEDEF\\n                                | EXTERN\\n                                | STATIC\\n                                | AUTO\\n                                | REGISTER\\n     type_specifier : VOID\\n                       | _BOOL\\n                       | CHAR\\n                       | SHORT\\n                       | INT\\n                       | LONG\\n                       | FLOAT\\n                       | DOUBLE\\n                       | SIGNED\\n                       | UNSIGNED\\n                       | struct_or_union_specifier\\n                       | enum_specifier\\n                       | TYPE_NAME\\n     struct_or_union_specifier : struct_or_union gcc_attributes IDENTIFIER LBRACE member_declaration_list RBRACE\\n                                  | struct_or_union gcc_attributes TYPE_NAME LBRACE member_declaration_list RBRACE\\n                                  | struct_or_union gcc_attributes LBRACE member_declaration_list RBRACE\\n                                  | struct_or_union gcc_attributes IDENTIFIER\\n                                  | struct_or_union gcc_attributes TYPE_NAME\\n     struct_or_union : STRUCT\\n                        | UNION\\n     gcc_attributes :\\n                       | gcc_attributes gcc_attribute\\n     gcc_attribute : __ATTRIBUTE__ LPAREN LPAREN gcc_attrib_list RPAREN RPAREN\\n     gcc_attrib_list : gcc_attrib\\n                        | gcc_attrib_list COMMA gcc_attrib\\n     gcc_attrib :\\n                   | IDENTIFIER\\n     
              | IDENTIFIER LPAREN argument_expression_list RPAREN\\n     member_declaration_list : member_declaration\\n                                | member_declaration_list member_declaration\\n     member_declaration : specifier_qualifier_list member_declarator_list SEMI\\n                           | specifier_qualifier_list SEMI\\n     specifier_qualifier_list : gcc_attributes specifier_qualifier gcc_attributes\\n                                 | specifier_qualifier_list specifier_qualifier gcc_attributes\\n     specifier_qualifier : type_specifier\\n                            | type_qualifier\\n     member_declarator_list : member_declarator\\n                               | member_declarator_list COMMA member_declarator\\n     member_declarator : declarator gcc_attributes\\n                          | COLON constant_expression gcc_attributes\\n                          | declarator COLON constant_expression gcc_attributes\\n     enum_specifier : ENUM LBRACE enumerator_list RBRACE\\n                       | ENUM IDENTIFIER LBRACE enumerator_list RBRACE\\n                       | ENUM IDENTIFIER\\n     enumerator_list : enumerator_list_iso\\n                        | enumerator_list_iso COMMA\\n     enumerator_list_iso : enumerator\\n                            | enumerator_list_iso COMMA enumerator\\n     enumerator : IDENTIFIER\\n                   | IDENTIFIER EQUALS constant_expression\\n     type_qualifier : CONST\\n                       | VOLATILE\\n                       | RESTRICT\\n     function_specifier : INLINE\\n                           | _NORETURN\\n     declarator : pointer direct_declarator\\n                   | direct_declarator\\n     direct_declarator : IDENTIFIER\\n                          | LPAREN gcc_attributes declarator RPAREN\\n                          | direct_declarator LBRACKET constant_expression RBRACKET\\n                          | direct_declarator LBRACKET RBRACKET\\n                          | direct_declarator 
LPAREN parameter_type_list RPAREN\\n                          | direct_declarator LPAREN identifier_list RPAREN\\n                          | direct_declarator LPAREN RPAREN\\n     pointer : TIMES\\n                | TIMES type_qualifier_list\\n                | TIMES pointer\\n                | TIMES type_qualifier_list pointer\\n     type_qualifier_list : type_qualifier\\n                            | gcc_attribute\\n                            | type_qualifier_list type_qualifier\\n                            | type_qualifier_list gcc_attribute\\n     parameter_type_list : parameter_list\\n                            | parameter_list COMMA ELLIPSIS\\n     parameter_list : parameter_declaration\\n                       | parameter_list COMMA parameter_declaration\\n     parameter_declaration : declaration_specifier_list declarator gcc_attributes\\n                              | declaration_specifier_list abstract_declarator\\n                              | declaration_specifier_list\\n     identifier_list : IDENTIFIER\\n                        | identifier_list COMMA IDENTIFIER\\n     type_name : specifier_qualifier_list\\n                  | specifier_qualifier_list abstract_declarator\\n     abstract_declarator : pointer\\n                            | direct_abstract_declarator         gcc_attributes\\n                            | pointer direct_abstract_declarator gcc_attributes\\n     direct_abstract_declarator : LPAREN gcc_attributes abstract_declarator RPAREN\\n                                   | LBRACKET RBRACKET\\n                                   | LBRACKET constant_expression RBRACKET\\n                                   | direct_abstract_declarator LBRACKET RBRACKET\\n                                   | direct_abstract_declarator LBRACKET constant_expression RBRACKET\\n                                   | LPAREN RPAREN\\n                                   | LPAREN parameter_type_list RPAREN\\n                                   | 
direct_abstract_declarator LPAREN RPAREN\\n                                   | direct_abstract_declarator LPAREN parameter_type_list RPAREN\\n     initializer : assignment_expression\\n                    | LBRACE initializer_list RBRACE\\n                    | LBRACE initializer_list COMMA RBRACE\\n     initializer_list : initializer\\n                         | initializer_list COMMA initializer\\n     statement : labeled_statement\\n                  | compound_statement\\n                  | expression_statement\\n                  | selection_statement\\n                  | iteration_statement\\n                  | jump_statement\\n     labeled_statement : IDENTIFIER COLON statement\\n                          | CASE constant_expression COLON statement\\n                          | DEFAULT COLON statement\\n     compound_statement : LBRACE RBRACE\\n                           | LBRACE statement_list RBRACE\\n                           | LBRACE declaration_list RBRACE\\n                           | LBRACE declaration_list statement_list RBRACE\\n     compound_statement : LBRACE error RBRACE\\n     declaration_list : declaration\\n                         | declaration_list declaration\\n     statement_list : statement\\n                       | statement_list statement\\n     expression_statement : SEMI\\n                             | expression SEMI\\n     expression_statement : error SEMI\\n     selection_statement : IF LPAREN expression RPAREN statement %prec IF\\n                            | IF LPAREN expression RPAREN statement ELSE statement\\n                            | SWITCH LPAREN expression RPAREN statement\\n     iteration_statement : WHILE LPAREN expression RPAREN statement\\n                            | DO statement WHILE LPAREN expression RPAREN SEMI\\n                            | FOR LPAREN expression_statement expression_statement RPAREN statement\\n                            | FOR LPAREN expression_statement expression_statement 
expression RPAREN statement\\n     jump_statement : GOTO IDENTIFIER SEMI\\n                       | CONTINUE SEMI\\n                       | BREAK SEMI\\n                       | RETURN SEMI\\n                       | RETURN expression SEMI\\n     external_declaration : declaration\\n                             | function_definition\\n     function_definition : declaration_specifier_list declarator declaration_list compound_statement\\n                            | declaration_specifier_list declarator compound_statement\\n                            | declarator declaration_list compound_statement\\n                            | declarator compound_statement\\n     directive : define\\n                  | undefine\\n                  | pragma\\n     define : PP_DEFINE PP_DEFINE_NAME PP_END_DEFINE\\n               | PP_DEFINE PP_DEFINE_NAME type_name PP_END_DEFINE\\n               | PP_DEFINE PP_DEFINE_NAME constant_expression PP_END_DEFINE\\n               | PP_DEFINE PP_DEFINE_MACRO_NAME LPAREN RPAREN PP_END_DEFINE\\n               | PP_DEFINE PP_DEFINE_MACRO_NAME LPAREN RPAREN constant_expression PP_END_DEFINE\\n               | PP_DEFINE PP_DEFINE_MACRO_NAME LPAREN macro_parameter_list RPAREN PP_END_DEFINE\\n               | PP_DEFINE PP_DEFINE_MACRO_NAME LPAREN macro_parameter_list RPAREN constant_expression PP_END_DEFINE\\n     define : PP_DEFINE error PP_END_DEFINE\\n     undefine : PP_UNDEFINE PP_DEFINE_NAME PP_END_DEFINE\\n     macro_parameter_list : PP_MACRO_PARAM\\n                             | macro_parameter_list COMMA PP_MACRO_PARAM\\n     pragma : pragma_pack\\n               | PRAGMA pragma_directive_list PRAGMA_END\\n     pragma_pack : PRAGMA PRAGMA_PACK LPAREN RPAREN PRAGMA_END\\n                    | PRAGMA PRAGMA_PACK LPAREN constant RPAREN PRAGMA_END\\n                    | PRAGMA PRAGMA_PACK LPAREN pragma_pack_stack_args RPAREN PRAGMA_END\\n     pragma_pack_stack_args : IDENTIFIER\\n                               | IDENTIFIER COMMA 
IDENTIFIER\\n                               | IDENTIFIER COMMA IDENTIFIER COMMA constant\\n                               | IDENTIFIER COMMA constant COMMA IDENTIFIER\\n                               | IDENTIFIER COMMA constant\\n     pragma_directive_list : pragma_directive\\n                              | pragma_directive_list pragma_directive\\n     pragma_directive : IDENTIFIER\\n                         | string_literal\\n    '\n    \n_lr_action_items = {'PP_DEFINE':([0,1,2,3,4,5,6,7,8,15,22,59,85,92,94,167,177,179,180,197,208,211,213,284,285,325,383,387,434,435,438,439,468,],[-1,12,-2,-3,-266,-267,-272,-273,-274,-286,-115,-271,-269,-270,-242,-275,-282,-283,-287,-268,-243,-244,-246,-276,-277,-245,-278,-288,-279,-280,-289,-290,-281,]),'PP_UNDEFINE':([0,1,2,3,4,5,6,7,8,15,22,59,85,92,94,167,177,179,180,197,208,211,213,284,285,325,383,387,434,435,438,439,468,],[-1,14,-2,-3,-266,-267,-272,-273,-274,-286,-115,-271,-269,-270,-242,-275,-282,-283,-287,-268,-243,-244,-246,-276,-277,-245,-278,-288,-279,-280,-289,-290,-281,]),'PRAGMA':([0,1,2,3,4,5,6,7,8,15,22,59,85,92,94,167,177,179,180,197,208,211,213,284,285,325,383,387,434,435,438,439,468,],[-1,16,-2,-3,-266,-267,-272,-273,-274,-286,-115,-271,-269,-270,-242,-275,-282,-283,-287,-268,-243,-244,-246,-276,-277,-245,-278,-288,-279,-280,-289,-290,-281,]),'TIMES':([0,1,2,3,4,5,6,7,8,10,13,15,20,22,25,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,59,60,61,62,63,66,73,74,75,78,80,82,83,85,87,88,91,92,93,94,95,96,98,99,100,101,102,103,104,105,106,107,111,114,119,122,124,125,126,127,128,129,130,132,133,134,135,136,137,138,140,142,143,144,146,147,148,149,150,151,152,153,154,155,156,157,158,165,167,170,173,174,175,177,179,180,183,193,195,196,197,198,200,202,208,209,211,212,213,214,215,218,219,220,221,224,225,227,229,230,231,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,251,252,253,254,255,256,257,258,259,262,263,264,265,266,268,269,270,271,272,273,274,275,276,277,278,279,280,28
1,282,284,285,287,290,291,292,293,294,295,312,315,319,321,323,325,326,327,328,329,330,333,334,338,339,340,345,349,350,356,357,367,368,369,370,371,372,375,377,382,383,385,387,398,402,408,412,413,414,415,416,417,418,419,420,421,422,423,424,434,435,438,439,444,446,447,449,451,453,454,455,456,458,463,468,471,479,481,482,490,491,492,493,495,502,504,],[-1,20,-2,-3,-266,-267,-272,-273,-274,20,-153,-286,20,-115,-153,-120,-121,-122,-123,-128,-129,-130,-131,-132,-133,-134,-135,-136,-137,-138,-139,-140,-141,-142,-143,-144,-145,-183,-184,-185,-186,-187,-271,-247,134,20,134,20,-20,-153,-154,134,20,-201,-202,-269,20,-119,-176,-270,-248,-242,134,134,-249,-233,-234,-235,-236,-237,-238,-251,-4,134,134,134,134,-67,-54,134,134,134,-69,134,-60,-31,-61,-62,-63,-64,-65,-66,-21,-27,-28,-29,-25,-9,-10,-11,-12,-13,-14,-15,-16,-17,-18,-19,-22,280,-275,20,-67,134,-4,-282,-283,-287,-118,20,-203,-204,-268,134,-149,-150,-243,-250,-244,134,-246,-253,134,134,-252,134,134,134,134,134,-262,-263,-264,134,-101,-102,-103,-104,-105,-106,-107,-108,-109,-110,-111,134,134,134,134,-37,-38,-55,134,-56,-57,-58,134,134,134,-23,-24,-25,134,134,-26,134,134,134,134,134,134,134,134,134,134,134,134,134,-276,-277,-153,-167,-168,-153,134,-153,134,-153,134,20,-174,134,-245,-239,-5,-7,134,-241,-30,134,134,-261,-265,-33,-35,-36,-8,-6,280,280,-70,-71,-72,-166,134,20,-165,-278,134,-288,20,-148,134,-175,-240,134,-68,134,134,134,134,134,-32,-34,134,-59,-279,-280,-289,-290,134,134,-146,20,134,-147,-254,-256,-257,134,-43,-281,-155,134,-259,134,-255,-258,-260,134,-44,-45,-46,]),'IDENTIFIER':([0,1,2,3,4,5,6,7,8,10,13,15,16,18,20,22,25,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,59,60,61,62,63,66,68,70,71,72,73,74,75,78,79,80,81,82,83,85,87,88,89,90,91,92,93,94,95,96,98,99,100,101,102,103,104,105,107,111,114,116,119,125,126,127,129,133,134,135,136,137,138,167,174,177,179,180,181,182,183,193,194,195,196,197,198,200,202,207,208,209,211,212,213,214,215,216,218,219,220,221,224,225,227
,229,230,231,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,249,250,254,258,259,262,266,267,268,270,271,272,273,274,275,276,277,278,279,280,281,282,284,285,287,290,291,293,294,295,303,307,311,312,315,319,321,322,323,325,326,329,330,334,338,339,340,372,375,382,383,385,387,390,398,402,408,412,413,414,416,417,418,419,420,423,434,435,438,439,443,444,446,447,449,451,453,454,455,456,458,468,470,471,479,481,482,490,491,492,493,],[-1,21,-2,-3,-266,-267,-272,-273,-274,21,-153,-286,71,21,-197,-115,-153,-120,-121,-122,-123,-128,-129,-130,-131,-132,-133,-134,-135,-136,-137,-138,-139,-140,-141,-142,-143,-144,-145,-183,-184,-185,-186,-187,-153,91,-151,-152,-271,-247,106,21,175,21,71,-296,-298,-299,-20,-153,-154,175,191,-198,-199,-201,-202,-269,21,-119,200,206,-176,-270,-248,-242,106,106,-249,-233,-234,-235,-236,-237,-238,-251,175,175,106,228,175,175,175,175,175,-61,-62,-63,-64,-65,-66,-275,175,-282,-283,-287,-297,302,-118,21,-200,-203,-204,-268,175,-149,-150,206,-243,-250,-244,106,-246,-253,106,175,106,-252,175,175,175,175,175,-262,-263,-264,175,-101,-102,-103,-104,-105,-106,-107,-108,-109,-110,-111,175,175,175,175,349,350,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,-276,-277,-153,-167,-168,175,-153,175,393,394,21,-153,175,21,-174,206,175,-245,-239,106,-241,175,175,-261,-265,-166,175,-165,-278,175,-288,440,21,-148,175,-175,-240,106,106,106,175,175,175,175,-279,-280,-289,-290,393,175,175,-146,21,175,-147,-254,-256,-257,106,-281,487,-155,106,-259,106,-255,-258,-260,175,]),'LPAREN':([0,1,2,3,4,5,6,7,8,10,13,15,18,19,20,21,22,25,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,59,60,61,62,63,64,66,69,73,74,75,76,77,78,80,81,82,83,85,87,88,91,92,93,94,95,96,98,99,100,101,102,103,104,105,106,107,110,111,112,113,114,115,119,124,125,126,127,129,132,133,134,135,136,137,138,139,140,142,143,144,146,147,148,149,150,151,152,153,154,155,156,157,158,167,170,174,175,177,179,180,183,184,186,188,193,194,195,196,19
7,198,200,202,208,209,211,212,213,214,215,218,219,220,221,224,225,227,229,230,231,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,251,252,254,258,259,260,261,262,263,264,265,266,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,284,285,287,288,289,290,291,292,293,294,295,298,304,305,306,311,312,315,319,321,323,325,326,327,328,329,330,333,334,337,338,339,340,345,349,350,356,357,372,373,375,377,378,380,382,383,385,387,393,398,402,408,412,413,414,416,417,418,419,420,421,422,423,427,429,432,433,434,435,438,439,444,446,447,449,451,453,454,455,456,458,465,466,467,468,471,479,481,482,483,490,491,492,493,],[-1,13,-2,-3,-266,-267,-272,-273,-274,13,-153,-286,13,79,-197,-190,-115,-153,-120,-121,-122,-123,-128,-129,-130,-131,-132,-133,-134,-135,-136,-137,-138,-139,-140,-141,-142,-143,-144,-145,-183,-184,-185,-186,-187,-271,-247,111,13,174,176,13,182,-20,-153,-154,184,79,174,-198,-199,-201,-202,-269,13,-119,-176,-270,-248,-242,111,111,-249,-233,-234,-235,-236,-237,-238,-251,-4,174,221,111,224,225,111,227,111,248,254,254,174,258,-31,-61,-62,-63,-64,-65,-66,-52,-21,-27,-28,-29,-25,-9,-10,-11,-12,-13,-14,-15,-16,-17,-18,-19,-22,-275,292,111,-4,-282,-283,-287,-118,303,-193,-196,312,-200,-203,-204,-268,111,-149,-150,-243,-250,-244,111,-246,-253,111,111,-252,111,111,111,111,111,-262,-263,-264,111,-101,-102,-103,-104,-105,-106,-107,-108,-109,-110,-111,111,174,111,111,-37,-38,111,111,174,353,-53,174,-23,-24,-25,174,174,-26,174,174,174,174,174,174,174,174,174,174,174,174,174,-276,-277,-153,292,376,-167,-168,-153,174,-153,174,-191,-192,-194,-195,312,-153,111,13,-174,174,-245,-239,-5,-7,111,-241,-30,174,418,111,-261,-265,-33,-35,-36,-8,-6,-166,376,174,292,-224,-220,-165,-278,174,-288,444,312,-148,174,-175,-240,111,111,111,111,111,174,-32,-34,111,-222,-226,-225,-221,-279,-280,-289,-290,111,111,-146,13,174,-147,-254,-256,-257,111,-223,-227,-219,-281,-155,111,-259,111,493,-255,-258,-260,111,]),'$end':([0,1,2,3,4,5,6,7,8,15,22,59,85,92,94,167,177,179,180,197,208,211,2
13,284,285,325,383,387,434,435,438,439,468,],[-1,0,-2,-3,-266,-267,-272,-273,-274,-286,-115,-271,-269,-270,-242,-275,-282,-283,-287,-268,-243,-244,-246,-276,-277,-245,-278,-288,-279,-280,-289,-290,-281,]),'__ATTRIBUTE__':([0,1,2,3,4,5,6,7,8,11,13,15,17,19,20,21,22,23,25,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,56,57,58,59,60,61,63,66,73,74,75,77,79,80,82,83,84,85,86,88,89,91,92,93,94,96,111,123,124,128,130,131,132,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,161,162,163,164,165,166,167,171,172,173,174,175,177,179,180,183,186,188,195,196,197,200,201,202,208,211,213,248,251,252,253,255,256,257,258,263,264,265,269,283,284,285,287,289,290,291,292,294,298,304,305,306,308,309,312,316,317,318,320,321,325,327,328,333,343,345,349,350,352,354,355,356,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373,374,376,377,378,380,382,383,387,397,398,401,402,403,405,407,409,412,415,421,422,423,424,426,427,429,432,433,434,435,438,439,444,447,448,450,452,453,460,463,465,466,467,468,471,477,478,489,495,502,504,],[-1,-153,-2,-3,-266,-267,-272,-273,-274,-153,-153,-286,76,-189,76,-190,-115,-153,-153,-120,-121,-122,-123,-128,-129,-130,-131,-132,-133,-134,-135,-136,-137,-138,-139,-140,-141,-142,-143,-144,-145,-183,-184,-185,-186,-187,-153,-151,-152,-153,-271,-247,-153,-153,76,-20,-153,-154,-188,-153,76,-201,-202,-153,-269,76,76,76,-176,-270,-248,-242,-153,-153,-97,-54,-69,-60,-95,-31,-21,-93,-27,-28,-29,-91,-25,-9,-10,-11,-12,-13,-14,-15,-16,-17,-18,-19,-22,-89,-87,-84,-79,-76,-73,-153,-275,-114,76,-67,-153,-4,-282,-283,-287,76,-193,-196,-203,-204,-268,-149,-153,-150,-243,-244,-246,-153,-37,-38,-55,-56,-57,-58,-153,-23,-24,-25,-26,76,-276,-277,-153,-153,-167,-168,-153,-153,-191,-192,-194,-195,-153,-153,-153,-153,-153,-161,-153,-174,-245,-5,-7,-30,-96,-33,-35,-36,-94,-92,-90,-8,-6,-88,-85,-86,-80,-81,-82,-83,-77,-78,-74,-75,-70,-71,-72,76,-153,76,-153,76,-224,-220,76,-278,-288,76,76,-153,-148,-162,-164
,-153,-153,-175,-68,-32,-34,-153,-59,76,-222,-226,-225,-221,-279,-280,-289,-290,-153,-146,-163,76,-153,-147,-98,-43,-223,-227,-219,-281,-155,-153,76,76,-44,-45,-46,]),'TYPEDEF':([0,1,2,3,4,5,6,7,8,10,11,15,17,19,21,22,23,25,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,58,59,60,61,62,74,75,77,79,84,85,86,88,91,92,93,94,96,167,177,179,180,183,186,188,193,197,200,202,208,211,213,284,285,292,298,304,305,306,308,312,321,325,376,377,383,387,398,402,412,434,435,438,439,447,453,468,471,],[-1,-153,-2,-3,-266,-267,-272,-273,-274,31,-153,-286,31,-189,-190,-115,-153,-153,-120,-121,-122,-123,-128,-129,-130,-131,-132,-133,-134,-135,-136,-137,-138,-139,-140,-141,-142,-143,-144,-145,-183,-184,-185,-186,-187,-153,-271,-247,-153,31,-153,-154,-188,-153,-153,-269,31,-119,-176,-270,-248,-242,-153,-275,-282,-283,-287,-118,-193,-196,31,-268,-149,-150,-243,-244,-246,-276,-277,-153,-191,-192,-194,-195,-153,-153,-174,-245,-153,31,-278,-288,31,-148,-175,-279,-280,-289,-290,-146,-147,-281,-155,]),'EXTERN':([0,1,2,3,4,5,6,7,8,10,11,15,17,19,21,22,23,25,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,58,59,60,61,62,74,75,77,79,84,85,86,88,91,92,93,94,96,167,177,179,180,183,186,188,193,197,200,202,208,211,213,284,285,292,298,304,305,306,308,312,321,325,376,377,383,387,398,402,412,434,435,438,439,447,453,468,471,],[-1,-153,-2,-3,-266,-267,-272,-273,-274,32,-153,-286,32,-189,-190,-115,-153,-153,-120,-121,-122,-123,-128,-129,-130,-131,-132,-133,-134,-135,-136,-137,-138,-139,-140,-141,-142,-143,-144,-145,-183,-184,-185,-186,-187,-153,-271,-247,-153,32,-153,-154,-188,-153,-153,-269,32,-119,-176,-270,-248,-242,-153,-275,-282,-283,-287,-118,-193,-196,32,-268,-149,-150,-243,-244,-246,-276,-277,-153,-191,-192,-194,-195,-153,-153,-174,-245,-153,32,-278,-288,32,-148,-175,-279,-280,-289,-290,-146,-147,-281,-155,]),'STATIC':([0,1,2,3,4,5,6,7,8,10,11,15,17,19,21,22,23,25,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,
51,52,53,58,59,60,61,62,74,75,77,79,84,85,86,88,91,92,93,94,96,167,177,179,180,183,186,188,193,197,200,202,208,211,213,284,285,292,298,304,305,306,308,312,321,325,376,377,383,387,398,402,412,434,435,438,439,447,453,468,471,],[-1,-153,-2,-3,-266,-267,-272,-273,-274,33,-153,-286,33,-189,-190,-115,-153,-153,-120,-121,-122,-123,-128,-129,-130,-131,-132,-133,-134,-135,-136,-137,-138,-139,-140,-141,-142,-143,-144,-145,-183,-184,-185,-186,-187,-153,-271,-247,-153,33,-153,-154,-188,-153,-153,-269,33,-119,-176,-270,-248,-242,-153,-275,-282,-283,-287,-118,-193,-196,33,-268,-149,-150,-243,-244,-246,-276,-277,-153,-191,-192,-194,-195,-153,-153,-174,-245,-153,33,-278,-288,33,-148,-175,-279,-280,-289,-290,-146,-147,-281,-155,]),'AUTO':([0,1,2,3,4,5,6,7,8,10,11,15,17,19,21,22,23,25,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,58,59,60,61,62,74,75,77,79,84,85,86,88,91,92,93,94,96,167,177,179,180,183,186,188,193,197,200,202,208,211,213,284,285,292,298,304,305,306,308,312,321,325,376,377,383,387,398,402,412,434,435,438,439,447,453,468,471,],[-1,-153,-2,-3,-266,-267,-272,-273,-274,34,-153,-286,34,-189,-190,-115,-153,-153,-120,-121,-122,-123,-128,-129,-130,-131,-132,-133,-134,-135,-136,-137,-138,-139,-140,-141,-142,-143,-144,-145,-183,-184,-185,-186,-187,-153,-271,-247,-153,34,-153,-154,-188,-153,-153,-269,34,-119,-176,-270,-248,-242,-153,-275,-282,-283,-287,-118,-193,-196,34,-268,-149,-150,-243,-244,-246,-276,-277,-153,-191,-192,-194,-195,-153,-153,-174,-245,-153,34,-278,-288,34,-148,-175,-279,-280,-289,-290,-146,-147,-281,-155,]),'REGISTER':([0,1,2,3,4,5,6,7,8,10,11,15,17,19,21,22,23,25,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,58,59,60,61,62,74,75,77,79,84,85,86,88,91,92,93,94,96,167,177,179,180,183,186,188,193,197,200,202,208,211,213,284,285,292,298,304,305,306,308,312,321,325,376,377,383,387,398,402,412,434,435,438,439,447,453,468,471,],[-1,-153,-2,-3,-266,-267,-272,-273,-274,35,-153,-286,35,-189,-190,-115,
-153,-153,-120,-121,-122,-123,-128,-129,-130,-131,-132,-133,-134,-135,-136,-137,-138,-139,-140,-141,-142,-143,-144,-145,-183,-184,-185,-186,-187,-153,-271,-247,-153,35,-153,-154,-188,-153,-153,-269,35,-119,-176,-270,-248,-242,-153,-275,-282,-283,-287,-118,-193,-196,35,-268,-149,-150,-243,-244,-246,-276,-277,-153,-191,-192,-194,-195,-153,-153,-174,-245,-153,35,-278,-288,35,-148,-175,-279,-280,-289,-290,-146,-147,-281,-155,]),'VOID':([0,1,2,3,4,5,6,7,8,10,11,15,17,19,21,22,23,25,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,58,59,60,61,62,63,74,75,77,79,84,85,86,88,91,92,93,94,96,111,167,170,172,174,177,179,180,183,186,188,193,197,200,201,202,208,211,213,248,258,284,285,287,290,291,292,294,298,304,305,306,308,312,316,317,318,319,320,321,325,372,376,377,382,383,387,398,401,402,403,405,409,412,423,434,435,438,439,444,447,448,453,468,471,],[-1,-153,-2,-3,-266,-267,-272,-273,-274,36,-153,-286,36,-189,-190,-115,-153,-153,-120,-121,-122,-123,-128,-129,-130,-131,-132,-133,-134,-135,-136,-137,-138,-139,-140,-141,-142,-143,-144,-145,-183,-184,-185,-186,-187,-153,-271,-247,-153,36,-153,-153,-154,-188,-153,-153,-269,36,-119,-176,-270,-248,-242,-153,-153,-275,36,36,-153,-282,-283,-287,-118,-193,-196,36,-268,-149,-153,-150,-243,-244,-246,-153,-153,-276,-277,-153,-167,-168,-153,-153,-191,-192,-194,-195,-153,-153,-153,-153,-161,36,-153,-174,-245,-166,-153,36,-165,-278,-288,36,-153,-148,-162,-164,-153,-175,-153,-279,-280,-289,-290,-153,-146,-163,-147,-281,-155,]),'_BOOL':([0,1,2,3,4,5,6,7,8,10,11,15,17,19,21,22,23,25,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,58,59,60,61,62,63,74,75,77,79,84,85,86,88,91,92,93,94,96,111,167,170,172,174,177,179,180,183,186,188,193,197,200,201,202,208,211,213,248,258,284,285,287,290,291,292,294,298,304,305,306,308,312,316,317,318,319,320,321,325,372,376,377,382,383,387,398,401,402,403,405,409,412,423,434,435,438,439,444,447,448,453,468,471,],[-1,-153,-2,-3,-266,-267,-272,-273,-274,
37,-153,-286,37,-189,-190,-115,-153,-153,-120,-121,-122,-123,-128,-129,-130,-131,-132,-133,-134,-135,-136,-137,-138,-139,-140,-141,-142,-143,-144,-145,-183,-184,-185,-186,-187,-153,-271,-247,-153,37,-153,-153,-154,-188,-153,-153,-269,37,-119,-176,-270,-248,-242,-153,-153,-275,37,37,-153,-282,-283,-287,-118,-193,-196,37,-268,-149,-153,-150,-243,-244,-246,-153,-153,-276,-277,-153,-167,-168,-153,-153,-191,-192,-194,-195,-153,-153,-153,-153,-161,37,-153,-174,-245,-166,-153,37,-165,-278,-288,37,-153,-148,-162,-164,-153,-175,-153,-279,-280,-289,-290,-153,-146,-163,-147,-281,-155,]),'CHAR':([0,1,2,3,4,5,6,7,8,10,11,15,17,19,21,22,23,25,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,58,59,60,61,62,63,74,75,77,79,84,85,86,88,91,92,93,94,96,111,167,170,172,174,177,179,180,183,186,188,193,197,200,201,202,208,211,213,248,258,284,285,287,290,291,292,294,298,304,305,306,308,312,316,317,318,319,320,321,325,372,376,377,382,383,387,398,401,402,403,405,409,412,423,434,435,438,439,444,447,448,453,468,471,],[-1,-153,-2,-3,-266,-267,-272,-273,-274,38,-153,-286,38,-189,-190,-115,-153,-153,-120,-121,-122,-123,-128,-129,-130,-131,-132,-133,-134,-135,-136,-137,-138,-139,-140,-141,-142,-143,-144,-145,-183,-184,-185,-186,-187,-153,-271,-247,-153,38,-153,-153,-154,-188,-153,-153,-269,38,-119,-176,-270,-248,-242,-153,-153,-275,38,38,-153,-282,-283,-287,-118,-193,-196,38,-268,-149,-153,-150,-243,-244,-246,-153,-153,-276,-277,-153,-167,-168,-153,-153,-191,-192,-194,-195,-153,-153,-153,-153,-161,38,-153,-174,-245,-166,-153,38,-165,-278,-288,38,-153,-148,-162,-164,-153,-175,-153,-279,-280,-289,-290,-153,-146,-163,-147,-281,-155,]),'SHORT':([0,1,2,3,4,5,6,7,8,10,11,15,17,19,21,22,23,25,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,58,59,60,61,62,63,74,75,77,79,84,85,86,88,91,92,93,94,96,111,167,170,172,174,177,179,180,183,186,188,193,197,200,201,202,208,211,213,248,258,284,285,287,290,291,292,294,298,304,305,306,308,312,316,317,318
,319,320,321,325,372,376,377,382,383,387,398,401,402,403,405,409,412,423,434,435,438,439,444,447,448,453,468,471,],[-1,-153,-2,-3,-266,-267,-272,-273,-274,39,-153,-286,39,-189,-190,-115,-153,-153,-120,-121,-122,-123,-128,-129,-130,-131,-132,-133,-134,-135,-136,-137,-138,-139,-140,-141,-142,-143,-144,-145,-183,-184,-185,-186,-187,-153,-271,-247,-153,39,-153,-153,-154,-188,-153,-153,-269,39,-119,-176,-270,-248,-242,-153,-153,-275,39,39,-153,-282,-283,-287,-118,-193,-196,39,-268,-149,-153,-150,-243,-244,-246,-153,-153,-276,-277,-153,-167,-168,-153,-153,-191,-192,-194,-195,-153,-153,-153,-153,-161,39,-153,-174,-245,-166,-153,39,-165,-278,-288,39,-153,-148,-162,-164,-153,-175,-153,-279,-280,-289,-290,-153,-146,-163,-147,-281,-155,]),'INT':([0,1,2,3,4,5,6,7,8,10,11,15,17,19,21,22,23,25,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,58,59,60,61,62,63,74,75,77,79,84,85,86,88,91,92,93,94,96,111,167,170,172,174,177,179,180,183,186,188,193,197,200,201,202,208,211,213,248,258,284,285,287,290,291,292,294,298,304,305,306,308,312,316,317,318,319,320,321,325,372,376,377,382,383,387,398,401,402,403,405,409,412,423,434,435,438,439,444,447,448,453,468,471,],[-1,-153,-2,-3,-266,-267,-272,-273,-274,40,-153,-286,40,-189,-190,-115,-153,-153,-120,-121,-122,-123,-128,-129,-130,-131,-132,-133,-134,-135,-136,-137,-138,-139,-140,-141,-142,-143,-144,-145,-183,-184,-185,-186,-187,-153,-271,-247,-153,40,-153,-153,-154,-188,-153,-153,-269,40,-119,-176,-270,-248,-242,-153,-153,-275,40,40,-153,-282,-283,-287,-118,-193,-196,40,-268,-149,-153,-150,-243,-244,-246,-153,-153,-276,-277,-153,-167,-168,-153,-153,-191,-192,-194,-195,-153,-153,-153,-153,-161,40,-153,-174,-245,-166,-153,40,-165,-278,-288,40,-153,-148,-162,-164,-153,-175,-153,-279,-280,-289,-290,-153,-146,-163,-147,-281,-155,]),'LONG':([0,1,2,3,4,5,6,7,8,10,11,15,17,19,21,22,23,25,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,58,59,60,61,62,63,74,75,77,79,84,85,86,88,91,92,93,
94,96,111,167,170,172,174,177,179,180,183,186,188,193,197,200,201,202,208,211,213,248,258,284,285,287,290,291,292,294,298,304,305,306,308,312,316,317,318,319,320,321,325,372,376,377,382,383,387,398,401,402,403,405,409,412,423,434,435,438,439,444,447,448,453,468,471,],[-1,-153,-2,-3,-266,-267,-272,-273,-274,41,-153,-286,41,-189,-190,-115,-153,-153,-120,-121,-122,-123,-128,-129,-130,-131,-132,-133,-134,-135,-136,-137,-138,-139,-140,-141,-142,-143,-144,-145,-183,-184,-185,-186,-187,-153,-271,-247,-153,41,-153,-153,-154,-188,-153,-153,-269,41,-119,-176,-270,-248,-242,-153,-153,-275,41,41,-153,-282,-283,-287,-118,-193,-196,41,-268,-149,-153,-150,-243,-244,-246,-153,-153,-276,-277,-153,-167,-168,-153,-153,-191,-192,-194,-195,-153,-153,-153,-153,-161,41,-153,-174,-245,-166,-153,41,-165,-278,-288,41,-153,-148,-162,-164,-153,-175,-153,-279,-280,-289,-290,-153,-146,-163,-147,-281,-155,]),'FLOAT':([0,1,2,3,4,5,6,7,8,10,11,15,17,19,21,22,23,25,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,58,59,60,61,62,63,74,75,77,79,84,85,86,88,91,92,93,94,96,111,167,170,172,174,177,179,180,183,186,188,193,197,200,201,202,208,211,213,248,258,284,285,287,290,291,292,294,298,304,305,306,308,312,316,317,318,319,320,321,325,372,376,377,382,383,387,398,401,402,403,405,409,412,423,434,435,438,439,444,447,448,453,468,471,],[-1,-153,-2,-3,-266,-267,-272,-273,-274,42,-153,-286,42,-189,-190,-115,-153,-153,-120,-121,-122,-123,-128,-129,-130,-131,-132,-133,-134,-135,-136,-137,-138,-139,-140,-141,-142,-143,-144,-145,-183,-184,-185,-186,-187,-153,-271,-247,-153,42,-153,-153,-154,-188,-153,-153,-269,42,-119,-176,-270,-248,-242,-153,-153,-275,42,42,-153,-282,-283,-287,-118,-193,-196,42,-268,-149,-153,-150,-243,-244,-246,-153,-153,-276,-277,-153,-167,-168,-153,-153,-191,-192,-194,-195,-153,-153,-153,-153,-161,42,-153,-174,-245,-166,-153,42,-165,-278,-288,42,-153,-148,-162,-164,-153,-175,-153,-279,-280,-289,-290,-153,-146,-163,-147,-281,-155,]),'DOUBLE':([0,1,2,3,4,5,6,7,8,10
,11,15,17,19,21,22,23,25,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,58,59,60,61,62,63,74,75,77,79,84,85,86,88,91,92,93,94,96,111,167,170,172,174,177,179,180,183,186,188,193,197,200,201,202,208,211,213,248,258,284,285,287,290,291,292,294,298,304,305,306,308,312,316,317,318,319,320,321,325,372,376,377,382,383,387,398,401,402,403,405,409,412,423,434,435,438,439,444,447,448,453,468,471,],[-1,-153,-2,-3,-266,-267,-272,-273,-274,43,-153,-286,43,-189,-190,-115,-153,-153,-120,-121,-122,-123,-128,-129,-130,-131,-132,-133,-134,-135,-136,-137,-138,-139,-140,-141,-142,-143,-144,-145,-183,-184,-185,-186,-187,-153,-271,-247,-153,43,-153,-153,-154,-188,-153,-153,-269,43,-119,-176,-270,-248,-242,-153,-153,-275,43,43,-153,-282,-283,-287,-118,-193,-196,43,-268,-149,-153,-150,-243,-244,-246,-153,-153,-276,-277,-153,-167,-168,-153,-153,-191,-192,-194,-195,-153,-153,-153,-153,-161,43,-153,-174,-245,-166,-153,43,-165,-278,-288,43,-153,-148,-162,-164,-153,-175,-153,-279,-280,-289,-290,-153,-146,-163,-147,-281,-155,]),'SIGNED':([0,1,2,3,4,5,6,7,8,10,11,15,17,19,21,22,23,25,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,58,59,60,61,62,63,74,75,77,79,84,85,86,88,91,92,93,94,96,111,167,170,172,174,177,179,180,183,186,188,193,197,200,201,202,208,211,213,248,258,284,285,287,290,291,292,294,298,304,305,306,308,312,316,317,318,319,320,321,325,372,376,377,382,383,387,398,401,402,403,405,409,412,423,434,435,438,439,444,447,448,453,468,471,],[-1,-153,-2,-3,-266,-267,-272,-273,-274,44,-153,-286,44,-189,-190,-115,-153,-153,-120,-121,-122,-123,-128,-129,-130,-131,-132,-133,-134,-135,-136,-137,-138,-139,-140,-141,-142,-143,-144,-145,-183,-184,-185,-186,-187,-153,-271,-247,-153,44,-153,-153,-154,-188,-153,-153,-269,44,-119,-176,-270,-248,-242,-153,-153,-275,44,44,-153,-282,-283,-287,-118,-193,-196,44,-268,-149,-153,-150,-243,-244,-246,-153,-153,-276,-277,-153,-167,-168,-153,-153,-191,-192,-194,-195,-153,-153,-153,-153,-161,44,-153,-1
74,-245,-166,-153,44,-165,-278,-288,44,-153,-148,-162,-164,-153,-175,-153,-279,-280,-289,-290,-153,-146,-163,-147,-281,-155,]),'UNSIGNED':([0,1,2,3,4,5,6,7,8,10,11,15,17,19,21,22,23,25,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,58,59,60,61,62,63,74,75,77,79,84,85,86,88,91,92,93,94,96,111,167,170,172,174,177,179,180,183,186,188,193,197,200,201,202,208,211,213,248,258,284,285,287,290,291,292,294,298,304,305,306,308,312,316,317,318,319,320,321,325,372,376,377,382,383,387,398,401,402,403,405,409,412,423,434,435,438,439,444,447,448,453,468,471,],[-1,-153,-2,-3,-266,-267,-272,-273,-274,45,-153,-286,45,-189,-190,-115,-153,-153,-120,-121,-122,-123,-128,-129,-130,-131,-132,-133,-134,-135,-136,-137,-138,-139,-140,-141,-142,-143,-144,-145,-183,-184,-185,-186,-187,-153,-271,-247,-153,45,-153,-153,-154,-188,-153,-153,-269,45,-119,-176,-270,-248,-242,-153,-153,-275,45,45,-153,-282,-283,-287,-118,-193,-196,45,-268,-149,-153,-150,-243,-244,-246,-153,-153,-276,-277,-153,-167,-168,-153,-153,-191,-192,-194,-195,-153,-153,-153,-153,-161,45,-153,-174,-245,-166,-153,45,-165,-278,-288,45,-153,-148,-162,-164,-153,-175,-153,-279,-280,-289,-290,-153,-146,-163,-147,-281,-155,]),'TYPE_NAME':([0,1,2,3,4,5,6,7,8,10,11,15,17,19,21,22,23,25,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,56,57,58,59,60,61,62,63,74,75,77,79,84,85,86,88,89,91,92,93,94,96,111,167,170,172,174,177,179,180,183,186,188,193,197,200,201,202,208,211,213,248,258,284,285,287,290,291,292,294,298,304,305,306,308,312,316,317,318,319,320,321,325,372,376,377,382,383,387,398,401,402,403,405,409,412,423,434,435,438,439,444,447,448,453,468,471,],[-1,-153,-2,-3,-266,-267,-272,-273,-274,48,-153,-286,48,-189,-190,-115,-153,-153,-120,-121,-122,-123,-128,-129,-130,-131,-132,-133,-134,-135,-136,-137,-138,-139,-140,-141,-142,-143,-144,-145,-183,-184,-185,-186,-187,-153,-151,-152,-153,-271,-247,-153,48,-153,-153,-154,-188,-153,-153,-269,48,-119,202,-176,-270,-248,-242,
-153,-153,-275,48,48,-153,-282,-283,-287,-118,-193,-196,48,-268,-149,-153,-150,-243,-244,-246,-153,-153,-276,-277,-153,-167,-168,-153,-153,-191,-192,-194,-195,-153,-153,-153,-153,-161,48,-153,-174,-245,-166,-153,48,-165,-278,-288,48,-153,-148,-162,-164,-153,-175,-153,-279,-280,-289,-290,-153,-146,-163,-147,-281,-155,]),'CONST':([0,1,2,3,4,5,6,7,8,10,11,15,17,19,20,21,22,23,25,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,58,59,60,61,62,63,74,75,77,79,80,82,83,84,85,86,88,91,92,93,94,96,111,167,170,172,174,177,179,180,183,186,188,193,195,196,197,200,201,202,208,211,213,248,258,284,285,287,290,291,292,294,298,304,305,306,308,312,316,317,318,319,320,321,325,372,376,377,382,383,387,398,401,402,403,405,409,412,423,434,435,438,439,444,447,448,453,468,471,],[-1,-153,-2,-3,-266,-267,-272,-273,-274,49,-153,-286,49,-189,49,-190,-115,-153,-153,-120,-121,-122,-123,-128,-129,-130,-131,-132,-133,-134,-135,-136,-137,-138,-139,-140,-141,-142,-143,-144,-145,-183,-184,-185,-186,-187,-153,-271,-247,-153,49,-153,-153,-154,-188,-153,49,-201,-202,-153,-269,49,-119,-176,-270,-248,-242,-153,-153,-275,49,49,-153,-282,-283,-287,-118,-193,-196,49,-203,-204,-268,-149,-153,-150,-243,-244,-246,-153,-153,-276,-277,-153,-167,-168,-153,-153,-191,-192,-194,-195,-153,-153,-153,-153,-161,49,-153,-174,-245,-166,-153,49,-165,-278,-288,49,-153,-148,-162,-164,-153,-175,-153,-279,-280,-289,-290,-153,-146,-163,-147,-281,-155,]),'VOLATILE':([0,1,2,3,4,5,6,7,8,10,11,15,17,19,20,21,22,23,25,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,58,59,60,61,62,63,74,75,77,79,80,82,83,84,85,86,88,91,92,93,94,96,111,139,167,170,172,174,177,179,180,183,186,188,193,195,196,197,200,201,202,208,211,213,248,258,284,285,287,290,291,292,294,298,304,305,306,308,312,316,317,318,319,320,321,325,372,376,377,382,383,387,398,401,402,403,405,409,412,423,434,435,438,439,444,447,448,453,468,471,],[-1,-153,-2,-3,-266,-267,-272,-273,-274,50,-153,-286,50,-189,50,-190,-115
,-153,-153,-120,-121,-122,-123,-128,-129,-130,-131,-132,-133,-134,-135,-136,-137,-138,-139,-140,-141,-142,-143,-144,-145,-183,-184,-185,-186,-187,-153,-271,-247,-153,50,-153,-153,-154,-188,-153,50,-201,-202,-153,-269,50,-119,-176,-270,-248,-242,-153,-153,261,-275,50,50,-153,-282,-283,-287,-118,-193,-196,50,-203,-204,-268,-149,-153,-150,-243,-244,-246,-153,-153,-276,-277,-153,-167,-168,-153,-153,-191,-192,-194,-195,-153,-153,-153,-153,-161,50,-153,-174,-245,-166,-153,50,-165,-278,-288,50,-153,-148,-162,-164,-153,-175,-153,-279,-280,-289,-290,-153,-146,-163,-147,-281,-155,]),'RESTRICT':([0,1,2,3,4,5,6,7,8,10,11,15,17,19,20,21,22,23,25,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,58,59,60,61,62,63,74,75,77,79,80,82,83,84,85,86,88,91,92,93,94,96,111,167,170,172,174,177,179,180,183,186,188,193,195,196,197,200,201,202,208,211,213,248,258,284,285,287,290,291,292,294,298,304,305,306,308,312,316,317,318,319,320,321,325,372,376,377,382,383,387,398,401,402,403,405,409,412,423,434,435,438,439,444,447,448,453,468,471,],[-1,-153,-2,-3,-266,-267,-272,-273,-274,51,-153,-286,51,-189,51,-190,-115,-153,-153,-120,-121,-122,-123,-128,-129,-130,-131,-132,-133,-134,-135,-136,-137,-138,-139,-140,-141,-142,-143,-144,-145,-183,-184,-185,-186,-187,-153,-271,-247,-153,51,-153,-153,-154,-188,-153,51,-201,-202,-153,-269,51,-119,-176,-270,-248,-242,-153,-153,-275,51,51,-153,-282,-283,-287,-118,-193,-196,51,-203,-204,-268,-149,-153,-150,-243,-244,-246,-153,-153,-276,-277,-153,-167,-168,-153,-153,-191,-192,-194,-195,-153,-153,-153,-153,-161,51,-153,-174,-245,-166,-153,51,-165,-278,-288,51,-153,-148,-162,-164,-153,-175,-153,-279,-280,-289,-290,-153,-146,-163,-147,-281,-155,]),'INLINE':([0,1,2,3,4,5,6,7,8,10,11,15,17,19,21,22,23,25,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,58,59,60,61,62,74,75,77,79,84,85,86,88,91,92,93,94,96,167,177,179,180,183,186,188,193,197,200,202,208,211,213,284,285,292,298,304,305,306,308,312,321,325,37
6,377,383,387,398,402,412,434,435,438,439,447,453,468,471,],[-1,-153,-2,-3,-266,-267,-272,-273,-274,52,-153,-286,52,-189,-190,-115,-153,-153,-120,-121,-122,-123,-128,-129,-130,-131,-132,-133,-134,-135,-136,-137,-138,-139,-140,-141,-142,-143,-144,-145,-183,-184,-185,-186,-187,-153,-271,-247,-153,52,-153,-154,-188,-153,-153,-269,52,-119,-176,-270,-248,-242,-153,-275,-282,-283,-287,-118,-193,-196,52,-268,-149,-150,-243,-244,-246,-276,-277,-153,-191,-192,-194,-195,-153,-153,-174,-245,-153,52,-278,-288,52,-148,-175,-279,-280,-289,-290,-146,-147,-281,-155,]),'_NORETURN':([0,1,2,3,4,5,6,7,8,10,11,15,17,19,21,22,23,25,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,58,59,60,61,62,74,75,77,79,84,85,86,88,91,92,93,94,96,167,177,179,180,183,186,188,193,197,200,202,208,211,213,284,285,292,298,304,305,306,308,312,321,325,376,377,383,387,398,402,412,434,435,438,439,447,453,468,471,],[-1,-153,-2,-3,-266,-267,-272,-273,-274,53,-153,-286,53,-189,-190,-115,-153,-153,-120,-121,-122,-123,-128,-129,-130,-131,-132,-133,-134,-135,-136,-137,-138,-139,-140,-141,-142,-143,-144,-145,-183,-184,-185,-186,-187,-153,-271,-247,-153,53,-153,-154,-188,-153,-153,-269,53,-119,-176,-270,-248,-242,-153,-275,-282,-283,-287,-118,-193,-196,53,-268,-149,-150,-243,-244,-246,-276,-277,-153,-191,-192,-194,-195,-153,-153,-174,-245,-153,53,-278,-288,53,-148,-175,-279,-280,-289,-290,-146,-147,-281,-155,]),'ENUM':([0,1,2,3,4,5,6,7,8,10,11,15,17,19,21,22,23,25,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,58,59,60,61,62,63,74,75,77,79,84,85,86,88,91,92,93,94,96,111,167,170,172,174,177,179,180,183,186,188,193,197,200,201,202,208,211,213,248,258,284,285,287,290,291,292,294,298,304,305,306,308,312,316,317,318,319,320,321,325,372,376,377,382,383,387,398,401,402,403,405,409,412,423,434,435,438,439,444,447,448,453,468,471,],[-1,-153,-2,-3,-266,-267,-272,-273,-274,55,-153,-286,55,-189,-190,-115,-153,-153,-120,-121,-122,-123,-128,-129,-130,-131,-132,-133,-
134,-135,-136,-137,-138,-139,-140,-141,-142,-143,-144,-145,-183,-184,-185,-186,-187,-153,-271,-247,-153,55,-153,-153,-154,-188,-153,-153,-269,55,-119,-176,-270,-248,-242,-153,-153,-275,55,55,-153,-282,-283,-287,-118,-193,-196,55,-268,-149,-153,-150,-243,-244,-246,-153,-153,-276,-277,-153,-167,-168,-153,-153,-191,-192,-194,-195,-153,-153,-153,-153,-161,55,-153,-174,-245,-166,-153,55,-165,-278,-288,55,-153,-148,-162,-164,-153,-175,-153,-279,-280,-289,-290,-153,-146,-163,-147,-281,-155,]),'STRUCT':([0,1,2,3,4,5,6,7,8,10,11,15,17,19,21,22,23,25,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,58,59,60,61,62,63,74,75,77,79,84,85,86,88,91,92,93,94,96,111,167,170,172,174,177,179,180,183,186,188,193,197,200,201,202,208,211,213,248,258,284,285,287,290,291,292,294,298,304,305,306,308,312,316,317,318,319,320,321,325,372,376,377,382,383,387,398,401,402,403,405,409,412,423,434,435,438,439,444,447,448,453,468,471,],[-1,-153,-2,-3,-266,-267,-272,-273,-274,56,-153,-286,56,-189,-190,-115,-153,-153,-120,-121,-122,-123,-128,-129,-130,-131,-132,-133,-134,-135,-136,-137,-138,-139,-140,-141,-142,-143,-144,-145,-183,-184,-185,-186,-187,-153,-271,-247,-153,56,-153,-153,-154,-188,-153,-153,-269,56,-119,-176,-270,-248,-242,-153,-153,-275,56,56,-153,-282,-283,-287,-118,-193,-196,56,-268,-149,-153,-150,-243,-244,-246,-153,-153,-276,-277,-153,-167,-168,-153,-153,-191,-192,-194,-195,-153,-153,-153,-153,-161,56,-153,-174,-245,-166,-153,56,-165,-278,-288,56,-153,-148,-162,-164,-153,-175,-153,-279,-280,-289,-290,-153,-146,-163,-147,-281,-155,]),'UNION':([0,1,2,3,4,5,6,7,8,10,11,15,17,19,21,22,23,25,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,58,59,60,61,62,63,74,75,77,79,84,85,86,88,91,92,93,94,96,111,167,170,172,174,177,179,180,183,186,188,193,197,200,201,202,208,211,213,248,258,284,285,287,290,291,292,294,298,304,305,306,308,312,316,317,318,319,320,321,325,372,376,377,382,383,387,398,401,402,403,405,409,412,423,434,435,438,439,4
44,447,448,453,468,471,],[-1,-153,-2,-3,-266,-267,-272,-273,-274,57,-153,-286,57,-189,-190,-115,-153,-153,-120,-121,-122,-123,-128,-129,-130,-131,-132,-133,-134,-135,-136,-137,-138,-139,-140,-141,-142,-143,-144,-145,-183,-184,-185,-186,-187,-153,-271,-247,-153,57,-153,-153,-154,-188,-153,-153,-269,57,-119,-176,-270,-248,-242,-153,-153,-275,57,57,-153,-282,-283,-287,-118,-193,-196,57,-268,-149,-153,-150,-243,-244,-246,-153,-153,-276,-277,-153,-167,-168,-153,-153,-191,-192,-194,-195,-153,-153,-153,-153,-161,57,-153,-174,-245,-166,-153,57,-165,-278,-288,57,-153,-148,-162,-164,-153,-175,-153,-279,-280,-289,-290,-153,-146,-163,-147,-281,-155,]),'SEMI':([9,10,19,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,60,61,62,73,74,75,77,86,88,91,93,94,95,96,97,98,99,100,101,102,103,104,105,106,109,114,117,118,119,120,121,122,123,124,128,130,131,132,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,161,162,163,164,165,166,171,173,175,183,186,188,199,200,202,208,209,210,211,212,213,214,215,218,219,227,228,229,230,231,232,251,252,253,255,256,257,263,264,265,269,283,287,290,291,294,298,304,305,306,313,314,319,321,325,326,327,328,329,330,331,333,338,339,340,341,343,345,349,350,352,354,355,356,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,382,402,404,406,407,412,413,414,415,416,417,421,422,424,445,447,450,452,453,454,455,456,458,460,463,471,474,476,477,478,479,480,481,482,489,490,491,492,495,502,504,],[22,-116,-189,-190,-115,-153,-117,-153,-124,-120,-121,-122,-123,-128,-129,-130,-131,-132,-133,-134,-135,-136,-137,-138,-139,-140,-141,-142,-143,-144,-145,-183,-184,-185,-186,-187,-247,105,-116,-20,-153,-154,-188,-126,-119,-176,-248,-242,105,105,214,-249,-233,-234,-235,-236,-237,-238,-251,-4,219,105,229,230,231,-112,-99,-67,-97,-54,-69,-60,-95,-31,-21,-93,-27,-28,-29,-91,-25,-9,-10,-11,-12,-13,-14,-15,-16,-17,-18,-19,-22,-89,-87,-84,-79,-76,-73,-153,-114,-67,-4,-118,-193,-196,-125,-149,-150,
-243,-250,214,-244,105,-246,-253,105,105,-252,105,339,-262,-263,-264,340,-37,-38,-55,-56,-57,-58,-23,-24,-25,-26,-126,-153,-167,-168,-153,-191,-192,-194,-195,-127,-228,405,-174,-245,-239,-5,-7,105,-241,-113,-30,105,-261,-265,-100,-96,-33,-35,-36,-94,-92,-90,-8,-6,-88,-85,-86,-80,-81,-82,-83,-77,-78,-74,-75,-70,-71,-72,-166,-165,-148,448,-169,-153,-175,-240,105,-68,105,105,-32,-34,-59,-229,-146,-171,-153,-147,-254,-256,-257,105,-98,-43,-155,-230,-170,-153,-172,105,491,-259,105,-173,-255,-258,-260,-44,-45,-46,]),'LBRACE':([11,19,21,22,23,54,55,56,57,58,60,61,75,77,84,89,91,93,94,95,96,98,99,100,101,102,103,104,105,114,186,188,198,200,202,208,209,211,212,213,214,215,218,219,229,230,231,298,304,305,306,315,325,326,329,330,339,340,413,414,416,417,446,454,455,456,458,471,479,481,482,490,491,492,],[61,-189,-190,-115,61,-153,90,-151,-152,61,-247,61,-154,-188,61,201,207,-248,-242,61,61,-249,-233,-234,-235,-236,-237,-238,-251,61,-193,-196,315,316,320,-243,-250,-244,61,-246,-253,61,61,-252,-262,-263,-264,-191,-192,-194,-195,315,-245,-239,61,-241,-261,-265,-240,61,61,61,315,-254,-256,-257,61,-155,61,-259,61,-255,-258,-260,]),'PP_DEFINE_NAME':([12,14,],[63,67,]),'PP_DEFINE_MACRO_NAME':([12,],[64,]),'error':([12,22,60,61,93,94,95,96,98,99,100,101,102,103,104,105,114,208,209,211,212,213,214,215,218,219,227,229,230,231,325,326,329,330,338,339,340,413,414,416,417,454,455,456,458,479,481,482,490,491,492,],[65,-115,-247,97,-248,-242,210,210,-249,-233,-234,-235,-236,-237,-238,-251,210,-243,-250,-244,210,-246,-253,210,210,-252,210,-262,-263,-264,-245,-239,210,-241,210,-261,-265,-240,210,210,210,-254,-256,-257,210,210,-259,210,-255,-258,-260,]),'PRAGMA_PACK':([16,],[69,]),'STRING_LITERAL':([16,22,60,61,63,68,70,71,72,73,78,93,94,95,96,98,99,100,101,102,103,104,105,107,111,114,119,125,126,127,129,133,134,135,136,137,138,140,144,146,158,174,181,198,208,209,211,212,213,214,215,218,219,220,221,224,225,227,229,230,231,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,254,258,259
,262,263,264,265,266,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,293,295,315,323,325,326,329,330,334,338,339,340,353,375,385,408,413,414,416,417,418,419,420,423,444,446,451,454,455,456,458,464,479,481,482,490,491,492,493,494,496,501,],[73,-115,-247,73,73,73,-296,-298,-299,-20,73,-248,-242,73,73,-249,-233,-234,-235,-236,-237,-238,-251,73,73,73,73,73,73,73,73,-61,-62,-63,-64,-65,-66,-21,73,-25,-22,73,-297,73,-243,-250,-244,73,-246,-253,73,73,-252,73,73,73,73,73,-262,-263,-264,73,-101,-102,-103,-104,-105,-106,-107,-108,-109,-110,-111,73,73,73,73,73,73,73,73,-23,-24,-25,73,73,-26,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,-245,-239,73,-241,73,73,-261,-265,73,73,73,73,-240,73,73,73,73,73,73,73,73,73,73,-254,-256,-257,73,73,73,-259,73,-255,-258,-260,73,73,73,73,]),'EQUALS':([19,21,23,73,75,77,86,106,122,124,130,132,140,142,143,144,146,147,148,149,150,151,152,153,154,155,156,157,158,166,173,175,186,188,206,251,252,253,255,256,257,263,264,265,269,283,298,304,305,306,327,328,333,345,349,350,356,357,415,421,422,424,463,471,495,502,504,],[-189,-190,-153,-20,-154,-188,198,-4,234,-54,-60,-31,-21,-27,-28,-29,-25,-9,-10,-11,-12,-13,-14,-15,-16,-17,-18,-19,-22,-153,-67,-4,-193,-196,323,-37,-38,-55,-56,-57,-58,-23,-24,-25,-26,198,-191,-192,-194,-195,-5,-7,-30,-33,-35,-36,-8,-6,-68,-32,-34,-59,-43,-155,-44,-45,-46,]),'COMMA':([19,20,21,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,73,74,75,77,80,81,82,83,86,88,91,106,109,120,121,122,123,124,128,130,131,132,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,161,162,163,164,165,166,170,171,173,175,183,186,188,189,190,191,192,193,194,195,196,199,200,202,204,205,206,222,232,251,252,253,255,256,257,263,264,265,269,283,286,287,288,289,290,291,294,296,297,298,302,303,304,305,306,309,310,311,313,314,321,327,328,331,332,333,335,336,341,342,343,344,345,346,347,348,349,350,352,354,355,356,357,358,359,360,361,362,363,364,365,366,367,368,369,370,37
1,372,373,374,378,380,382,391,392,393,394,396,397,399,400,402,404,406,407,410,411,412,415,421,422,424,426,427,429,432,433,437,440,441,443,445,447,450,452,453,457,459,460,461,462,463,464,465,466,467,471,472,473,474,475,476,477,478,483,484,485,488,489,494,495,497,498,499,500,501,502,503,504,],[-189,-197,-190,-153,87,-153,-124,-120,-121,-122,-123,-128,-129,-130,-131,-132,-133,-134,-135,-136,-137,-138,-139,-140,-141,-142,-143,-144,-145,-183,-184,-185,-186,-187,-20,-153,-154,-188,-198,-199,-201,-202,-126,-119,-176,-4,220,-112,-99,-67,-97,-54,-69,-60,-95,-31,-21,-93,-27,-28,-29,-91,-25,-9,-10,-11,-12,-13,-14,-15,-16,-17,-18,-19,-22,-89,-87,-84,-79,-76,-73,-153,-214,-114,-67,-4,-118,-193,-196,307,308,-212,-207,-211,-200,-203,-204,-125,-149,-150,322,-179,-181,220,220,-37,-38,-55,-56,-57,-58,-23,-24,-25,-26,-126,-215,-153,-216,-153,-167,-168,-153,386,-284,-191,390,-158,-192,-194,-195,-153,-210,-216,-127,-228,-174,-5,-7,-113,220,-30,220,220,-100,220,-96,220,-33,423,-39,-41,-35,-36,-94,-92,-90,-8,-6,-88,-85,-86,-80,-81,-82,-83,-77,-78,-74,-75,-70,-71,-72,-166,-153,-217,-224,-220,-165,443,-156,-159,-213,-208,-209,446,-231,-148,449,-169,-153,-180,-182,-175,-68,-32,-34,-59,-218,-222,-226,-225,-221,-285,469,470,-158,-229,-146,-171,-153,-147,220,220,-98,-40,-42,-43,-47,-223,-227,-219,-155,-157,423,-230,-232,-170,-153,-172,-50,496,-48,-160,-173,-47,-44,220,496,-49,-51,-47,-45,496,-46,]),'RPAREN':([19,20,21,25,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,73,74,75,77,79,80,81,82,83,88,91,105,120,121,122,123,124,128,130,131,132,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,161,162,163,164,165,170,173,175,176,178,182,183,186,187,188,189,190,191,192,193,194,195,196,200,202,214,219,222,223,248,251,252,253,255,256,257,263,264,265,269,286,287,288,289,290,291,292,294,296,297,298,300,301,302,303,304,305,306,309,310,311,312,321,327,328,331,332,333,335,336,341,343,345,346,347,348,349,350,351,352,354,355,356,357,358,359,360,36
1,362,363,364,365,366,367,368,369,370,371,372,373,374,376,378,379,380,382,391,392,393,394,395,396,397,402,412,415,419,421,422,424,425,426,427,429,430,431,432,433,437,440,441,442,443,447,453,457,459,460,461,462,463,464,465,466,467,471,472,473,483,484,485,486,487,488,494,495,497,498,499,500,501,502,503,504,],[-189,-197,-190,-153,-120,-121,-122,-123,-128,-129,-130,-131,-132,-133,-134,-135,-136,-137,-138,-139,-140,-141,-142,-143,-144,-145,-183,-184,-185,-186,-187,-20,-153,-154,-188,188,-198,-199,-201,-202,-119,-176,-251,-112,-99,-67,-97,-54,-69,-60,-95,-31,-21,-93,-27,-28,-29,-91,-25,-9,-10,-11,-12,-13,-14,-15,-16,-17,-18,-19,-22,-89,-87,-84,-79,-76,-73,-214,-67,-4,295,298,299,-118,-193,305,-196,306,-205,-212,-207,-211,-200,-203,-204,-149,-150,-253,-252,333,334,345,-37,-38,-55,-56,-57,-58,-23,-24,-25,-26,-215,-153,-216,-153,-167,-168,378,-153,385,-284,-191,388,389,-291,-158,-192,-194,-195,-153,-210,-216,378,-174,-5,-7,-113,414,-30,416,417,-100,-96,-33,422,-39,-41,-35,-36,424,-94,-92,-90,-8,-6,-88,-85,-86,-80,-81,-82,-83,-77,-78,-74,-75,-70,-71,-72,-166,-153,-217,429,-224,432,-220,-165,442,-156,-159,-213,-206,-208,-209,-148,-175,-68,458,-32,-34,-59,463,-218,-222,-226,466,467,-225,-221,-285,-292,-295,471,-158,-146,-147,480,482,-98,-40,-42,-43,-47,-223,-227,-219,-155,-157,488,-50,495,-48,-293,-294,-160,-47,-44,500,502,-49,-51,-47,-45,504,-46,]),'COLON':([19,21,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,73,75,77,91,106,108,120,121,122,123,124,128,130,131,132,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,161,162,163,164,165,171,173,175,186,188,200,202,217,251,252,253,255,256,257,263,264,265,269,287,290,291,294,298,304,305,306,319,321,327,328,331,333,341,342,343,345,349,350,352,354,355,356,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,382,402,407,412,415,421,422,424,425,447,449,453,460,463,464,471,483,484,485,494,495,498,499,500,502,504,],[-189,-190,-133,-134,-135,-136,-137,-138,-139,-140,-141,-142,-143,-144,-145,-183,-
184,-185,-20,-154,-188,-176,215,218,-112,-99,-67,-97,-54,-69,-60,-95,-31,-21,-93,-27,-28,-29,-91,-25,-9,-10,-11,-12,-13,-14,-15,-16,-17,-18,-19,-22,-89,-87,-84,-79,-76,-73,-114,-67,-4,-193,-196,-149,-150,329,-37,-38,-55,-56,-57,-58,-23,-24,-25,-26,-153,-167,-168,-153,-191,-192,-194,-195,408,-174,-5,-7,-113,-30,-100,420,-96,-33,-35,-36,-94,-92,-90,-8,-6,-88,-85,-86,-80,-81,-82,-83,-77,-78,-74,-75,-70,-71,-72,-166,-165,-148,451,-175,-68,-32,-34,-59,464,-146,408,-147,-98,-43,-47,-155,-50,494,-48,-47,-44,501,-49,-51,-45,-46,]),'LBRACKET':([19,20,21,25,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,73,74,75,77,80,81,82,83,88,91,106,124,132,140,142,143,144,146,147,148,149,150,151,152,153,154,155,156,157,158,170,175,183,186,188,193,194,195,196,200,202,251,252,263,264,265,269,287,288,289,290,291,292,294,298,304,305,306,311,312,321,327,328,333,345,349,350,356,357,372,373,377,378,380,382,398,402,412,421,422,427,429,432,433,447,453,465,466,467,471,],[78,-197,-190,-153,-120,-121,-122,-123,-128,-129,-130,-131,-132,-133,-134,-135,-136,-137,-138,-139,-140,-141,-142,-143,-144,-145,-183,-184,-185,-186,-187,-20,-153,-154,78,-198,-199,-201,-202,-119,-176,-4,247,-31,-21,-27,-28,-29,-25,-9,-10,-11,-12,-13,-14,-15,-16,-17,-18,-19,-22,293,-4,-118,-193,-196,293,-200,-203,-204,-149,-150,-37,-38,-23,-24,-25,-26,-153,293,375,-167,-168,-153,-153,-191,-192,-194,-195,293,-153,-174,-5,-7,-30,-33,-35,-36,-8,-6,-166,375,293,-224,-220,-165,293,-148,-175,-32,-34,-222,-226,-225,-221,-146,-147,-223,-227,-219,-155,]),'PP_END_DEFINE':([20,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,63,65,67,73,75,80,81,82,83,91,123,124,128,130,131,132,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,161,162,163,164,165,168,169,170,171,173,175,194,195,196,200,202,251,252,253,255,256,257,263,264,265,269,286,287,288,289,290,291,294,295,321,327,328,333,343,345,349,350,352,354,355,356,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373,374,37
8,380,382,384,385,402,412,415,421,422,424,426,427,429,432,433,436,447,453,460,463,465,466,467,471,495,502,504,],[-197,-133,-134,-135,-136,-137,-138,-139,-140,-141,-142,-143,-144,-145,-183,-184,-185,167,177,179,-20,-154,-198,-199,-201,-202,-176,-97,-54,-69,-60,-95,-31,-21,-93,-27,-28,-29,-91,-25,-9,-10,-11,-12,-13,-14,-15,-16,-17,-18,-19,-22,-89,-87,-84,-79,-76,-73,284,285,-214,-114,-67,-4,-200,-203,-204,-149,-150,-37,-38,-55,-56,-57,-58,-23,-24,-25,-26,-215,-153,-216,-153,-167,-168,-153,383,-174,-5,-7,-30,-96,-33,-35,-36,-94,-92,-90,-8,-6,-88,-85,-86,-80,-81,-82,-83,-77,-78,-74,-75,-70,-71,-72,-166,-153,-217,-224,-220,-165,434,435,-148,-175,-68,-32,-34,-59,-218,-222,-226,-225,-221,468,-146,-147,-98,-43,-223,-227,-219,-155,-44,-45,-46,]),'RBRACE':([22,60,61,73,93,94,95,96,97,98,99,100,101,102,103,104,105,121,122,123,124,128,130,131,132,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,161,162,163,164,165,171,173,175,203,204,205,206,208,209,211,212,213,214,219,229,230,231,251,252,253,255,256,257,263,264,265,269,314,317,318,322,324,325,326,327,328,330,333,339,340,341,343,345,349,350,352,354,355,356,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,399,400,401,403,405,409,410,411,413,415,421,422,424,445,446,448,454,455,456,460,463,474,475,481,490,491,492,495,502,504,],[-115,-247,94,-20,-248,-242,208,211,213,-249,-233,-234,-235,-236,-237,-238,-251,-99,-67,-97,-54,-69,-60,-95,-31,-21,-93,-27,-28,-29,-91,-25,-9,-10,-11,-12,-13,-14,-15,-16,-17,-18,-19,-22,-89,-87,-84,-79,-76,-73,-114,-67,-4,321,-177,-179,-181,-243,-250,-244,325,-246,-253,-252,-262,-263,-264,-37,-38,-55,-56,-57,-58,-23,-24,-25,-26,-228,402,-161,-178,412,-245,-239,-5,-7,-241,-30,-261,-265,-100,-96,-33,-35,-36,-94,-92,-90,-8,-6,-88,-85,-86,-80,-81,-82,-83,-77,-78,-74,-75,-70,-71,-72,445,-231,447,-162,-164,453,-180,-182,-240,-68,-32,-34,-59,-229,474,-163,-254,-256,-257,-98,-43,-230,-232,-259,-255,-258,-260,-44,-45,-46,]),'CASE':([22,60,61,93,94,95,96,98,99,100,101,102,103
,104,105,114,208,209,211,212,213,214,215,218,219,229,230,231,325,326,329,330,339,340,413,414,416,417,454,455,456,458,479,481,482,490,491,492,],[-115,-247,107,-248,-242,107,107,-249,-233,-234,-235,-236,-237,-238,-251,107,-243,-250,-244,107,-246,-253,107,107,-252,-262,-263,-264,-245,-239,107,-241,-261,-265,-240,107,107,107,-254,-256,-257,107,107,-259,107,-255,-258,-260,]),'DEFAULT':([22,60,61,93,94,95,96,98,99,100,101,102,103,104,105,114,208,209,211,212,213,214,215,218,219,229,230,231,325,326,329,330,339,340,413,414,416,417,454,455,456,458,479,481,482,490,491,492,],[-115,-247,108,-248,-242,108,108,-249,-233,-234,-235,-236,-237,-238,-251,108,-243,-250,-244,108,-246,-253,108,108,-252,-262,-263,-264,-245,-239,108,-241,-261,-265,-240,108,108,108,-254,-256,-257,108,108,-259,108,-255,-258,-260,]),'IF':([22,60,61,93,94,95,96,98,99,100,101,102,103,104,105,114,208,209,211,212,213,214,215,218,219,229,230,231,325,326,329,330,339,340,413,414,416,417,454,455,456,458,479,481,482,490,491,492,],[-115,-247,110,-248,-242,110,110,-249,-233,-234,-235,-236,-237,-238,-251,110,-243,-250,-244,110,-246,-253,110,110,-252,-262,-263,-264,-245,-239,110,-241,-261,-265,-240,110,110,110,-254,-256,-257,110,110,-259,110,-255,-258,-260,]),'SWITCH':([22,60,61,93,94,95,96,98,99,100,101,102,103,104,105,114,208,209,211,212,213,214,215,218,219,229,230,231,325,326,329,330,339,340,413,414,416,417,454,455,456,458,479,481,482,490,491,492,],[-115,-247,112,-248,-242,112,112,-249,-233,-234,-235,-236,-237,-238,-251,112,-243,-250,-244,112,-246,-253,112,112,-252,-262,-263,-264,-245,-239,112,-241,-261,-265,-240,112,112,112,-254,-256,-257,112,112,-259,112,-255,-258,-260,]),'WHILE':([22,60,61,93,94,95,96,98,99,100,101,102,103,104,105,114,208,209,211,212,213,214,215,218,219,226,229,230,231,325,326,329,330,339,340,413,414,416,417,454,455,456,458,479,481,482,490,491,492,],[-115,-247,113,-248,-242,113,113,-249,-233,-234,-235,-236,-237,-238,-251,113,-243,-250,-244,113,-246,-253,113,113,-252,337,-262,-263,-264,-245,-239,113,-
241,-261,-265,-240,113,113,113,-254,-256,-257,113,113,-259,113,-255,-258,-260,]),'DO':([22,60,61,93,94,95,96,98,99,100,101,102,103,104,105,114,208,209,211,212,213,214,215,218,219,229,230,231,325,326,329,330,339,340,413,414,416,417,454,455,456,458,479,481,482,490,491,492,],[-115,-247,114,-248,-242,114,114,-249,-233,-234,-235,-236,-237,-238,-251,114,-243,-250,-244,114,-246,-253,114,114,-252,-262,-263,-264,-245,-239,114,-241,-261,-265,-240,114,114,114,-254,-256,-257,114,114,-259,114,-255,-258,-260,]),'FOR':([22,60,61,93,94,95,96,98,99,100,101,102,103,104,105,114,208,209,211,212,213,214,215,218,219,229,230,231,325,326,329,330,339,340,413,414,416,417,454,455,456,458,479,481,482,490,491,492,],[-115,-247,115,-248,-242,115,115,-249,-233,-234,-235,-236,-237,-238,-251,115,-243,-250,-244,115,-246,-253,115,115,-252,-262,-263,-264,-245,-239,115,-241,-261,-265,-240,115,115,115,-254,-256,-257,115,115,-259,115,-255,-258,-260,]),'GOTO':([22,60,61,93,94,95,96,98,99,100,101,102,103,104,105,114,208,209,211,212,213,214,215,218,219,229,230,231,325,326,329,330,339,340,413,414,416,417,454,455,456,458,479,481,482,490,491,492,],[-115,-247,116,-248,-242,116,116,-249,-233,-234,-235,-236,-237,-238,-251,116,-243,-250,-244,116,-246,-253,116,116,-252,-262,-263,-264,-245,-239,116,-241,-261,-265,-240,116,116,116,-254,-256,-257,116,116,-259,116,-255,-258,-260,]),'CONTINUE':([22,60,61,93,94,95,96,98,99,100,101,102,103,104,105,114,208,209,211,212,213,214,215,218,219,229,230,231,325,326,329,330,339,340,413,414,416,417,454,455,456,458,479,481,482,490,491,492,],[-115,-247,117,-248,-242,117,117,-249,-233,-234,-235,-236,-237,-238,-251,117,-243,-250,-244,117,-246,-253,117,117,-252,-262,-263,-264,-245,-239,117,-241,-261,-265,-240,117,117,117,-254,-256,-257,117,117,-259,117,-255,-258,-260,]),'BREAK':([22,60,61,93,94,95,96,98,99,100,101,102,103,104,105,114,208,209,211,212,213,214,215,218,219,229,230,231,325,326,329,330,339,340,413,414,416,417,454,455,456,458,479,481,482,490,491,492,],[-115,-247,118,-248,-242,11
8,118,-249,-233,-234,-235,-236,-237,-238,-251,118,-243,-250,-244,118,-246,-253,118,118,-252,-262,-263,-264,-245,-239,118,-241,-261,-265,-240,118,118,118,-254,-256,-257,118,118,-259,118,-255,-258,-260,]),'RETURN':([22,60,61,93,94,95,96,98,99,100,101,102,103,104,105,114,208,209,211,212,213,214,215,218,219,229,230,231,325,326,329,330,339,340,413,414,416,417,454,455,456,458,479,481,482,490,491,492,],[-115,-247,119,-248,-242,119,119,-249,-233,-234,-235,-236,-237,-238,-251,119,-243,-250,-244,119,-246,-253,119,119,-252,-262,-263,-264,-245,-239,119,-241,-261,-265,-240,119,119,119,-254,-256,-257,119,119,-259,119,-255,-258,-260,]),'INC_OP':([22,60,61,63,73,78,93,94,95,96,98,99,100,101,102,103,104,105,106,107,111,114,119,124,125,126,127,129,132,133,134,135,136,137,138,140,142,143,144,146,147,148,149,150,151,152,153,154,155,156,157,158,174,175,198,208,209,211,212,213,214,215,218,219,220,221,224,225,227,229,230,231,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,251,252,254,258,259,262,263,264,265,266,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,293,295,315,323,325,326,327,328,329,330,333,334,338,339,340,345,349,350,356,357,375,385,408,413,414,416,417,418,419,420,421,422,423,444,446,451,454,455,456,458,479,481,482,490,491,492,493,],[-115,-247,125,125,-20,125,-248,-242,125,125,-249,-233,-234,-235,-236,-237,-238,-251,-4,125,125,125,125,251,125,125,125,125,-31,-61,-62,-63,-64,-65,-66,-21,-27,-28,-29,-25,-9,-10,-11,-12,-13,-14,-15,-16,-17,-18,-19,-22,125,-4,125,-243,-250,-244,125,-246,-253,125,125,-252,125,125,125,125,125,-262,-263,-264,125,-101,-102,-103,-104,-105,-106,-107,-108,-109,-110,-111,125,125,125,125,-37,-38,125,125,125,125,-23,-24,-25,125,125,-26,125,125,125,125,125,125,125,125,125,125,125,125,125,125,125,125,125,-245,-239,-5,-7,125,-241,-30,125,125,-261,-265,-33,-35,-36,-8,-6,125,125,125,-240,125,125,125,125,125,125,-32,-34,125,125,125,125,-254,-256,-257,125,125,-259,125,-255,-258,-260,125,]),'DEC_OP':([22,60,61,63,73,78,93,94,95,96,98,99,1
00,101,102,103,104,105,106,107,111,114,119,124,125,126,127,129,132,133,134,135,136,137,138,140,142,143,144,146,147,148,149,150,151,152,153,154,155,156,157,158,174,175,198,208,209,211,212,213,214,215,218,219,220,221,224,225,227,229,230,231,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,251,252,254,258,259,262,263,264,265,266,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,293,295,315,323,325,326,327,328,329,330,333,334,338,339,340,345,349,350,356,357,375,385,408,413,414,416,417,418,419,420,421,422,423,444,446,451,454,455,456,458,479,481,482,490,491,492,493,],[-115,-247,126,126,-20,126,-248,-242,126,126,-249,-233,-234,-235,-236,-237,-238,-251,-4,126,126,126,126,252,126,126,126,126,-31,-61,-62,-63,-64,-65,-66,-21,-27,-28,-29,-25,-9,-10,-11,-12,-13,-14,-15,-16,-17,-18,-19,-22,126,-4,126,-243,-250,-244,126,-246,-253,126,126,-252,126,126,126,126,126,-262,-263,-264,126,-101,-102,-103,-104,-105,-106,-107,-108,-109,-110,-111,126,126,126,126,-37,-38,126,126,126,126,-23,-24,-25,126,126,-26,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,-245,-239,-5,-7,126,-241,-30,126,126,-261,-265,-33,-35,-36,-8,-6,126,126,126,-240,126,126,126,126,126,126,-32,-34,126,126,126,126,-254,-256,-257,126,126,-259,126,-255,-258,-260,126,]),'SIZEOF':([22,60,61,63,78,93,94,95,96,98,99,100,101,102,103,104,105,107,111,114,119,125,126,127,129,133,134,135,136,137,138,174,198,208,209,211,212,213,214,215,218,219,220,221,224,225,227,229,230,231,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,254,258,259,262,266,268,270,271,272,273,274,275,276,277,278,279,280,281,282,293,295,315,323,325,326,329,330,334,338,339,340,375,385,408,413,414,416,417,418,419,420,423,444,446,451,454,455,456,458,479,481,482,490,491,492,493,],[-115,-247,129,129,129,-248,-242,129,129,-249,-233,-234,-235,-236,-237,-238,-251,129,129,129,129,129,129,129,129,-61,-62,-63,-64,-65,-66,129,129,-243,-250,-244,129,-246,-253,129,129,-252,129,129,129,129,129,-262,-263,-264,129,-101,-102
,-103,-104,-105,-106,-107,-108,-109,-110,-111,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,-245,-239,129,-241,129,129,-261,-265,129,129,129,-240,129,129,129,129,129,129,129,129,129,129,-254,-256,-257,129,129,-259,129,-255,-258,-260,129,]),'AND':([22,60,61,63,73,78,93,94,95,96,98,99,100,101,102,103,104,105,106,107,111,114,119,122,124,125,126,127,128,129,130,132,133,134,135,136,137,138,140,142,143,144,146,147,148,149,150,151,152,153,154,155,156,157,158,159,161,162,163,164,165,173,174,175,198,208,209,211,212,213,214,215,218,219,220,221,224,225,227,229,230,231,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,251,252,253,254,255,256,257,258,259,262,263,264,265,266,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,293,295,315,323,325,326,327,328,329,330,333,334,338,339,340,345,349,350,355,356,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,375,385,408,413,414,415,416,417,418,419,420,421,422,423,424,444,446,451,454,455,456,458,463,479,481,482,490,491,492,493,495,502,504,],[-115,-247,133,133,-20,133,-248,-242,133,133,-249,-233,-234,-235,-236,-237,-238,-251,-4,133,133,133,133,-67,-54,133,133,133,-69,133,-60,-31,-61,-62,-63,-64,-65,-66,-21,-27,-28,-29,-25,-9,-10,-11,-12,-13,-14,-15,-16,-17,-18,-19,-22,268,-87,-84,-79,-76,-73,-67,133,-4,133,-243,-250,-244,133,-246,-253,133,133,-252,133,133,133,133,133,-262,-263,-264,133,-101,-102,-103,-104,-105,-106,-107,-108,-109,-110,-111,133,133,133,133,-37,-38,-55,133,-56,-57,-58,133,133,133,-23,-24,-25,133,133,-26,133,133,133,133,133,133,133,133,133,133,133,133,133,133,133,133,133,-245,-239,-5,-7,133,-241,-30,133,133,-261,-265,-33,-35,-36,268,-8,-6,-88,-85,-86,-80,-81,-82,-83,-77,-78,-74,-75,-70,-71,-72,133,133,133,-240,133,-68,133,133,133,133,133,-32,-34,133,-59,133,133,133,-254,-256,-257,133,-43,133,-259,133,-255,-258,-260,133,-44,-45,-46,]),'PLUS':([22,60,61,63,73,78,93,94,95,96,98,99,100,101,102,103,104,105,106,107,111,114,119,122,124,1
25,126,127,128,129,130,132,133,134,135,136,137,138,140,142,143,144,146,147,148,149,150,151,152,153,154,155,156,157,158,164,165,173,174,175,198,208,209,211,212,213,214,215,218,219,220,221,224,225,227,229,230,231,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,251,252,253,254,255,256,257,258,259,262,263,264,265,266,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,293,295,315,323,325,326,327,328,329,330,333,334,338,339,340,345,349,350,356,357,365,366,367,368,369,370,371,375,385,408,413,414,415,416,417,418,419,420,421,422,423,424,444,446,451,454,455,456,458,463,479,481,482,490,491,492,493,495,502,504,],[-115,-247,135,135,-20,135,-248,-242,135,135,-249,-233,-234,-235,-236,-237,-238,-251,-4,135,135,135,135,-67,-54,135,135,135,-69,135,-60,-31,-61,-62,-63,-64,-65,-66,-21,-27,-28,-29,-25,-9,-10,-11,-12,-13,-14,-15,-16,-17,-18,-19,-22,278,-73,-67,135,-4,135,-243,-250,-244,135,-246,-253,135,135,-252,135,135,135,135,135,-262,-263,-264,135,-101,-102,-103,-104,-105,-106,-107,-108,-109,-110,-111,135,135,135,135,-37,-38,-55,135,-56,-57,-58,135,135,135,-23,-24,-25,135,135,-26,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,-245,-239,-5,-7,135,-241,-30,135,135,-261,-265,-33,-35,-36,-8,-6,278,278,-74,-75,-70,-71,-72,135,135,135,-240,135,-68,135,135,135,135,135,-32,-34,135,-59,135,135,135,-254,-256,-257,135,-43,135,-259,135,-255,-258,-260,135,-44,-45,-46,]),'MINUS':([22,60,61,63,73,78,93,94,95,96,98,99,100,101,102,103,104,105,106,107,111,114,119,122,124,125,126,127,128,129,130,132,133,134,135,136,137,138,140,142,143,144,146,147,148,149,150,151,152,153,154,155,156,157,158,164,165,173,174,175,198,208,209,211,212,213,214,215,218,219,220,221,224,225,227,229,230,231,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,251,252,253,254,255,256,257,258,259,262,263,264,265,266,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,293,295,315,323,325,326,327,328,329,330,333,334,338,339,340,345,349,350,356,357,365,366,367,368,369,370
,371,375,385,408,413,414,415,416,417,418,419,420,421,422,423,424,444,446,451,454,455,456,458,463,479,481,482,490,491,492,493,495,502,504,],[-115,-247,136,136,-20,136,-248,-242,136,136,-249,-233,-234,-235,-236,-237,-238,-251,-4,136,136,136,136,-67,-54,136,136,136,-69,136,-60,-31,-61,-62,-63,-64,-65,-66,-21,-27,-28,-29,-25,-9,-10,-11,-12,-13,-14,-15,-16,-17,-18,-19,-22,279,-73,-67,136,-4,136,-243,-250,-244,136,-246,-253,136,136,-252,136,136,136,136,136,-262,-263,-264,136,-101,-102,-103,-104,-105,-106,-107,-108,-109,-110,-111,136,136,136,136,-37,-38,-55,136,-56,-57,-58,136,136,136,-23,-24,-25,136,136,-26,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,-245,-239,-5,-7,136,-241,-30,136,136,-261,-265,-33,-35,-36,-8,-6,279,279,-74,-75,-70,-71,-72,136,136,136,-240,136,-68,136,136,136,136,136,-32,-34,136,-59,136,136,136,-254,-256,-257,136,-43,136,-259,136,-255,-258,-260,136,-44,-45,-46,]),'NOT':([22,60,61,63,78,93,94,95,96,98,99,100,101,102,103,104,105,107,111,114,119,125,126,127,129,133,134,135,136,137,138,174,198,208,209,211,212,213,214,215,218,219,220,221,224,225,227,229,230,231,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,254,258,259,262,266,268,270,271,272,273,274,275,276,277,278,279,280,281,282,293,295,315,323,325,326,329,330,334,338,339,340,375,385,408,413,414,416,417,418,419,420,423,444,446,451,454,455,456,458,479,481,482,490,491,492,493,],[-115,-247,137,137,137,-248,-242,137,137,-249,-233,-234,-235,-236,-237,-238,-251,137,137,137,137,137,137,137,137,-61,-62,-63,-64,-65,-66,137,137,-243,-250,-244,137,-246,-253,137,137,-252,137,137,137,137,137,-262,-263,-264,137,-101,-102,-103,-104,-105,-106,-107,-108,-109,-110,-111,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,-245,-239,137,-241,137,137,-261,-265,137,137,137,-240,137,137,137,137,137,137,137,137,137,137,-254,-256,-257,137,137,-259,137,-255,-258,-260,137,]),'LNOT':([22,60,61,63,78,93,94,95,96,98,99,100,101,102,103,104,1
05,107,111,114,119,125,126,127,129,133,134,135,136,137,138,174,198,208,209,211,212,213,214,215,218,219,220,221,224,225,227,229,230,231,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,254,258,259,262,266,268,270,271,272,273,274,275,276,277,278,279,280,281,282,293,295,315,323,325,326,329,330,334,338,339,340,375,385,408,413,414,416,417,418,419,420,423,444,446,451,454,455,456,458,479,481,482,490,491,492,493,],[-115,-247,138,138,138,-248,-242,138,138,-249,-233,-234,-235,-236,-237,-238,-251,138,138,138,138,138,138,138,138,-61,-62,-63,-64,-65,-66,138,138,-243,-250,-244,138,-246,-253,138,138,-252,138,138,138,138,138,-262,-263,-264,138,-101,-102,-103,-104,-105,-106,-107,-108,-109,-110,-111,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,-245,-239,138,-241,138,138,-261,-265,138,138,138,-240,138,138,138,138,138,138,138,138,138,138,-254,-256,-257,138,138,-259,138,-255,-258,-260,138,]),'__ASM__':([22,60,61,63,78,93,94,95,96,98,99,100,101,102,103,104,105,107,111,114,119,125,126,127,129,133,134,135,136,137,138,174,198,208,209,211,212,213,214,215,218,219,220,221,224,225,227,229,230,231,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,254,258,259,262,266,268,270,271,272,273,274,275,276,277,278,279,280,281,282,293,295,315,323,325,326,329,330,334,338,339,340,375,385,408,413,414,416,417,418,419,420,423,444,446,451,454,455,456,458,479,481,482,490,491,492,493,],[-115,-247,139,139,139,-248,-242,139,139,-249,-233,-234,-235,-236,-237,-238,-251,139,139,139,139,139,139,139,139,-61,-62,-63,-64,-65,-66,139,139,-243,-250,-244,139,-246,-253,139,139,-252,139,139,139,139,139,-262,-263,-264,139,-101,-102,-103,-104,-105,-106,-107,-108,-109,-110,-111,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,-245,-239,139,-241,139,139,-261,-265,139,139,139,-240,139,139,139,139,139,139,139,139,139,139,-254,-256,-257,139,139,-259,139,-255,-258,-260,139,]),'PP_MACRO_
PARAM':([22,60,61,63,73,78,93,94,95,96,98,99,100,101,102,103,104,105,107,111,114,119,125,126,127,129,133,134,135,136,137,138,140,144,146,158,160,174,176,198,208,209,211,212,213,214,215,216,218,219,220,221,224,225,227,229,230,231,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,254,258,259,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,293,295,315,323,325,326,329,330,334,338,339,340,375,385,386,408,413,414,416,417,418,419,420,423,444,446,451,454,455,456,458,479,481,482,490,491,492,493,],[-115,-247,146,146,-20,146,-248,-242,146,146,-249,-233,-234,-235,-236,-237,-238,-251,146,146,146,146,146,146,146,146,-61,-62,-63,-64,-65,-66,-21,265,-25,-22,269,146,297,146,-243,-250,-244,146,-246,-253,146,328,146,-252,146,146,146,146,146,-262,-263,-264,146,-101,-102,-103,-104,-105,-106,-107,-108,-109,-110,-111,146,146,146,146,146,146,146,146,-23,-24,-25,146,356,146,-26,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,-245,-239,146,-241,146,146,-261,-265,146,146,437,146,-240,146,146,146,146,146,146,146,146,146,146,-254,-256,-257,146,146,-259,146,-255,-258,-260,146,]),'I_CONST_HEX':([22,60,61,63,78,93,94,95,96,98,99,100,101,102,103,104,105,107,111,114,119,125,126,127,129,133,134,135,136,137,138,174,182,198,208,209,211,212,213,214,215,218,219,220,221,224,225,227,229,230,231,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,254,258,259,262,266,268,270,271,272,273,274,275,276,277,278,279,280,281,282,293,295,315,323,325,326,329,330,334,338,339,340,375,385,390,408,413,414,416,417,418,419,420,423,444,446,451,454,455,456,458,469,479,481,482,490,491,492,493,],[-115,-247,147,147,147,-248,-242,147,147,-249,-233,-234,-235,-236,-237,-238,-251,147,147,147,147,147,147,147,147,-61,-62,-63,-64,-65,-66,147,147,147,-243,-250,-244,147,-246,-253,147,147,-252,147,147,147,147,147,-262,-263,-264,147,-101,-102,-103,-104,-105,-106,-107,-108,-109,-110,-111,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,
147,147,147,147,147,147,147,147,147,-245,-239,147,-241,147,147,-261,-265,147,147,147,147,-240,147,147,147,147,147,147,147,147,147,147,-254,-256,-257,147,147,147,-259,147,-255,-258,-260,147,]),'I_CONST_DEC':([22,60,61,63,78,93,94,95,96,98,99,100,101,102,103,104,105,107,111,114,119,125,126,127,129,133,134,135,136,137,138,174,182,198,208,209,211,212,213,214,215,218,219,220,221,224,225,227,229,230,231,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,254,258,259,262,266,268,270,271,272,273,274,275,276,277,278,279,280,281,282,293,295,315,323,325,326,329,330,334,338,339,340,375,385,390,408,413,414,416,417,418,419,420,423,444,446,451,454,455,456,458,469,479,481,482,490,491,492,493,],[-115,-247,148,148,148,-248,-242,148,148,-249,-233,-234,-235,-236,-237,-238,-251,148,148,148,148,148,148,148,148,-61,-62,-63,-64,-65,-66,148,148,148,-243,-250,-244,148,-246,-253,148,148,-252,148,148,148,148,148,-262,-263,-264,148,-101,-102,-103,-104,-105,-106,-107,-108,-109,-110,-111,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,-245,-239,148,-241,148,148,-261,-265,148,148,148,148,-240,148,148,148,148,148,148,148,148,148,148,-254,-256,-257,148,148,148,-259,148,-255,-258,-260,148,]),'I_CONST_OCT':([22,60,61,63,78,93,94,95,96,98,99,100,101,102,103,104,105,107,111,114,119,125,126,127,129,133,134,135,136,137,138,174,182,198,208,209,211,212,213,214,215,218,219,220,221,224,225,227,229,230,231,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,254,258,259,262,266,268,270,271,272,273,274,275,276,277,278,279,280,281,282,293,295,315,323,325,326,329,330,334,338,339,340,375,385,390,408,413,414,416,417,418,419,420,423,444,446,451,454,455,456,458,469,479,481,482,490,491,492,493,],[-115,-247,149,149,149,-248,-242,149,149,-249,-233,-234,-235,-236,-237,-238,-251,149,149,149,149,149,149,149,149,-61,-62,-63,-64,-65,-66,149,149,149,-243,-250,-244,149,-246,-253,149,149,-252,149,149,149,149,149,-262,-263,-264,149,-101,-102,-103,
-104,-105,-106,-107,-108,-109,-110,-111,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,-245,-239,149,-241,149,149,-261,-265,149,149,149,149,-240,149,149,149,149,149,149,149,149,149,149,-254,-256,-257,149,149,149,-259,149,-255,-258,-260,149,]),'I_CONST_BIN':([22,60,61,63,78,93,94,95,96,98,99,100,101,102,103,104,105,107,111,114,119,125,126,127,129,133,134,135,136,137,138,174,182,198,208,209,211,212,213,214,215,218,219,220,221,224,225,227,229,230,231,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,254,258,259,262,266,268,270,271,272,273,274,275,276,277,278,279,280,281,282,293,295,315,323,325,326,329,330,334,338,339,340,375,385,390,408,413,414,416,417,418,419,420,423,444,446,451,454,455,456,458,469,479,481,482,490,491,492,493,],[-115,-247,150,150,150,-248,-242,150,150,-249,-233,-234,-235,-236,-237,-238,-251,150,150,150,150,150,150,150,150,-61,-62,-63,-64,-65,-66,150,150,150,-243,-250,-244,150,-246,-253,150,150,-252,150,150,150,150,150,-262,-263,-264,150,-101,-102,-103,-104,-105,-106,-107,-108,-109,-110,-111,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,-245,-239,150,-241,150,150,-261,-265,150,150,150,150,-240,150,150,150,150,150,150,150,150,150,150,-254,-256,-257,150,150,150,-259,150,-255,-258,-260,150,]),'F_CONST_1':([22,60,61,63,78,93,94,95,96,98,99,100,101,102,103,104,105,107,111,114,119,125,126,127,129,133,134,135,136,137,138,174,182,198,208,209,211,212,213,214,215,218,219,220,221,224,225,227,229,230,231,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,254,258,259,262,266,268,270,271,272,273,274,275,276,277,278,279,280,281,282,293,295,315,323,325,326,329,330,334,338,339,340,375,385,390,408,413,414,416,417,418,419,420,423,444,446,451,454,455,456,458,469,479,481,482,490,491,492,493,],[-115,-247,151,151,151,-248,-242,151,151,-249,-233,-234,-235,-236,-237,-238,-251,151,151,151,151,151,151,151,151,-61,-62,-63,-64,-65,-6
6,151,151,151,-243,-250,-244,151,-246,-253,151,151,-252,151,151,151,151,151,-262,-263,-264,151,-101,-102,-103,-104,-105,-106,-107,-108,-109,-110,-111,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,-245,-239,151,-241,151,151,-261,-265,151,151,151,151,-240,151,151,151,151,151,151,151,151,151,151,-254,-256,-257,151,151,151,-259,151,-255,-258,-260,151,]),'F_CONST_2':([22,60,61,63,78,93,94,95,96,98,99,100,101,102,103,104,105,107,111,114,119,125,126,127,129,133,134,135,136,137,138,174,182,198,208,209,211,212,213,214,215,218,219,220,221,224,225,227,229,230,231,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,254,258,259,262,266,268,270,271,272,273,274,275,276,277,278,279,280,281,282,293,295,315,323,325,326,329,330,334,338,339,340,375,385,390,408,413,414,416,417,418,419,420,423,444,446,451,454,455,456,458,469,479,481,482,490,491,492,493,],[-115,-247,152,152,152,-248,-242,152,152,-249,-233,-234,-235,-236,-237,-238,-251,152,152,152,152,152,152,152,152,-61,-62,-63,-64,-65,-66,152,152,152,-243,-250,-244,152,-246,-253,152,152,-252,152,152,152,152,152,-262,-263,-264,152,-101,-102,-103,-104,-105,-106,-107,-108,-109,-110,-111,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,-245,-239,152,-241,152,152,-261,-265,152,152,152,152,-240,152,152,152,152,152,152,152,152,152,152,-254,-256,-257,152,152,152,-259,152,-255,-258,-260,152,]),'F_CONST_3':([22,60,61,63,78,93,94,95,96,98,99,100,101,102,103,104,105,107,111,114,119,125,126,127,129,133,134,135,136,137,138,174,182,198,208,209,211,212,213,214,215,218,219,220,221,224,225,227,229,230,231,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,254,258,259,262,266,268,270,271,272,273,274,275,276,277,278,279,280,281,282,293,295,315,323,325,326,329,330,334,338,339,340,375,385,390,408,413,414,416,417,418,419,420,423,444,446,451,454,455,456,458,469,479,481,482,490,491,492,493,],[-115,-247,153,153,153,-248
,-242,153,153,-249,-233,-234,-235,-236,-237,-238,-251,153,153,153,153,153,153,153,153,-61,-62,-63,-64,-65,-66,153,153,153,-243,-250,-244,153,-246,-253,153,153,-252,153,153,153,153,153,-262,-263,-264,153,-101,-102,-103,-104,-105,-106,-107,-108,-109,-110,-111,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,-245,-239,153,-241,153,153,-261,-265,153,153,153,153,-240,153,153,153,153,153,153,153,153,153,153,-254,-256,-257,153,153,153,-259,153,-255,-258,-260,153,]),'F_CONST_4':([22,60,61,63,78,93,94,95,96,98,99,100,101,102,103,104,105,107,111,114,119,125,126,127,129,133,134,135,136,137,138,174,182,198,208,209,211,212,213,214,215,218,219,220,221,224,225,227,229,230,231,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,254,258,259,262,266,268,270,271,272,273,274,275,276,277,278,279,280,281,282,293,295,315,323,325,326,329,330,334,338,339,340,375,385,390,408,413,414,416,417,418,419,420,423,444,446,451,454,455,456,458,469,479,481,482,490,491,492,493,],[-115,-247,154,154,154,-248,-242,154,154,-249,-233,-234,-235,-236,-237,-238,-251,154,154,154,154,154,154,154,154,-61,-62,-63,-64,-65,-66,154,154,154,-243,-250,-244,154,-246,-253,154,154,-252,154,154,154,154,154,-262,-263,-264,154,-101,-102,-103,-104,-105,-106,-107,-108,-109,-110,-111,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,-245,-239,154,-241,154,154,-261,-265,154,154,154,154,-240,154,154,154,154,154,154,154,154,154,154,-254,-256,-257,154,154,154,-259,154,-255,-258,-260,154,]),'F_CONST_5':([22,60,61,63,78,93,94,95,96,98,99,100,101,102,103,104,105,107,111,114,119,125,126,127,129,133,134,135,136,137,138,174,182,198,208,209,211,212,213,214,215,218,219,220,221,224,225,227,229,230,231,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,254,258,259,262,266,268,270,271,272,273,274,275,276,277,278,279,280,281,282,293,295,315,323,325,326,329,330,334,338,339,340,375,385,390,408,413,414,416,4
17,418,419,420,423,444,446,451,454,455,456,458,469,479,481,482,490,491,492,493,],[-115,-247,155,155,155,-248,-242,155,155,-249,-233,-234,-235,-236,-237,-238,-251,155,155,155,155,155,155,155,155,-61,-62,-63,-64,-65,-66,155,155,155,-243,-250,-244,155,-246,-253,155,155,-252,155,155,155,155,155,-262,-263,-264,155,-101,-102,-103,-104,-105,-106,-107,-108,-109,-110,-111,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,-245,-239,155,-241,155,155,-261,-265,155,155,155,155,-240,155,155,155,155,155,155,155,155,155,155,-254,-256,-257,155,155,155,-259,155,-255,-258,-260,155,]),'F_CONST_6':([22,60,61,63,78,93,94,95,96,98,99,100,101,102,103,104,105,107,111,114,119,125,126,127,129,133,134,135,136,137,138,174,182,198,208,209,211,212,213,214,215,218,219,220,221,224,225,227,229,230,231,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,254,258,259,262,266,268,270,271,272,273,274,275,276,277,278,279,280,281,282,293,295,315,323,325,326,329,330,334,338,339,340,375,385,390,408,413,414,416,417,418,419,420,423,444,446,451,454,455,456,458,469,479,481,482,490,491,492,493,],[-115,-247,156,156,156,-248,-242,156,156,-249,-233,-234,-235,-236,-237,-238,-251,156,156,156,156,156,156,156,156,-61,-62,-63,-64,-65,-66,156,156,156,-243,-250,-244,156,-246,-253,156,156,-252,156,156,156,156,156,-262,-263,-264,156,-101,-102,-103,-104,-105,-106,-107,-108,-109,-110,-111,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,-245,-239,156,-241,156,156,-261,-265,156,156,156,156,-240,156,156,156,156,156,156,156,156,156,156,-254,-256,-257,156,156,156,-259,156,-255,-258,-260,156,]),'CHARACTER_CONSTANT':([22,60,61,63,78,93,94,95,96,98,99,100,101,102,103,104,105,107,111,114,119,125,126,127,129,133,134,135,136,137,138,174,182,198,208,209,211,212,213,214,215,218,219,220,221,224,225,227,229,230,231,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,254,258,259,262,266,268,270,271,272,
273,274,275,276,277,278,279,280,281,282,293,295,315,323,325,326,329,330,334,338,339,340,375,385,390,408,413,414,416,417,418,419,420,423,444,446,451,454,455,456,458,469,479,481,482,490,491,492,493,],[-115,-247,157,157,157,-248,-242,157,157,-249,-233,-234,-235,-236,-237,-238,-251,157,157,157,157,157,157,157,157,-61,-62,-63,-64,-65,-66,157,157,157,-243,-250,-244,157,-246,-253,157,157,-252,157,157,157,157,157,-262,-263,-264,157,-101,-102,-103,-104,-105,-106,-107,-108,-109,-110,-111,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,-245,-239,157,-241,157,157,-261,-265,157,157,157,157,-240,157,157,157,157,157,157,157,157,157,157,-254,-256,-257,157,157,157,-259,157,-255,-258,-260,157,]),'PP_STRINGIFY':([22,60,61,63,73,78,93,94,95,96,98,99,100,101,102,103,104,105,107,111,114,119,125,126,127,129,133,134,135,136,137,138,140,144,146,158,174,198,208,209,211,212,213,214,215,218,219,220,221,224,225,227,229,230,231,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,254,258,259,262,263,264,265,266,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,293,295,315,323,325,326,329,330,334,338,339,340,375,385,408,413,414,416,417,418,419,420,423,444,446,451,454,455,456,458,479,481,482,490,491,492,493,],[-115,-247,160,160,-20,160,-248,-242,160,160,-249,-233,-234,-235,-236,-237,-238,-251,160,160,160,160,160,160,160,160,-61,-62,-63,-64,-65,-66,-21,160,-25,-22,160,160,-243,-250,-244,160,-246,-253,160,160,-252,160,160,160,160,160,-262,-263,-264,160,-101,-102,-103,-104,-105,-106,-107,-108,-109,-110,-111,160,160,160,160,160,160,160,160,-23,-24,-25,160,160,-26,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,-245,-239,160,-241,160,160,-261,-265,160,160,160,-240,160,160,160,160,160,160,160,160,160,160,-254,-256,-257,160,160,-259,160,-255,-258,-260,160,]),'PRAGMA_END':([68,70,71,72,73,181,299,388,389,],[180,-296,-298,-299,-20,-297,387,438,439,]),'PERIOD':([73,106,124,132,140,142,143,144,146,147,148,149
,150,151,152,153,154,155,156,157,158,175,251,252,263,264,265,269,327,328,333,345,349,350,356,357,421,422,],[-20,-4,249,-31,-21,-27,-28,-29,-25,-9,-10,-11,-12,-13,-14,-15,-16,-17,-18,-19,-22,-4,-37,-38,-23,-24,-25,-26,-5,-7,-30,-33,-35,-36,-8,-6,-32,-34,]),'PTR_OP':([73,106,124,132,140,142,143,144,146,147,148,149,150,151,152,153,154,155,156,157,158,175,251,252,263,264,265,269,327,328,333,345,349,350,356,357,421,422,],[-20,-4,250,-31,-21,-27,-28,-29,-25,-9,-10,-11,-12,-13,-14,-15,-16,-17,-18,-19,-22,-4,-37,-38,-23,-24,-25,-26,-5,-7,-30,-33,-35,-36,-8,-6,-32,-34,]),'MUL_ASSIGN':([73,106,122,124,130,132,140,142,143,144,146,147,148,149,150,151,152,153,154,155,156,157,158,173,175,251,252,253,255,256,257,263,264,265,269,327,328,333,345,349,350,356,357,415,421,422,424,463,495,502,504,],[-20,-4,235,-54,-60,-31,-21,-27,-28,-29,-25,-9,-10,-11,-12,-13,-14,-15,-16,-17,-18,-19,-22,-67,-4,-37,-38,-55,-56,-57,-58,-23,-24,-25,-26,-5,-7,-30,-33,-35,-36,-8,-6,-68,-32,-34,-59,-43,-44,-45,-46,]),'DIV_ASSIGN':([73,106,122,124,130,132,140,142,143,144,146,147,148,149,150,151,152,153,154,155,156,157,158,173,175,251,252,253,255,256,257,263,264,265,269,327,328,333,345,349,350,356,357,415,421,422,424,463,495,502,504,],[-20,-4,236,-54,-60,-31,-21,-27,-28,-29,-25,-9,-10,-11,-12,-13,-14,-15,-16,-17,-18,-19,-22,-67,-4,-37,-38,-55,-56,-57,-58,-23,-24,-25,-26,-5,-7,-30,-33,-35,-36,-8,-6,-68,-32,-34,-59,-43,-44,-45,-46,]),'MOD_ASSIGN':([73,106,122,124,130,132,140,142,143,144,146,147,148,149,150,151,152,153,154,155,156,157,158,173,175,251,252,253,255,256,257,263,264,265,269,327,328,333,345,349,350,356,357,415,421,422,424,463,495,502,504,],[-20,-4,237,-54,-60,-31,-21,-27,-28,-29,-25,-9,-10,-11,-12,-13,-14,-15,-16,-17,-18,-19,-22,-67,-4,-37,-38,-55,-56,-57,-58,-23,-24,-25,-26,-5,-7,-30,-33,-35,-36,-8,-6,-68,-32,-34,-59,-43,-44,-45,-46,]),'ADD_ASSIGN':([73,106,122,124,130,132,140,142,143,144,146,147,148,149,150,151,152,153,154,155,156,157,158,173,175,251,252,253,255,256,257,263,264,265,269,327,328,333,34
5,349,350,356,357,415,421,422,424,463,495,502,504,],[-20,-4,238,-54,-60,-31,-21,-27,-28,-29,-25,-9,-10,-11,-12,-13,-14,-15,-16,-17,-18,-19,-22,-67,-4,-37,-38,-55,-56,-57,-58,-23,-24,-25,-26,-5,-7,-30,-33,-35,-36,-8,-6,-68,-32,-34,-59,-43,-44,-45,-46,]),'SUB_ASSIGN':([73,106,122,124,130,132,140,142,143,144,146,147,148,149,150,151,152,153,154,155,156,157,158,173,175,251,252,253,255,256,257,263,264,265,269,327,328,333,345,349,350,356,357,415,421,422,424,463,495,502,504,],[-20,-4,239,-54,-60,-31,-21,-27,-28,-29,-25,-9,-10,-11,-12,-13,-14,-15,-16,-17,-18,-19,-22,-67,-4,-37,-38,-55,-56,-57,-58,-23,-24,-25,-26,-5,-7,-30,-33,-35,-36,-8,-6,-68,-32,-34,-59,-43,-44,-45,-46,]),'LEFT_ASSIGN':([73,106,122,124,130,132,140,142,143,144,146,147,148,149,150,151,152,153,154,155,156,157,158,173,175,251,252,253,255,256,257,263,264,265,269,327,328,333,345,349,350,356,357,415,421,422,424,463,495,502,504,],[-20,-4,240,-54,-60,-31,-21,-27,-28,-29,-25,-9,-10,-11,-12,-13,-14,-15,-16,-17,-18,-19,-22,-67,-4,-37,-38,-55,-56,-57,-58,-23,-24,-25,-26,-5,-7,-30,-33,-35,-36,-8,-6,-68,-32,-34,-59,-43,-44,-45,-46,]),'RIGHT_ASSIGN':([73,106,122,124,130,132,140,142,143,144,146,147,148,149,150,151,152,153,154,155,156,157,158,173,175,251,252,253,255,256,257,263,264,265,269,327,328,333,345,349,350,356,357,415,421,422,424,463,495,502,504,],[-20,-4,241,-54,-60,-31,-21,-27,-28,-29,-25,-9,-10,-11,-12,-13,-14,-15,-16,-17,-18,-19,-22,-67,-4,-37,-38,-55,-56,-57,-58,-23,-24,-25,-26,-5,-7,-30,-33,-35,-36,-8,-6,-68,-32,-34,-59,-43,-44,-45,-46,]),'AND_ASSIGN':([73,106,122,124,130,132,140,142,143,144,146,147,148,149,150,151,152,153,154,155,156,157,158,173,175,251,252,253,255,256,257,263,264,265,269,327,328,333,345,349,350,356,357,415,421,422,424,463,495,502,504,],[-20,-4,242,-54,-60,-31,-21,-27,-28,-29,-25,-9,-10,-11,-12,-13,-14,-15,-16,-17,-18,-19,-22,-67,-4,-37,-38,-55,-56,-57,-58,-23,-24,-25,-26,-5,-7,-30,-33,-35,-36,-8,-6,-68,-32,-34,-59,-43,-44,-45,-46,]),'XOR_ASSIGN':([73,106,122,124,130,132,140,142,143,144,146,14
7,148,149,150,151,152,153,154,155,156,157,158,173,175,251,252,253,255,256,257,263,264,265,269,327,328,333,345,349,350,356,357,415,421,422,424,463,495,502,504,],[-20,-4,243,-54,-60,-31,-21,-27,-28,-29,-25,-9,-10,-11,-12,-13,-14,-15,-16,-17,-18,-19,-22,-67,-4,-37,-38,-55,-56,-57,-58,-23,-24,-25,-26,-5,-7,-30,-33,-35,-36,-8,-6,-68,-32,-34,-59,-43,-44,-45,-46,]),'OR_ASSIGN':([73,106,122,124,130,132,140,142,143,144,146,147,148,149,150,151,152,153,154,155,156,157,158,173,175,251,252,253,255,256,257,263,264,265,269,327,328,333,345,349,350,356,357,415,421,422,424,463,495,502,504,],[-20,-4,244,-54,-60,-31,-21,-27,-28,-29,-25,-9,-10,-11,-12,-13,-14,-15,-16,-17,-18,-19,-22,-67,-4,-37,-38,-55,-56,-57,-58,-23,-24,-25,-26,-5,-7,-30,-33,-35,-36,-8,-6,-68,-32,-34,-59,-43,-44,-45,-46,]),'DIVIDE':([73,106,122,124,128,130,132,140,142,143,144,146,147,148,149,150,151,152,153,154,155,156,157,158,165,173,175,251,252,253,255,256,257,263,264,265,269,327,328,333,345,349,350,356,357,367,368,369,370,371,415,421,422,424,463,495,502,504,],[-20,-4,-67,-54,-69,-60,-31,-21,-27,-28,-29,-25,-9,-10,-11,-12,-13,-14,-15,-16,-17,-18,-19,-22,281,-67,-4,-37,-38,-55,-56,-57,-58,-23,-24,-25,-26,-5,-7,-30,-33,-35,-36,-8,-6,281,281,-70,-71,-72,-68,-32,-34,-59,-43,-44,-45,-46,]),'MOD':([73,106,122,124,128,130,132,140,142,143,144,146,147,148,149,150,151,152,153,154,155,156,157,158,165,173,175,251,252,253,255,256,257,263,264,265,269,327,328,333,345,349,350,356,357,367,368,369,370,371,415,421,422,424,463,495,502,504,],[-20,-4,-67,-54,-69,-60,-31,-21,-27,-28,-29,-25,-9,-10,-11,-12,-13,-14,-15,-16,-17,-18,-19,-22,282,-67,-4,-37,-38,-55,-56,-57,-58,-23,-24,-25,-26,-5,-7,-30,-33,-35,-36,-8,-6,282,282,-70,-71,-72,-68,-32,-34,-59,-43,-44,-45,-46,]),'LEFT_OP':([73,106,122,124,128,130,132,140,142,143,144,146,147,148,149,150,151,152,153,154,155,156,157,158,163,164,165,173,175,251,252,253,255,256,257,263,264,265,269,327,328,333,345,349,350,356,357,361,362,363,364,365,366,367,368,369,370,371,415,421,422,424,463,495,502,504,]
,[-20,-4,-67,-54,-69,-60,-31,-21,-27,-28,-29,-25,-9,-10,-11,-12,-13,-14,-15,-16,-17,-18,-19,-22,276,-76,-73,-67,-4,-37,-38,-55,-56,-57,-58,-23,-24,-25,-26,-5,-7,-30,-33,-35,-36,-8,-6,276,276,276,276,-77,-78,-74,-75,-70,-71,-72,-68,-32,-34,-59,-43,-44,-45,-46,]),'RIGHT_OP':([73,106,122,124,128,130,132,140,142,143,144,146,147,148,149,150,151,152,153,154,155,156,157,158,163,164,165,173,175,251,252,253,255,256,257,263,264,265,269,327,328,333,345,349,350,356,357,361,362,363,364,365,366,367,368,369,370,371,415,421,422,424,463,495,502,504,],[-20,-4,-67,-54,-69,-60,-31,-21,-27,-28,-29,-25,-9,-10,-11,-12,-13,-14,-15,-16,-17,-18,-19,-22,277,-76,-73,-67,-4,-37,-38,-55,-56,-57,-58,-23,-24,-25,-26,-5,-7,-30,-33,-35,-36,-8,-6,277,277,277,277,-77,-78,-74,-75,-70,-71,-72,-68,-32,-34,-59,-43,-44,-45,-46,]),'LT':([73,106,122,124,128,130,132,140,142,143,144,146,147,148,149,150,151,152,153,154,155,156,157,158,162,163,164,165,173,175,251,252,253,255,256,257,263,264,265,269,327,328,333,345,349,350,356,357,359,360,361,362,363,364,365,366,367,368,369,370,371,415,421,422,424,463,495,502,504,],[-20,-4,-67,-54,-69,-60,-31,-21,-27,-28,-29,-25,-9,-10,-11,-12,-13,-14,-15,-16,-17,-18,-19,-22,272,-79,-76,-73,-67,-4,-37,-38,-55,-56,-57,-58,-23,-24,-25,-26,-5,-7,-30,-33,-35,-36,-8,-6,272,272,-80,-81,-82,-83,-77,-78,-74,-75,-70,-71,-72,-68,-32,-34,-59,-43,-44,-45,-46,]),'GT':([73,106,122,124,128,130,132,140,142,143,144,146,147,148,149,150,151,152,153,154,155,156,157,158,162,163,164,165,173,175,251,252,253,255,256,257,263,264,265,269,327,328,333,345,349,350,356,357,359,360,361,362,363,364,365,366,367,368,369,370,371,415,421,422,424,463,495,502,504,],[-20,-4,-67,-54,-69,-60,-31,-21,-27,-28,-29,-25,-9,-10,-11,-12,-13,-14,-15,-16,-17,-18,-19,-22,273,-79,-76,-73,-67,-4,-37,-38,-55,-56,-57,-58,-23,-24,-25,-26,-5,-7,-30,-33,-35,-36,-8,-6,273,273,-80,-81,-82,-83,-77,-78,-74,-75,-70,-71,-72,-68,-32,-34,-59,-43,-44,-45,-46,]),'LE_OP':([73,106,122,124,128,130,132,140,142,143,144,146,147,148,149,150,151,152,153,
154,155,156,157,158,162,163,164,165,173,175,251,252,253,255,256,257,263,264,265,269,327,328,333,345,349,350,356,357,359,360,361,362,363,364,365,366,367,368,369,370,371,415,421,422,424,463,495,502,504,],[-20,-4,-67,-54,-69,-60,-31,-21,-27,-28,-29,-25,-9,-10,-11,-12,-13,-14,-15,-16,-17,-18,-19,-22,274,-79,-76,-73,-67,-4,-37,-38,-55,-56,-57,-58,-23,-24,-25,-26,-5,-7,-30,-33,-35,-36,-8,-6,274,274,-80,-81,-82,-83,-77,-78,-74,-75,-70,-71,-72,-68,-32,-34,-59,-43,-44,-45,-46,]),'GE_OP':([73,106,122,124,128,130,132,140,142,143,144,146,147,148,149,150,151,152,153,154,155,156,157,158,162,163,164,165,173,175,251,252,253,255,256,257,263,264,265,269,327,328,333,345,349,350,356,357,359,360,361,362,363,364,365,366,367,368,369,370,371,415,421,422,424,463,495,502,504,],[-20,-4,-67,-54,-69,-60,-31,-21,-27,-28,-29,-25,-9,-10,-11,-12,-13,-14,-15,-16,-17,-18,-19,-22,275,-79,-76,-73,-67,-4,-37,-38,-55,-56,-57,-58,-23,-24,-25,-26,-5,-7,-30,-33,-35,-36,-8,-6,275,275,-80,-81,-82,-83,-77,-78,-74,-75,-70,-71,-72,-68,-32,-34,-59,-43,-44,-45,-46,]),'EQ_OP':([73,106,122,124,128,130,132,140,142,143,144,146,147,148,149,150,151,152,153,154,155,156,157,158,161,162,163,164,165,173,175,251,252,253,255,256,257,263,264,265,269,327,328,333,345,349,350,356,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,415,421,422,424,463,495,502,504,],[-20,-4,-67,-54,-69,-60,-31,-21,-27,-28,-29,-25,-9,-10,-11,-12,-13,-14,-15,-16,-17,-18,-19,-22,270,-84,-79,-76,-73,-67,-4,-37,-38,-55,-56,-57,-58,-23,-24,-25,-26,-5,-7,-30,-33,-35,-36,-8,-6,270,-85,-86,-80,-81,-82,-83,-77,-78,-74,-75,-70,-71,-72,-68,-32,-34,-59,-43,-44,-45,-46,]),'NE_OP':([73,106,122,124,128,130,132,140,142,143,144,146,147,148,149,150,151,152,153,154,155,156,157,158,161,162,163,164,165,173,175,251,252,253,255,256,257,263,264,265,269,327,328,333,345,349,350,356,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,415,421,422,424,463,495,502,504,],[-20,-4,-67,-54,-69,-60,-31,-21,-27,-28,-29,-25,-9,-10,-11,-12,-13,-14,-15,-16,-17,-18,-19,-22
,271,-84,-79,-76,-73,-67,-4,-37,-38,-55,-56,-57,-58,-23,-24,-25,-26,-5,-7,-30,-33,-35,-36,-8,-6,271,-85,-86,-80,-81,-82,-83,-77,-78,-74,-75,-70,-71,-72,-68,-32,-34,-59,-43,-44,-45,-46,]),'XOR':([73,106,122,124,128,130,132,140,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,161,162,163,164,165,173,175,251,252,253,255,256,257,263,264,265,269,327,328,333,345,349,350,354,355,356,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,415,421,422,424,463,495,502,504,],[-20,-4,-67,-54,-69,-60,-31,-21,-27,-28,-29,266,-25,-9,-10,-11,-12,-13,-14,-15,-16,-17,-18,-19,-22,-89,-87,-84,-79,-76,-73,-67,-4,-37,-38,-55,-56,-57,-58,-23,-24,-25,-26,-5,-7,-30,-33,-35,-36,266,-90,-8,-6,-88,-85,-86,-80,-81,-82,-83,-77,-78,-74,-75,-70,-71,-72,-68,-32,-34,-59,-43,-44,-45,-46,]),'OR':([73,106,122,124,128,130,132,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,161,162,163,164,165,173,175,251,252,253,255,256,257,263,264,265,269,327,328,333,345,349,350,352,354,355,356,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,415,421,422,424,463,495,502,504,],[-20,-4,-67,-54,-69,-60,-31,-21,262,-27,-28,-29,-91,-25,-9,-10,-11,-12,-13,-14,-15,-16,-17,-18,-19,-22,-89,-87,-84,-79,-76,-73,-67,-4,-37,-38,-55,-56,-57,-58,-23,-24,-25,-26,-5,-7,-30,-33,-35,-36,262,-92,-90,-8,-6,-88,-85,-86,-80,-81,-82,-83,-77,-78,-74,-75,-70,-71,-72,-68,-32,-34,-59,-43,-44,-45,-46,]),'AND_OP':([73,106,122,124,128,130,131,132,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,161,162,163,164,165,173,175,251,252,253,255,256,257,263,264,265,269,327,328,333,343,345,349,350,352,354,355,356,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,415,421,422,424,463,495,502,504,],[-20,-4,-67,-54,-69,-60,259,-31,-21,-93,-27,-28,-29,-91,-25,-9,-10,-11,-12,-13,-14,-15,-16,-17,-18,-19,-22,-89,-87,-84,-79,-76,-73,-67,-4,-37,-38,-55,-56,-57,-58,-23,-24,-25,-26,-5,-7,-30,259,-33,-35,-36,-94,-92,-90,-8,-6,-88,-85,-86,-80,-81,-82,-83,-77,-78,
-74,-75,-70,-71,-72,-68,-32,-34,-59,-43,-44,-45,-46,]),'CONDOP':([73,106,122,123,124,128,130,131,132,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,161,162,163,164,165,173,175,251,252,253,255,256,257,263,264,265,269,327,328,333,343,345,349,350,352,354,355,356,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,415,421,422,424,463,495,502,504,],[-20,-4,-67,245,-54,-69,-60,-95,-31,-21,-93,-27,-28,-29,-91,-25,-9,-10,-11,-12,-13,-14,-15,-16,-17,-18,-19,-22,-89,-87,-84,-79,-76,-73,-67,-4,-37,-38,-55,-56,-57,-58,-23,-24,-25,-26,-5,-7,-30,-96,-33,-35,-36,-94,-92,-90,-8,-6,-88,-85,-86,-80,-81,-82,-83,-77,-78,-74,-75,-70,-71,-72,-68,-32,-34,-59,-43,-44,-45,-46,]),'OR_OP':([73,106,122,123,124,128,130,131,132,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,161,162,163,164,165,173,175,251,252,253,255,256,257,263,264,265,269,327,328,333,343,345,349,350,352,354,355,356,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,415,421,422,424,463,495,502,504,],[-20,-4,-67,246,-54,-69,-60,-95,-31,-21,-93,-27,-28,-29,-91,-25,-9,-10,-11,-12,-13,-14,-15,-16,-17,-18,-19,-22,-89,-87,-84,-79,-76,-73,-67,-4,-37,-38,-55,-56,-57,-58,-23,-24,-25,-26,-5,-7,-30,-96,-33,-35,-36,-94,-92,-90,-8,-6,-88,-85,-86,-80,-81,-82,-83,-77,-78,-74,-75,-70,-71,-72,-68,-32,-34,-59,-43,-44,-45,-46,]),'RBRACKET':([73,78,120,121,122,123,124,128,130,131,132,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,161,162,163,164,165,171,173,175,185,251,252,253,255,256,257,263,264,265,269,293,327,328,331,333,341,343,344,345,349,350,352,354,355,356,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,375,381,415,421,422,424,428,460,463,495,502,504,],[-20,186,-112,-99,-67,-97,-54,-69,-60,-95,-31,-21,-93,-27,-28,-29,-91,-25,-9,-10,-11,-12,-13,-14,-15,-16,-17,-18,-19,-22,-89,-87,-84,-79,-76,-73,-114,-67,-4,304,-37,-38,-55,-56,-57,-58,-23,-24,-25,-26,380,-5,-7,-113,-30,-100,-96,421,-33,-35,-36,-94,-92,-90,-8,-6,-88,
-85,-86,-80,-81,-82,-83,-77,-78,-74,-75,-70,-71,-72,427,433,-68,-32,-34,-59,465,-98,-43,-44,-45,-46,]),'ELSE':([94,99,100,101,102,103,104,105,208,211,213,214,219,229,230,231,325,326,330,339,340,413,454,455,456,481,490,491,492,],[-242,-233,-234,-235,-236,-237,-238,-251,-243,-244,-246,-253,-252,-262,-263,-264,-245,-239,-241,-261,-265,-240,479,-256,-257,-259,-255,-258,-260,]),'PP_IDENTIFIER_PASTE':([106,146,175,328,356,],[216,267,216,267,267,]),'ELLIPSIS':([308,],[395,]),}\n\n_lr_action = {}\nfor _k, _v in _lr_action_items.items():\n   for _x,_y in zip(_v[0],_v[1]):\n      if not _x in _lr_action:  _lr_action[_x] = {}\n      _lr_action[_x][_k] = _y\ndel _lr_action_items\n\n_lr_goto_items = {'translation_unit':([0,],[1,]),'external_declaration':([1,],[2,]),'directive':([1,],[3,]),'declaration':([1,11,23,58,61,84,96,],[4,60,60,93,60,93,93,]),'function_definition':([1,],[5,]),'define':([1,],[6,]),'undefine':([1,],[7,]),'pragma':([1,],[8,]),'declaration_impl':([1,11,23,58,61,84,96,],[9,9,9,9,9,9,9,]),'declaration_specifier_list':([1,11,23,58,61,79,84,96,292,308,312,376,],[10,62,62,62,62,193,62,62,193,193,193,193,]),'declarator':([1,10,62,66,87,193,319,398,449,],[11,23,166,178,166,309,407,178,407,]),'pragma_pack':([1,],[15,]),'gcc_attributes':([1,11,13,23,25,54,58,61,63,74,79,84,96,111,166,174,201,248,258,287,289,292,294,308,309,312,316,317,320,373,376,401,407,409,423,444,452,477,],[17,17,66,86,88,89,17,17,172,183,17,17,17,172,283,172,172,172,172,372,374,377,382,17,397,398,172,172,172,426,17,172,450,172,172,172,478,489,]),'pointer':([1,10,20,62,66,80,87,170,193,319,377,398,449,],[18,18,81,18,18,194,18,288,311,18,288,311,18,]),'direct_declarator':([1,10,18,62,66,87,193,311,319,398,449,],[19,19,77,19,19,19,19,77,19,19,19,]),'init_declarator_list':([10,62,],[24,24,]),'declaration_specifier':([10,17,62,86,193,377,398,],[25,74,25,74,25,74,74,]),'init_declarator':([10,62,87,],[26,26,199,]),'storage_class_specifier':([10,17,62,86,193,377,398,],[27,27,27,27,27,27,27,]),'type_specif
ier':([10,17,62,86,170,172,193,319,377,398,],[28,28,28,28,290,290,28,290,28,28,]),'type_qualifier':([10,17,20,62,80,86,170,172,193,319,377,398,],[29,29,82,29,195,29,291,291,29,291,29,29,]),'function_specifier':([10,17,62,86,193,377,398,],[30,30,30,30,30,30,30,]),'struct_or_union_specifier':([10,17,62,86,170,172,193,319,377,398,],[46,46,46,46,46,46,46,46,46,46,]),'enum_specifier':([10,17,62,86,170,172,193,319,377,398,],[47,47,47,47,47,47,47,47,47,47,]),'struct_or_union':([10,17,62,86,170,172,193,319,377,398,],[54,54,54,54,54,54,54,54,54,54,]),'declaration_list':([11,23,61,],[58,84,96,]),'compound_statement':([11,23,58,61,84,95,96,114,212,215,218,329,414,416,417,458,479,482,],[59,85,92,100,197,100,100,100,100,100,100,100,100,100,100,100,100,100,]),'pragma_directive_list':([16,],[68,]),'pragma_directive':([16,68,],[70,181,]),'string_literal':([16,61,63,68,78,95,96,107,111,114,119,125,126,127,129,144,174,198,212,215,218,220,221,224,225,227,233,245,246,247,248,254,258,259,262,266,268,270,271,272,273,274,275,276,277,278,279,280,281,282,293,295,315,323,329,334,338,353,375,385,408,414,416,417,418,419,420,423,444,446,451,458,464,479,482,493,494,496,501,],[72,140,140,72,140,140,140,140,140,140,140,140,140,140,140,263,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,425,140,140,140,140,140,140,140,140,140,140,140,140,140,140,483,140,140,140,483,483,483,]),'gcc_attribute':([17,20,66,80,86,88,89,172,183,283,372,374,377,382,397,398,426,450,478,489,],[75,83,75,196,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,]),'type_qualifier_list':([20,],[80,]),'statement_list':([61,96,],[95,212,]),'statement':([61,95,96,114,212,215,218,329,414,416,417,458,479,482,],[98,209,98,226,209,326,330,413,454,455,456,481,490,492,]),'labeled_statement':([61,95,96,114,212,215,218,329,414,416,417,458,479,482,],[99,99,99,99,99,99,99,99,99,99,99,99,99,99,]),'expression_statement':([61,95,96,11
4,212,215,218,227,329,338,414,416,417,458,479,482,],[101,101,101,101,101,101,101,338,101,419,101,101,101,101,101,101,]),'selection_statement':([61,95,96,114,212,215,218,329,414,416,417,458,479,482,],[102,102,102,102,102,102,102,102,102,102,102,102,102,102,]),'iteration_statement':([61,95,96,114,212,215,218,329,414,416,417,458,479,482,],[103,103,103,103,103,103,103,103,103,103,103,103,103,103,]),'jump_statement':([61,95,96,114,212,215,218,329,414,416,417,458,479,482,],[104,104,104,104,104,104,104,104,104,104,104,104,104,104,]),'expression':([61,95,96,111,114,119,174,212,215,218,221,224,225,227,245,247,254,258,329,338,414,416,417,418,419,458,479,482,493,],[109,109,109,222,109,232,222,109,109,109,332,335,336,109,342,344,222,222,109,109,109,109,109,457,459,109,109,109,497,]),'assignment_expression':([61,95,96,111,114,119,174,198,212,215,218,220,221,224,225,227,233,245,247,248,254,258,315,329,338,414,416,417,418,419,423,444,446,458,479,482,493,],[120,120,120,120,120,120,120,314,120,120,120,331,120,120,120,120,341,120,120,347,120,120,314,120,120,120,120,120,120,120,461,347,314,120,120,120,120,]),'conditional_expression':([61,63,78,95,96,107,111,114,119,174,198,212,215,218,220,221,224,225,227,233,245,247,248,254,258,293,295,315,323,329,338,375,385,408,414,416,417,418,419,420,423,444,446,451,458,479,482,493,],[121,171,171,121,121,171,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,171,171,121,171,121,121,171,171,171,121,121,121,121,121,460,121,121,121,171,121,121,121,121,]),'unary_expression':([61,63,78,95,96,107,111,114,119,125,126,127,129,174,198,212,215,218,220,221,224,225,227,233,245,246,247,248,254,258,259,262,266,268,270,271,272,273,274,275,276,277,278,279,280,281,282,293,295,315,323,329,334,338,375,385,408,414,416,417,418,419,420,423,444,446,451,458,479,482,493,],[122,173,173,122,122,173,122,122,122,253,255,173,257,122,122,122,122,122,122,122,122,122,122,122,122,173,122,122,122,122,173,173,173,173,173,173,173,173,173,173,173,173,173,173,17
3,173,173,173,173,122,173,122,173,122,173,173,173,122,122,122,122,122,173,122,122,122,173,122,122,122,122,]),'logical_or_expression':([61,63,78,95,96,107,111,114,119,174,198,212,215,218,220,221,224,225,227,233,245,247,248,254,258,293,295,315,323,329,338,375,385,408,414,416,417,418,419,420,423,444,446,451,458,479,482,493,],[123,123,123,123,123,123,123,123,123,123,123,123,123,123,123,123,123,123,123,123,123,123,123,123,123,123,123,123,123,123,123,123,123,123,123,123,123,123,123,123,123,123,123,123,123,123,123,123,]),'postfix_expression':([61,63,78,95,96,107,111,114,119,125,126,127,129,174,198,212,215,218,220,221,224,225,227,233,245,246,247,248,254,258,259,262,266,268,270,271,272,273,274,275,276,277,278,279,280,281,282,293,295,315,323,329,334,338,375,385,408,414,416,417,418,419,420,423,444,446,451,458,479,482,493,],[124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,]),'unary_operator':([61,63,78,95,96,107,111,114,119,125,126,127,129,174,198,212,215,218,220,221,224,225,227,233,245,246,247,248,254,258,259,262,266,268,270,271,272,273,274,275,276,277,278,279,280,281,282,293,295,315,323,329,334,338,375,385,408,414,416,417,418,419,420,423,444,446,451,458,479,482,493,],[127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,]),'cast_expression':([61,63,78,95,96,107,111,114,119,127,174,198,212,215,218,220,221,224,225,227,233,245,246,247,248,254,258,259,262,266,268,270,271,272,273,274,275,276,277,278,279,280,281,282,293,295,315,323,329,334,338,375,385,408,414,416,417,418,419,420,423,444,446,451,458,479,482,493,],[128,128,12
8,128,128,128,128,128,128,256,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,369,370,371,128,128,128,128,128,415,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,]),'asm_expression':([61,63,78,95,96,107,111,114,119,125,126,127,129,174,198,212,215,218,220,221,224,225,227,233,245,246,247,248,254,258,259,262,266,268,270,271,272,273,274,275,276,277,278,279,280,281,282,293,295,315,323,329,334,338,375,385,408,414,416,417,418,419,420,423,444,446,451,458,479,482,493,],[130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,]),'logical_and_expression':([61,63,78,95,96,107,111,114,119,174,198,212,215,218,220,221,224,225,227,233,245,246,247,248,254,258,293,295,315,323,329,338,375,385,408,414,416,417,418,419,420,423,444,446,451,458,479,482,493,],[131,131,131,131,131,131,131,131,131,131,131,131,131,131,131,131,131,131,131,131,131,343,131,131,131,131,131,131,131,131,131,131,131,131,131,131,131,131,131,131,131,131,131,131,131,131,131,131,131,]),'primary_expression':([61,63,78,95,96,107,111,114,119,125,126,127,129,174,198,212,215,218,220,221,224,225,227,233,245,246,247,248,254,258,259,262,266,268,270,271,272,273,274,275,276,277,278,279,280,281,282,293,295,315,323,329,334,338,375,385,408,414,416,417,418,419,420,423,444,446,451,458,479,482,493,],[132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,]),'inclusive_or_expression':([61,63,78,95,96,107,111,114,119,174,198,212,215,218,220,221,224,225,227,233,245,246,247,248,254,258,259,293,
295,315,323,329,338,375,385,408,414,416,417,418,419,420,423,444,446,451,458,479,482,493,],[141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,352,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,]),'identifier':([61,63,78,95,96,107,111,114,119,125,126,127,129,174,198,212,215,216,218,220,221,224,225,227,233,245,246,247,248,254,258,259,262,266,267,268,270,271,272,273,274,275,276,277,278,279,280,281,282,293,295,315,323,329,334,338,375,385,408,414,416,417,418,419,420,423,444,446,451,458,479,482,493,],[142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,327,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,357,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,]),'constant':([61,63,78,95,96,107,111,114,119,125,126,127,129,174,182,198,212,215,218,220,221,224,225,227,233,245,246,247,248,254,258,259,262,266,268,270,271,272,273,274,275,276,277,278,279,280,281,282,293,295,315,323,329,334,338,375,385,390,408,414,416,417,418,419,420,423,444,446,451,458,469,479,482,493,],[143,143,143,143,143,143,143,143,143,143,143,143,143,143,300,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,441,143,143,143,143,143,143,143,143,143,143,143,143,486,143,143,143,]),'multi_string_literal':([61,63,78,95,96,107,111,114,119,125,126,127,129,174,198,212,215,218,220,221,224,225,227,233,245,246,247,248,254,258,259,262,266,268,270,271,272,273,274,275,276,277,278,279,280,281,282,293,295,315,323,329,334,338,375,385,408,414,416,417,418,419,420,423,444,446,451,458,479,482,493,],[144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,1
44,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,]),'exclusive_or_expression':([61,63,78,95,96,107,111,114,119,174,198,212,215,218,220,221,224,225,227,233,245,246,247,248,254,258,259,262,293,295,315,323,329,338,375,385,408,414,416,417,418,419,420,423,444,446,451,458,479,482,493,],[145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,354,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,]),'macro_param':([61,63,78,95,96,107,111,114,119,125,126,127,129,144,174,198,212,215,218,220,221,224,225,227,233,245,246,247,248,254,258,259,262,266,268,270,271,272,273,274,275,276,277,278,279,280,281,282,293,295,315,323,329,334,338,375,385,408,414,416,417,418,419,420,423,444,446,451,458,479,482,493,],[158,158,158,158,158,158,158,158,158,158,158,158,158,264,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,]),'and_expression':([61,63,78,95,96,107,111,114,119,174,198,212,215,218,220,221,224,225,227,233,245,246,247,248,254,258,259,262,266,293,295,315,323,329,338,375,385,408,414,416,417,418,419,420,423,444,446,451,458,479,482,493,],[159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,355,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,]),'equality_expression':([61,63,78,95,96,107,111,114,119,174,198,212,215,218,220,221,224,225,227,233,245,246,247,248,254,258,259,262,266,268,293,295,315,323,329,338,375,385,408,414,416,417,418,419,420,423,444,446,451,458,479,482,493,],[161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,358,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,16
1,161,]),'relational_expression':([61,63,78,95,96,107,111,114,119,174,198,212,215,218,220,221,224,225,227,233,245,246,247,248,254,258,259,262,266,268,270,271,293,295,315,323,329,338,375,385,408,414,416,417,418,419,420,423,444,446,451,458,479,482,493,],[162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,359,360,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,]),'shift_expression':([61,63,78,95,96,107,111,114,119,174,198,212,215,218,220,221,224,225,227,233,245,246,247,248,254,258,259,262,266,268,270,271,272,273,274,275,293,295,315,323,329,338,375,385,408,414,416,417,418,419,420,423,444,446,451,458,479,482,493,],[163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,361,362,363,364,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,]),'additive_expression':([61,63,78,95,96,107,111,114,119,174,198,212,215,218,220,221,224,225,227,233,245,246,247,248,254,258,259,262,266,268,270,271,272,273,274,275,276,277,293,295,315,323,329,338,375,385,408,414,416,417,418,419,420,423,444,446,451,458,479,482,493,],[164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,365,366,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,]),'multiplicative_expression':([61,63,78,95,96,107,111,114,119,174,198,212,215,218,220,221,224,225,227,233,245,246,247,248,254,258,259,262,266,268,270,271,272,273,274,275,276,277,278,279,293,295,315,323,329,338,375,385,408,414,416,417,418,419,420,423,444,446,451,458,479,482,493,],[165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,367,368,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,1
65,165,165,165,]),'type_name':([63,111,174,248,258,423,444,],[168,223,223,348,351,462,348,]),'constant_expression':([63,78,107,293,295,323,375,385,408,451,],[169,185,217,381,384,411,428,436,452,477,]),'specifier_qualifier_list':([63,111,174,201,248,258,316,317,320,401,409,423,444,],[170,170,170,319,170,170,319,319,319,319,319,170,170,]),'parameter_type_list':([79,292,312,376,],[187,379,379,430,]),'identifier_list':([79,],[189,]),'parameter_list':([79,292,312,376,],[190,190,190,190,]),'parameter_declaration':([79,292,308,312,376,],[192,192,396,192,192,]),'enumerator_list':([90,207,],[203,324,]),'enumerator_list_iso':([90,207,],[204,204,]),'enumerator':([90,207,322,],[205,205,410,]),'assignment_operator':([122,],[233,]),'volatile_opt':([139,],[260,]),'abstract_declarator':([170,193,377,398,],[286,310,431,431,]),'specifier_qualifier':([170,172,319,],[287,294,287,]),'direct_abstract_declarator':([170,193,288,311,377,398,],[289,289,373,373,289,289,]),'macro_parameter_list':([176,],[296,]),'pragma_pack_stack_args':([182,],[301,]),'initializer':([198,315,446,],[313,400,475,]),'member_declaration_list':([201,316,320,],[317,401,409,]),'member_declaration':([201,316,317,320,401,409,],[318,318,403,318,403,403,]),'argument_expression_list':([248,444,],[346,473,]),'gcc_attrib_list':([303,],[391,]),'gcc_attrib':([303,443,],[392,472,]),'initializer_list':([315,],[399,]),'member_declarator_list':([319,],[404,]),'member_declarator':([319,449,],[406,476,]),'str_opt_expr_pair_list':([464,494,501,],[484,498,503,]),'str_opt_expr_pair':([464,494,496,501,],[485,485,499,485,]),}\n\n_lr_goto = {}\nfor _k, _v in _lr_goto_items.items():\n   for _x, _y in zip(_v[0], _v[1]):\n       if not _x in _lr_goto: _lr_goto[_x] = {}\n       _lr_goto[_x][_k] = _y\ndel _lr_goto_items\n_lr_productions = [\n  (\"S' -> translation_unit\",\"S'\",1,None,None,None),\n  ('translation_unit -> <empty>','translation_unit',0,'p_translation_unit','cgrammar.py',124),\n  ('translation_unit -> translation_unit 
external_declaration','translation_unit',2,'p_translation_unit','cgrammar.py',125),\n  ('translation_unit -> translation_unit directive','translation_unit',2,'p_translation_unit','cgrammar.py',126),\n  ('identifier -> IDENTIFIER','identifier',1,'p_identifier','cgrammar.py',135),\n  ('identifier -> IDENTIFIER PP_IDENTIFIER_PASTE identifier','identifier',3,'p_identifier','cgrammar.py',136),\n  ('identifier -> PP_MACRO_PARAM PP_IDENTIFIER_PASTE identifier','identifier',3,'p_identifier','cgrammar.py',137),\n  ('identifier -> IDENTIFIER PP_IDENTIFIER_PASTE PP_MACRO_PARAM','identifier',3,'p_identifier','cgrammar.py',138),\n  ('identifier -> PP_MACRO_PARAM PP_IDENTIFIER_PASTE PP_MACRO_PARAM','identifier',3,'p_identifier','cgrammar.py',139),\n  ('constant -> I_CONST_HEX','constant',1,'p_constant_integer','cgrammar.py',157),\n  ('constant -> I_CONST_DEC','constant',1,'p_constant_integer','cgrammar.py',158),\n  ('constant -> I_CONST_OCT','constant',1,'p_constant_integer','cgrammar.py',159),\n  ('constant -> I_CONST_BIN','constant',1,'p_constant_integer','cgrammar.py',160),\n  ('constant -> F_CONST_1','constant',1,'p_constant_float','cgrammar.py',173),\n  ('constant -> F_CONST_2','constant',1,'p_constant_float','cgrammar.py',174),\n  ('constant -> F_CONST_3','constant',1,'p_constant_float','cgrammar.py',175),\n  ('constant -> F_CONST_4','constant',1,'p_constant_float','cgrammar.py',176),\n  ('constant -> F_CONST_5','constant',1,'p_constant_float','cgrammar.py',177),\n  ('constant -> F_CONST_6','constant',1,'p_constant_float','cgrammar.py',178),\n  ('constant -> CHARACTER_CONSTANT','constant',1,'p_constant_character','cgrammar.py',184),\n  ('string_literal -> STRING_LITERAL','string_literal',1,'p_string_literal','cgrammar.py',192),\n  ('multi_string_literal -> string_literal','multi_string_literal',1,'p_multi_string_literal','cgrammar.py',198),\n  ('multi_string_literal -> macro_param','multi_string_literal',1,'p_multi_string_literal','cgrammar.py',199),\n  
('multi_string_literal -> multi_string_literal string_literal','multi_string_literal',2,'p_multi_string_literal','cgrammar.py',200),\n  ('multi_string_literal -> multi_string_literal macro_param','multi_string_literal',2,'p_multi_string_literal','cgrammar.py',201),\n  ('macro_param -> PP_MACRO_PARAM','macro_param',1,'p_macro_param','cgrammar.py',212),\n  ('macro_param -> PP_STRINGIFY PP_MACRO_PARAM','macro_param',2,'p_macro_param','cgrammar.py',213),\n  ('primary_expression -> identifier','primary_expression',1,'p_primary_expression','cgrammar.py',222),\n  ('primary_expression -> constant','primary_expression',1,'p_primary_expression','cgrammar.py',223),\n  ('primary_expression -> multi_string_literal','primary_expression',1,'p_primary_expression','cgrammar.py',224),\n  ('primary_expression -> LPAREN expression RPAREN','primary_expression',3,'p_primary_expression','cgrammar.py',225),\n  ('postfix_expression -> primary_expression','postfix_expression',1,'p_postfix_expression','cgrammar.py',234),\n  ('postfix_expression -> postfix_expression LBRACKET expression RBRACKET','postfix_expression',4,'p_postfix_expression','cgrammar.py',235),\n  ('postfix_expression -> postfix_expression LPAREN RPAREN','postfix_expression',3,'p_postfix_expression','cgrammar.py',236),\n  ('postfix_expression -> postfix_expression LPAREN argument_expression_list RPAREN','postfix_expression',4,'p_postfix_expression','cgrammar.py',237),\n  ('postfix_expression -> postfix_expression PERIOD IDENTIFIER','postfix_expression',3,'p_postfix_expression','cgrammar.py',238),\n  ('postfix_expression -> postfix_expression PTR_OP IDENTIFIER','postfix_expression',3,'p_postfix_expression','cgrammar.py',239),\n  ('postfix_expression -> postfix_expression INC_OP','postfix_expression',2,'p_postfix_expression','cgrammar.py',240),\n  ('postfix_expression -> postfix_expression DEC_OP','postfix_expression',2,'p_postfix_expression','cgrammar.py',241),\n  ('argument_expression_list -> 
assignment_expression','argument_expression_list',1,'p_argument_expression_list','cgrammar.py',280),\n  ('argument_expression_list -> argument_expression_list COMMA assignment_expression','argument_expression_list',3,'p_argument_expression_list','cgrammar.py',281),\n  ('argument_expression_list -> type_name','argument_expression_list',1,'p_argument_expression_list','cgrammar.py',282),\n  ('argument_expression_list -> argument_expression_list COMMA type_name','argument_expression_list',3,'p_argument_expression_list','cgrammar.py',283),\n  ('asm_expression -> __ASM__ volatile_opt LPAREN string_literal RPAREN','asm_expression',5,'p_asm_expression','cgrammar.py',293),\n  ('asm_expression -> __ASM__ volatile_opt LPAREN string_literal COLON str_opt_expr_pair_list RPAREN','asm_expression',7,'p_asm_expression','cgrammar.py',294),\n  ('asm_expression -> __ASM__ volatile_opt LPAREN string_literal COLON str_opt_expr_pair_list COLON str_opt_expr_pair_list RPAREN','asm_expression',9,'p_asm_expression','cgrammar.py',295),\n  ('asm_expression -> __ASM__ volatile_opt LPAREN string_literal COLON str_opt_expr_pair_list COLON str_opt_expr_pair_list COLON str_opt_expr_pair_list RPAREN','asm_expression',11,'p_asm_expression','cgrammar.py',296),\n  ('str_opt_expr_pair_list -> <empty>','str_opt_expr_pair_list',0,'p_str_opt_expr_pair_list','cgrammar.py',309),\n  ('str_opt_expr_pair_list -> str_opt_expr_pair','str_opt_expr_pair_list',1,'p_str_opt_expr_pair_list','cgrammar.py',310),\n  ('str_opt_expr_pair_list -> str_opt_expr_pair_list COMMA str_opt_expr_pair','str_opt_expr_pair_list',3,'p_str_opt_expr_pair_list','cgrammar.py',311),\n  ('str_opt_expr_pair -> string_literal','str_opt_expr_pair',1,'p_str_opt_expr_pair','cgrammar.py',316),\n  ('str_opt_expr_pair -> string_literal LPAREN expression RPAREN','str_opt_expr_pair',4,'p_str_opt_expr_pair','cgrammar.py',317),\n  ('volatile_opt -> <empty>','volatile_opt',0,'p_volatile_opt','cgrammar.py',322),\n  ('volatile_opt -> 
VOLATILE','volatile_opt',1,'p_volatile_opt','cgrammar.py',323),\n  ('unary_expression -> postfix_expression','unary_expression',1,'p_unary_expression','cgrammar.py',340),\n  ('unary_expression -> INC_OP unary_expression','unary_expression',2,'p_unary_expression','cgrammar.py',341),\n  ('unary_expression -> DEC_OP unary_expression','unary_expression',2,'p_unary_expression','cgrammar.py',342),\n  ('unary_expression -> unary_operator cast_expression','unary_expression',2,'p_unary_expression','cgrammar.py',343),\n  ('unary_expression -> SIZEOF unary_expression','unary_expression',2,'p_unary_expression','cgrammar.py',344),\n  ('unary_expression -> SIZEOF LPAREN type_name RPAREN','unary_expression',4,'p_unary_expression','cgrammar.py',345),\n  ('unary_expression -> asm_expression','unary_expression',1,'p_unary_expression','cgrammar.py',346),\n  ('unary_operator -> AND','unary_operator',1,'p_unary_operator','cgrammar.py',363),\n  ('unary_operator -> TIMES','unary_operator',1,'p_unary_operator','cgrammar.py',364),\n  ('unary_operator -> PLUS','unary_operator',1,'p_unary_operator','cgrammar.py',365),\n  ('unary_operator -> MINUS','unary_operator',1,'p_unary_operator','cgrammar.py',366),\n  ('unary_operator -> NOT','unary_operator',1,'p_unary_operator','cgrammar.py',367),\n  ('unary_operator -> LNOT','unary_operator',1,'p_unary_operator','cgrammar.py',368),\n  ('cast_expression -> unary_expression','cast_expression',1,'p_cast_expression','cgrammar.py',374),\n  ('cast_expression -> LPAREN type_name RPAREN cast_expression','cast_expression',4,'p_cast_expression','cgrammar.py',375),\n  ('multiplicative_expression -> cast_expression','multiplicative_expression',1,'p_multiplicative_expression','cgrammar.py',391),\n  ('multiplicative_expression -> multiplicative_expression TIMES cast_expression','multiplicative_expression',3,'p_multiplicative_expression','cgrammar.py',392),\n  ('multiplicative_expression -> multiplicative_expression DIVIDE 
cast_expression','multiplicative_expression',3,'p_multiplicative_expression','cgrammar.py',393),\n  ('multiplicative_expression -> multiplicative_expression MOD cast_expression','multiplicative_expression',3,'p_multiplicative_expression','cgrammar.py',394),\n  ('additive_expression -> multiplicative_expression','additive_expression',1,'p_additive_expression','cgrammar.py',410),\n  ('additive_expression -> additive_expression PLUS multiplicative_expression','additive_expression',3,'p_additive_expression','cgrammar.py',411),\n  ('additive_expression -> additive_expression MINUS multiplicative_expression','additive_expression',3,'p_additive_expression','cgrammar.py',412),\n  ('shift_expression -> additive_expression','shift_expression',1,'p_shift_expression','cgrammar.py',428),\n  ('shift_expression -> shift_expression LEFT_OP additive_expression','shift_expression',3,'p_shift_expression','cgrammar.py',429),\n  ('shift_expression -> shift_expression RIGHT_OP additive_expression','shift_expression',3,'p_shift_expression','cgrammar.py',430),\n  ('relational_expression -> shift_expression','relational_expression',1,'p_relational_expression','cgrammar.py',448),\n  ('relational_expression -> relational_expression LT shift_expression','relational_expression',3,'p_relational_expression','cgrammar.py',449),\n  ('relational_expression -> relational_expression GT shift_expression','relational_expression',3,'p_relational_expression','cgrammar.py',450),\n  ('relational_expression -> relational_expression LE_OP shift_expression','relational_expression',3,'p_relational_expression','cgrammar.py',451),\n  ('relational_expression -> relational_expression GE_OP shift_expression','relational_expression',3,'p_relational_expression','cgrammar.py',452),\n  ('equality_expression -> relational_expression','equality_expression',1,'p_equality_expression','cgrammar.py',468),\n  ('equality_expression -> equality_expression EQ_OP 
relational_expression','equality_expression',3,'p_equality_expression','cgrammar.py',469),\n  ('equality_expression -> equality_expression NE_OP relational_expression','equality_expression',3,'p_equality_expression','cgrammar.py',470),\n  ('and_expression -> equality_expression','and_expression',1,'p_and_expression','cgrammar.py',480),\n  ('and_expression -> and_expression AND equality_expression','and_expression',3,'p_and_expression','cgrammar.py',481),\n  ('exclusive_or_expression -> and_expression','exclusive_or_expression',1,'p_exclusive_or_expression','cgrammar.py',492),\n  ('exclusive_or_expression -> exclusive_or_expression XOR and_expression','exclusive_or_expression',3,'p_exclusive_or_expression','cgrammar.py',493),\n  ('inclusive_or_expression -> exclusive_or_expression','inclusive_or_expression',1,'p_inclusive_or_expression','cgrammar.py',504),\n  ('inclusive_or_expression -> inclusive_or_expression OR exclusive_or_expression','inclusive_or_expression',3,'p_inclusive_or_expression','cgrammar.py',505),\n  ('logical_and_expression -> inclusive_or_expression','logical_and_expression',1,'p_logical_and_expression','cgrammar.py',516),\n  ('logical_and_expression -> logical_and_expression AND_OP inclusive_or_expression','logical_and_expression',3,'p_logical_and_expression','cgrammar.py',517),\n  ('logical_or_expression -> logical_and_expression','logical_or_expression',1,'p_logical_or_expression','cgrammar.py',528),\n  ('logical_or_expression -> logical_or_expression OR_OP logical_and_expression','logical_or_expression',3,'p_logical_or_expression','cgrammar.py',529),\n  ('conditional_expression -> logical_or_expression','conditional_expression',1,'p_conditional_expression','cgrammar.py',540),\n  ('conditional_expression -> logical_or_expression CONDOP expression COLON conditional_expression','conditional_expression',5,'p_conditional_expression','cgrammar.py',541),\n  ('assignment_expression -> 
conditional_expression','assignment_expression',1,'p_assignment_expression','cgrammar.py',564),\n  ('assignment_expression -> unary_expression assignment_operator assignment_expression','assignment_expression',3,'p_assignment_expression','cgrammar.py',565),\n  ('assignment_operator -> EQUALS','assignment_operator',1,'p_assignment_operator','cgrammar.py',580),\n  ('assignment_operator -> MUL_ASSIGN','assignment_operator',1,'p_assignment_operator','cgrammar.py',581),\n  ('assignment_operator -> DIV_ASSIGN','assignment_operator',1,'p_assignment_operator','cgrammar.py',582),\n  ('assignment_operator -> MOD_ASSIGN','assignment_operator',1,'p_assignment_operator','cgrammar.py',583),\n  ('assignment_operator -> ADD_ASSIGN','assignment_operator',1,'p_assignment_operator','cgrammar.py',584),\n  ('assignment_operator -> SUB_ASSIGN','assignment_operator',1,'p_assignment_operator','cgrammar.py',585),\n  ('assignment_operator -> LEFT_ASSIGN','assignment_operator',1,'p_assignment_operator','cgrammar.py',586),\n  ('assignment_operator -> RIGHT_ASSIGN','assignment_operator',1,'p_assignment_operator','cgrammar.py',587),\n  ('assignment_operator -> AND_ASSIGN','assignment_operator',1,'p_assignment_operator','cgrammar.py',588),\n  ('assignment_operator -> XOR_ASSIGN','assignment_operator',1,'p_assignment_operator','cgrammar.py',589),\n  ('assignment_operator -> OR_ASSIGN','assignment_operator',1,'p_assignment_operator','cgrammar.py',590),\n  ('expression -> assignment_expression','expression',1,'p_expression','cgrammar.py',596),\n  ('expression -> expression COMMA assignment_expression','expression',3,'p_expression','cgrammar.py',597),\n  ('constant_expression -> conditional_expression','constant_expression',1,'p_constant_expression','cgrammar.py',604),\n  ('declaration -> declaration_impl SEMI','declaration',2,'p_declaration','cgrammar.py',610),\n  ('declaration_impl -> declaration_specifier_list','declaration_impl',1,'p_declaration_impl','cgrammar.py',618),\n  ('declaration_impl -> 
declaration_specifier_list init_declarator_list','declaration_impl',2,'p_declaration_impl','cgrammar.py',619),\n  ('declaration_specifier_list -> gcc_attributes declaration_specifier gcc_attributes','declaration_specifier_list',3,'p_declaration_specifier_list','cgrammar.py',638),\n  ('declaration_specifier_list -> declaration_specifier_list declaration_specifier gcc_attributes','declaration_specifier_list',3,'p_declaration_specifier_list','cgrammar.py',639),\n  ('declaration_specifier -> storage_class_specifier','declaration_specifier',1,'p_declaration_specifier','cgrammar.py',652),\n  ('declaration_specifier -> type_specifier','declaration_specifier',1,'p_declaration_specifier','cgrammar.py',653),\n  ('declaration_specifier -> type_qualifier','declaration_specifier',1,'p_declaration_specifier','cgrammar.py',654),\n  ('declaration_specifier -> function_specifier','declaration_specifier',1,'p_declaration_specifier','cgrammar.py',655),\n  ('init_declarator_list -> init_declarator','init_declarator_list',1,'p_init_declarator_list','cgrammar.py',661),\n  ('init_declarator_list -> init_declarator_list COMMA init_declarator','init_declarator_list',3,'p_init_declarator_list','cgrammar.py',662),\n  ('init_declarator -> declarator gcc_attributes','init_declarator',2,'p_init_declarator','cgrammar.py',671),\n  ('init_declarator -> declarator gcc_attributes EQUALS initializer','init_declarator',4,'p_init_declarator','cgrammar.py',672),\n  ('storage_class_specifier -> TYPEDEF','storage_class_specifier',1,'p_storage_class_specifier','cgrammar.py',683),\n  ('storage_class_specifier -> EXTERN','storage_class_specifier',1,'p_storage_class_specifier','cgrammar.py',684),\n  ('storage_class_specifier -> STATIC','storage_class_specifier',1,'p_storage_class_specifier','cgrammar.py',685),\n  ('storage_class_specifier -> AUTO','storage_class_specifier',1,'p_storage_class_specifier','cgrammar.py',686),\n  ('storage_class_specifier -> 
REGISTER','storage_class_specifier',1,'p_storage_class_specifier','cgrammar.py',687),\n  ('type_specifier -> VOID','type_specifier',1,'p_type_specifier','cgrammar.py',693),\n  ('type_specifier -> _BOOL','type_specifier',1,'p_type_specifier','cgrammar.py',694),\n  ('type_specifier -> CHAR','type_specifier',1,'p_type_specifier','cgrammar.py',695),\n  ('type_specifier -> SHORT','type_specifier',1,'p_type_specifier','cgrammar.py',696),\n  ('type_specifier -> INT','type_specifier',1,'p_type_specifier','cgrammar.py',697),\n  ('type_specifier -> LONG','type_specifier',1,'p_type_specifier','cgrammar.py',698),\n  ('type_specifier -> FLOAT','type_specifier',1,'p_type_specifier','cgrammar.py',699),\n  ('type_specifier -> DOUBLE','type_specifier',1,'p_type_specifier','cgrammar.py',700),\n  ('type_specifier -> SIGNED','type_specifier',1,'p_type_specifier','cgrammar.py',701),\n  ('type_specifier -> UNSIGNED','type_specifier',1,'p_type_specifier','cgrammar.py',702),\n  ('type_specifier -> struct_or_union_specifier','type_specifier',1,'p_type_specifier','cgrammar.py',703),\n  ('type_specifier -> enum_specifier','type_specifier',1,'p_type_specifier','cgrammar.py',704),\n  ('type_specifier -> TYPE_NAME','type_specifier',1,'p_type_specifier','cgrammar.py',705),\n  ('struct_or_union_specifier -> struct_or_union gcc_attributes IDENTIFIER LBRACE member_declaration_list RBRACE','struct_or_union_specifier',6,'p_struct_or_union_specifier','cgrammar.py',714),\n  ('struct_or_union_specifier -> struct_or_union gcc_attributes TYPE_NAME LBRACE member_declaration_list RBRACE','struct_or_union_specifier',6,'p_struct_or_union_specifier','cgrammar.py',715),\n  ('struct_or_union_specifier -> struct_or_union gcc_attributes LBRACE member_declaration_list RBRACE','struct_or_union_specifier',5,'p_struct_or_union_specifier','cgrammar.py',716),\n  ('struct_or_union_specifier -> struct_or_union gcc_attributes IDENTIFIER','struct_or_union_specifier',3,'p_struct_or_union_specifier','cgrammar.py',717),\n  
('struct_or_union_specifier -> struct_or_union gcc_attributes TYPE_NAME','struct_or_union_specifier',3,'p_struct_or_union_specifier','cgrammar.py',718),\n  ('struct_or_union -> STRUCT','struct_or_union',1,'p_struct_or_union','cgrammar.py',743),\n  ('struct_or_union -> UNION','struct_or_union',1,'p_struct_or_union','cgrammar.py',744),\n  ('gcc_attributes -> <empty>','gcc_attributes',0,'p_gcc_attributes','cgrammar.py',750),\n  ('gcc_attributes -> gcc_attributes gcc_attribute','gcc_attributes',2,'p_gcc_attributes','cgrammar.py',751),\n  ('gcc_attribute -> __ATTRIBUTE__ LPAREN LPAREN gcc_attrib_list RPAREN RPAREN','gcc_attribute',6,'p_gcc_attribute','cgrammar.py',762),\n  ('gcc_attrib_list -> gcc_attrib','gcc_attrib_list',1,'p_gcc_attrib_list','cgrammar.py',769),\n  ('gcc_attrib_list -> gcc_attrib_list COMMA gcc_attrib','gcc_attrib_list',3,'p_gcc_attrib_list','cgrammar.py',770),\n  ('gcc_attrib -> <empty>','gcc_attrib',0,'p_gcc_attrib','cgrammar.py',779),\n  ('gcc_attrib -> IDENTIFIER','gcc_attrib',1,'p_gcc_attrib','cgrammar.py',780),\n  ('gcc_attrib -> IDENTIFIER LPAREN argument_expression_list RPAREN','gcc_attrib',4,'p_gcc_attrib','cgrammar.py',781),\n  ('member_declaration_list -> member_declaration','member_declaration_list',1,'p_member_declaration_list','cgrammar.py',794),\n  ('member_declaration_list -> member_declaration_list member_declaration','member_declaration_list',2,'p_member_declaration_list','cgrammar.py',795),\n  ('member_declaration -> specifier_qualifier_list member_declarator_list SEMI','member_declaration',3,'p_member_declaration','cgrammar.py',804),\n  ('member_declaration -> specifier_qualifier_list SEMI','member_declaration',2,'p_member_declaration','cgrammar.py',805),\n  ('specifier_qualifier_list -> gcc_attributes specifier_qualifier gcc_attributes','specifier_qualifier_list',3,'p_specifier_qualifier_list','cgrammar.py',826),\n  ('specifier_qualifier_list -> specifier_qualifier_list specifier_qualifier 
gcc_attributes','specifier_qualifier_list',3,'p_specifier_qualifier_list','cgrammar.py',827),\n  ('specifier_qualifier -> type_specifier','specifier_qualifier',1,'p_specifier_qualifier','cgrammar.py',836),\n  ('specifier_qualifier -> type_qualifier','specifier_qualifier',1,'p_specifier_qualifier','cgrammar.py',837),\n  ('member_declarator_list -> member_declarator','member_declarator_list',1,'p_member_declarator_list','cgrammar.py',843),\n  ('member_declarator_list -> member_declarator_list COMMA member_declarator','member_declarator_list',3,'p_member_declarator_list','cgrammar.py',844),\n  ('member_declarator -> declarator gcc_attributes','member_declarator',2,'p_member_declarator','cgrammar.py',853),\n  ('member_declarator -> COLON constant_expression gcc_attributes','member_declarator',3,'p_member_declarator','cgrammar.py',854),\n  ('member_declarator -> declarator COLON constant_expression gcc_attributes','member_declarator',4,'p_member_declarator','cgrammar.py',855),\n  ('enum_specifier -> ENUM LBRACE enumerator_list RBRACE','enum_specifier',4,'p_enum_specifier','cgrammar.py',870),\n  ('enum_specifier -> ENUM IDENTIFIER LBRACE enumerator_list RBRACE','enum_specifier',5,'p_enum_specifier','cgrammar.py',871),\n  ('enum_specifier -> ENUM IDENTIFIER','enum_specifier',2,'p_enum_specifier','cgrammar.py',872),\n  ('enumerator_list -> enumerator_list_iso','enumerator_list',1,'p_enumerator_list','cgrammar.py',886),\n  ('enumerator_list -> enumerator_list_iso COMMA','enumerator_list',2,'p_enumerator_list','cgrammar.py',887),\n  ('enumerator_list_iso -> enumerator','enumerator_list_iso',1,'p_enumerator_list_iso','cgrammar.py',895),\n  ('enumerator_list_iso -> enumerator_list_iso COMMA enumerator','enumerator_list_iso',3,'p_enumerator_list_iso','cgrammar.py',896),\n  ('enumerator -> IDENTIFIER','enumerator',1,'p_enumerator','cgrammar.py',905),\n  ('enumerator -> IDENTIFIER EQUALS constant_expression','enumerator',3,'p_enumerator','cgrammar.py',906),\n  ('type_qualifier -> 
CONST','type_qualifier',1,'p_type_qualifier','cgrammar.py',915),\n  ('type_qualifier -> VOLATILE','type_qualifier',1,'p_type_qualifier','cgrammar.py',916),\n  ('type_qualifier -> RESTRICT','type_qualifier',1,'p_type_qualifier','cgrammar.py',917),\n  ('function_specifier -> INLINE','function_specifier',1,'p_function_specifier','cgrammar.py',923),\n  ('function_specifier -> _NORETURN','function_specifier',1,'p_function_specifier','cgrammar.py',924),\n  ('declarator -> pointer direct_declarator','declarator',2,'p_declarator','cgrammar.py',929),\n  ('declarator -> direct_declarator','declarator',1,'p_declarator','cgrammar.py',930),\n  ('direct_declarator -> IDENTIFIER','direct_declarator',1,'p_direct_declarator','cgrammar.py',944),\n  ('direct_declarator -> LPAREN gcc_attributes declarator RPAREN','direct_declarator',4,'p_direct_declarator','cgrammar.py',945),\n  ('direct_declarator -> direct_declarator LBRACKET constant_expression RBRACKET','direct_declarator',4,'p_direct_declarator','cgrammar.py',946),\n  ('direct_declarator -> direct_declarator LBRACKET RBRACKET','direct_declarator',3,'p_direct_declarator','cgrammar.py',947),\n  ('direct_declarator -> direct_declarator LPAREN parameter_type_list RPAREN','direct_declarator',4,'p_direct_declarator','cgrammar.py',948),\n  ('direct_declarator -> direct_declarator LPAREN identifier_list RPAREN','direct_declarator',4,'p_direct_declarator','cgrammar.py',949),\n  ('direct_declarator -> direct_declarator LPAREN RPAREN','direct_declarator',3,'p_direct_declarator','cgrammar.py',950),\n  ('pointer -> TIMES','pointer',1,'p_pointer','cgrammar.py',980),\n  ('pointer -> TIMES type_qualifier_list','pointer',2,'p_pointer','cgrammar.py',981),\n  ('pointer -> TIMES pointer','pointer',2,'p_pointer','cgrammar.py',982),\n  ('pointer -> TIMES type_qualifier_list pointer','pointer',3,'p_pointer','cgrammar.py',983),\n  ('type_qualifier_list -> type_qualifier','type_qualifier_list',1,'p_type_qualifier_list','cgrammar.py',1005),\n  
('type_qualifier_list -> gcc_attribute','type_qualifier_list',1,'p_type_qualifier_list','cgrammar.py',1006),\n  ('type_qualifier_list -> type_qualifier_list type_qualifier','type_qualifier_list',2,'p_type_qualifier_list','cgrammar.py',1007),\n  ('type_qualifier_list -> type_qualifier_list gcc_attribute','type_qualifier_list',2,'p_type_qualifier_list','cgrammar.py',1008),\n  ('parameter_type_list -> parameter_list','parameter_type_list',1,'p_parameter_type_list','cgrammar.py',1017),\n  ('parameter_type_list -> parameter_list COMMA ELLIPSIS','parameter_type_list',3,'p_parameter_type_list','cgrammar.py',1018),\n  ('parameter_list -> parameter_declaration','parameter_list',1,'p_parameter_list','cgrammar.py',1027),\n  ('parameter_list -> parameter_list COMMA parameter_declaration','parameter_list',3,'p_parameter_list','cgrammar.py',1028),\n  ('parameter_declaration -> declaration_specifier_list declarator gcc_attributes','parameter_declaration',3,'p_parameter_declaration','cgrammar.py',1037),\n  ('parameter_declaration -> declaration_specifier_list abstract_declarator','parameter_declaration',2,'p_parameter_declaration','cgrammar.py',1038),\n  ('parameter_declaration -> declaration_specifier_list','parameter_declaration',1,'p_parameter_declaration','cgrammar.py',1039),\n  ('identifier_list -> IDENTIFIER','identifier_list',1,'p_identifier_list','cgrammar.py',1055),\n  ('identifier_list -> identifier_list COMMA IDENTIFIER','identifier_list',3,'p_identifier_list','cgrammar.py',1056),\n  ('type_name -> specifier_qualifier_list','type_name',1,'p_type_name','cgrammar.py',1069),\n  ('type_name -> specifier_qualifier_list abstract_declarator','type_name',2,'p_type_name','cgrammar.py',1070),\n  ('abstract_declarator -> pointer','abstract_declarator',1,'p_abstract_declarator','cgrammar.py',1086),\n  ('abstract_declarator -> direct_abstract_declarator gcc_attributes','abstract_declarator',2,'p_abstract_declarator','cgrammar.py',1087),\n  ('abstract_declarator -> pointer 
direct_abstract_declarator gcc_attributes','abstract_declarator',3,'p_abstract_declarator','cgrammar.py',1088),\n  ('direct_abstract_declarator -> LPAREN gcc_attributes abstract_declarator RPAREN','direct_abstract_declarator',4,'p_direct_abstract_declarator','cgrammar.py',1115),\n  ('direct_abstract_declarator -> LBRACKET RBRACKET','direct_abstract_declarator',2,'p_direct_abstract_declarator','cgrammar.py',1116),\n  ('direct_abstract_declarator -> LBRACKET constant_expression RBRACKET','direct_abstract_declarator',3,'p_direct_abstract_declarator','cgrammar.py',1117),\n  ('direct_abstract_declarator -> direct_abstract_declarator LBRACKET RBRACKET','direct_abstract_declarator',3,'p_direct_abstract_declarator','cgrammar.py',1118),\n  ('direct_abstract_declarator -> direct_abstract_declarator LBRACKET constant_expression RBRACKET','direct_abstract_declarator',4,'p_direct_abstract_declarator','cgrammar.py',1119),\n  ('direct_abstract_declarator -> LPAREN RPAREN','direct_abstract_declarator',2,'p_direct_abstract_declarator','cgrammar.py',1120),\n  ('direct_abstract_declarator -> LPAREN parameter_type_list RPAREN','direct_abstract_declarator',3,'p_direct_abstract_declarator','cgrammar.py',1121),\n  ('direct_abstract_declarator -> direct_abstract_declarator LPAREN RPAREN','direct_abstract_declarator',3,'p_direct_abstract_declarator','cgrammar.py',1122),\n  ('direct_abstract_declarator -> direct_abstract_declarator LPAREN parameter_type_list RPAREN','direct_abstract_declarator',4,'p_direct_abstract_declarator','cgrammar.py',1123),\n  ('initializer -> assignment_expression','initializer',1,'p_initializer','cgrammar.py',1162),\n  ('initializer -> LBRACE initializer_list RBRACE','initializer',3,'p_initializer','cgrammar.py',1163),\n  ('initializer -> LBRACE initializer_list COMMA RBRACE','initializer',4,'p_initializer','cgrammar.py',1164),\n  ('initializer_list -> initializer','initializer_list',1,'p_initializer_list','cgrammar.py',1169),\n  ('initializer_list -> 
initializer_list COMMA initializer','initializer_list',3,'p_initializer_list','cgrammar.py',1170),\n  ('statement -> labeled_statement','statement',1,'p_statement','cgrammar.py',1175),\n  ('statement -> compound_statement','statement',1,'p_statement','cgrammar.py',1176),\n  ('statement -> expression_statement','statement',1,'p_statement','cgrammar.py',1177),\n  ('statement -> selection_statement','statement',1,'p_statement','cgrammar.py',1178),\n  ('statement -> iteration_statement','statement',1,'p_statement','cgrammar.py',1179),\n  ('statement -> jump_statement','statement',1,'p_statement','cgrammar.py',1180),\n  ('labeled_statement -> IDENTIFIER COLON statement','labeled_statement',3,'p_labeled_statement','cgrammar.py',1185),\n  ('labeled_statement -> CASE constant_expression COLON statement','labeled_statement',4,'p_labeled_statement','cgrammar.py',1186),\n  ('labeled_statement -> DEFAULT COLON statement','labeled_statement',3,'p_labeled_statement','cgrammar.py',1187),\n  ('compound_statement -> LBRACE RBRACE','compound_statement',2,'p_compound_statement','cgrammar.py',1192),\n  ('compound_statement -> LBRACE statement_list RBRACE','compound_statement',3,'p_compound_statement','cgrammar.py',1193),\n  ('compound_statement -> LBRACE declaration_list RBRACE','compound_statement',3,'p_compound_statement','cgrammar.py',1194),\n  ('compound_statement -> LBRACE declaration_list statement_list RBRACE','compound_statement',4,'p_compound_statement','cgrammar.py',1195),\n  ('compound_statement -> LBRACE error RBRACE','compound_statement',3,'p_compound_statement_error','cgrammar.py',1200),\n  ('declaration_list -> declaration','declaration_list',1,'p_declaration_list','cgrammar.py',1206),\n  ('declaration_list -> declaration_list declaration','declaration_list',2,'p_declaration_list','cgrammar.py',1207),\n  ('statement_list -> statement','statement_list',1,'p_statement_list','cgrammar.py',1212),\n  ('statement_list -> statement_list 
statement','statement_list',2,'p_statement_list','cgrammar.py',1213),\n  ('expression_statement -> SEMI','expression_statement',1,'p_expression_statement','cgrammar.py',1218),\n  ('expression_statement -> expression SEMI','expression_statement',2,'p_expression_statement','cgrammar.py',1219),\n  ('expression_statement -> error SEMI','expression_statement',2,'p_expression_statement_error','cgrammar.py',1224),\n  ('selection_statement -> IF LPAREN expression RPAREN statement','selection_statement',5,'p_selection_statement','cgrammar.py',1230),\n  ('selection_statement -> IF LPAREN expression RPAREN statement ELSE statement','selection_statement',7,'p_selection_statement','cgrammar.py',1231),\n  ('selection_statement -> SWITCH LPAREN expression RPAREN statement','selection_statement',5,'p_selection_statement','cgrammar.py',1232),\n  ('iteration_statement -> WHILE LPAREN expression RPAREN statement','iteration_statement',5,'p_iteration_statement','cgrammar.py',1237),\n  ('iteration_statement -> DO statement WHILE LPAREN expression RPAREN SEMI','iteration_statement',7,'p_iteration_statement','cgrammar.py',1238),\n  ('iteration_statement -> FOR LPAREN expression_statement expression_statement RPAREN statement','iteration_statement',6,'p_iteration_statement','cgrammar.py',1239),\n  ('iteration_statement -> FOR LPAREN expression_statement expression_statement expression RPAREN statement','iteration_statement',7,'p_iteration_statement','cgrammar.py',1240),\n  ('jump_statement -> GOTO IDENTIFIER SEMI','jump_statement',3,'p_jump_statement','cgrammar.py',1245),\n  ('jump_statement -> CONTINUE SEMI','jump_statement',2,'p_jump_statement','cgrammar.py',1246),\n  ('jump_statement -> BREAK SEMI','jump_statement',2,'p_jump_statement','cgrammar.py',1247),\n  ('jump_statement -> RETURN SEMI','jump_statement',2,'p_jump_statement','cgrammar.py',1248),\n  ('jump_statement -> RETURN expression SEMI','jump_statement',3,'p_jump_statement','cgrammar.py',1249),\n  ('external_declaration -> 
declaration','external_declaration',1,'p_external_declaration','cgrammar.py',1254),\n  ('external_declaration -> function_definition','external_declaration',1,'p_external_declaration','cgrammar.py',1255),\n  ('function_definition -> declaration_specifier_list declarator declaration_list compound_statement','function_definition',4,'p_function_definition','cgrammar.py',1261),\n  ('function_definition -> declaration_specifier_list declarator compound_statement','function_definition',3,'p_function_definition','cgrammar.py',1262),\n  ('function_definition -> declarator declaration_list compound_statement','function_definition',3,'p_function_definition','cgrammar.py',1263),\n  ('function_definition -> declarator compound_statement','function_definition',2,'p_function_definition','cgrammar.py',1264),\n  ('directive -> define','directive',1,'p_directive','cgrammar.py',1270),\n  ('directive -> undefine','directive',1,'p_directive','cgrammar.py',1271),\n  ('directive -> pragma','directive',1,'p_directive','cgrammar.py',1272),\n  ('define -> PP_DEFINE PP_DEFINE_NAME PP_END_DEFINE','define',3,'p_define','cgrammar.py',1277),\n  ('define -> PP_DEFINE PP_DEFINE_NAME type_name PP_END_DEFINE','define',4,'p_define','cgrammar.py',1278),\n  ('define -> PP_DEFINE PP_DEFINE_NAME constant_expression PP_END_DEFINE','define',4,'p_define','cgrammar.py',1279),\n  ('define -> PP_DEFINE PP_DEFINE_MACRO_NAME LPAREN RPAREN PP_END_DEFINE','define',5,'p_define','cgrammar.py',1280),\n  ('define -> PP_DEFINE PP_DEFINE_MACRO_NAME LPAREN RPAREN constant_expression PP_END_DEFINE','define',6,'p_define','cgrammar.py',1281),\n  ('define -> PP_DEFINE PP_DEFINE_MACRO_NAME LPAREN macro_parameter_list RPAREN PP_END_DEFINE','define',6,'p_define','cgrammar.py',1282),\n  ('define -> PP_DEFINE PP_DEFINE_MACRO_NAME LPAREN macro_parameter_list RPAREN constant_expression PP_END_DEFINE','define',7,'p_define','cgrammar.py',1283),\n  ('define -> PP_DEFINE error 
PP_END_DEFINE','define',3,'p_define_error','cgrammar.py',1314),\n  ('undefine -> PP_UNDEFINE PP_DEFINE_NAME PP_END_DEFINE','undefine',3,'p_undefine','cgrammar.py',1342),\n  ('macro_parameter_list -> PP_MACRO_PARAM','macro_parameter_list',1,'p_macro_parameter_list','cgrammar.py',1353),\n  ('macro_parameter_list -> macro_parameter_list COMMA PP_MACRO_PARAM','macro_parameter_list',3,'p_macro_parameter_list','cgrammar.py',1354),\n  ('pragma -> pragma_pack','pragma',1,'p_pragma','cgrammar.py',1378),\n  ('pragma -> PRAGMA pragma_directive_list PRAGMA_END','pragma',3,'p_pragma','cgrammar.py',1379),\n  ('pragma_pack -> PRAGMA PRAGMA_PACK LPAREN RPAREN PRAGMA_END','pragma_pack',5,'p_pragma_pack','cgrammar.py',1384),\n  ('pragma_pack -> PRAGMA PRAGMA_PACK LPAREN constant RPAREN PRAGMA_END','pragma_pack',6,'p_pragma_pack','cgrammar.py',1385),\n  ('pragma_pack -> PRAGMA PRAGMA_PACK LPAREN pragma_pack_stack_args RPAREN PRAGMA_END','pragma_pack',6,'p_pragma_pack','cgrammar.py',1386),\n  ('pragma_pack_stack_args -> IDENTIFIER','pragma_pack_stack_args',1,'p_pragma_pack_stack_args','cgrammar.py',1410),\n  ('pragma_pack_stack_args -> IDENTIFIER COMMA IDENTIFIER','pragma_pack_stack_args',3,'p_pragma_pack_stack_args','cgrammar.py',1411),\n  ('pragma_pack_stack_args -> IDENTIFIER COMMA IDENTIFIER COMMA constant','pragma_pack_stack_args',5,'p_pragma_pack_stack_args','cgrammar.py',1412),\n  ('pragma_pack_stack_args -> IDENTIFIER COMMA constant COMMA IDENTIFIER','pragma_pack_stack_args',5,'p_pragma_pack_stack_args','cgrammar.py',1413),\n  ('pragma_pack_stack_args -> IDENTIFIER COMMA constant','pragma_pack_stack_args',3,'p_pragma_pack_stack_args','cgrammar.py',1414),\n  ('pragma_directive_list -> pragma_directive','pragma_directive_list',1,'p_pragma_directive_list','cgrammar.py',1434),\n  ('pragma_directive_list -> pragma_directive_list pragma_directive','pragma_directive_list',2,'p_pragma_directive_list','cgrammar.py',1435),\n  ('pragma_directive -> 
IDENTIFIER','pragma_directive',1,'p_pragma_directive','cgrammar.py',1444),\n  ('pragma_directive -> string_literal','pragma_directive',1,'p_pragma_directive','cgrammar.py',1445),\n]\n"
  },
  {
    "path": "ctypesgen/parser/pplexer.py",
    "content": "\"\"\"Preprocess a C source file using gcc and convert the result into\n   a token stream\n\nReference is C99 with additions from C11 and C2x:\n* http://www.open-std.org/JTC1/SC22/WG14/www/docs/n1124.pdf\n* http://www.quut.com/c/ANSI-C-grammar-l-2011.html\n* http://www.open-std.org/jtc1/sc22/wg14/www/docs/n2731.pdf\n\"\"\"\n\n__docformat__ = \"restructuredtext\"\n\nfrom ctypesgen.parser.lex import TOKEN\nfrom ctypesgen.parser import cgrammar\n\n\ntokens = cgrammar.tokens\nkeywords = cgrammar.keywords\n\n\nstates = [(\"DEFINE\", \"exclusive\"), (\"PRAGMA\", \"exclusive\")]\n\n\n_B_ = r\"[0-1]\"\n_O_ = r\"[0-7]\"\n_D_ = r\"[0-9]\"\n_NZ_ = r\"[1-9]\"\n_L_ = r\"[a-zA-Z_]\"\n_A_ = r\"[a-zA-Z_0-9]\"\n_H_ = r\"[a-fA-F0-9]\"\n_HP_ = r\"0[xX]\"\n_BP_ = r\"0[bB]\"\n_E_ = r\"([Ee][+-]?\" + _D_ + r\"+)\"\n_P_ = r\"([Pp][+-]?\" + _D_ + r\"+)\"\n_FS_ = r\"(f|F|l|L)\"\n_IS_ = r\"(((u|U)(ll|LL|l|L)?)|((ll|LL|l|L)(u|U)?))\"\n_CP_ = r\"(u|U|L)\"\n_SP_ = r\"(u8|u|U|L)\"\n_ES_ = r\"(\\\\([\\'\\\"\\?\\\\abfnrtv]|[0-7]{1,3}|x[a-fA-F0-9]+))\"\n_WS_ = r\"[ \\t\\v\\n\\f]\"\n\nI_CONST_HEX = r\"(?P<p1>\" + _HP_ + _H_ + r\"+\" + r\")\" + _IS_ + r\"?\"\nI_CONST_DEC = r\"(?P<p1>\" + _NZ_ + _D_ + r\"*\" + r\")\" + _IS_ + r\"?\"\nI_CONST_OCT = r\"0\" + r\"(?P<p1>\" + _O_ + r\"*)\" + _IS_ + r\"?\"\nI_CONST_BIN = r\"(?P<p1>\" + _BP_ + _B_ + \"*\" + r\")\" + _IS_ + \"?\"\n\nF_CONST_1 = r\"(?P<sig>\" + _D_ + r\"+\" + r\")(?P<exp>\" + _E_ + r\")\" + _FS_ + r\"?\"\nF_CONST_2 = r\"(?P<sig>\" + _D_ + r\"*\\.\" + _D_ + r\"+\" + r\")(?P<exp>\" + _E_ + r\"?)\" + _FS_ + r\"?\"\nF_CONST_3 = r\"(?P<sig>\" + _D_ + r\"+\\.\" + r\")(?P<exp>\" + _E_ + r\"?)\" + _FS_ + r\"?\"\nF_CONST_4 = r\"(?P<hex>\" + _HP_ + _H_ + r\"+\" + _P_ + r\")\" + _FS_ + r\"?\"\nF_CONST_5 = r\"(?P<hex>\" + _HP_ + _H_ + r\"*\\.\" + _H_ + r\"+\" + _P_ + r\")\" + _FS_ + r\"?\"\nF_CONST_6 = r\"(?P<hex>\" + _HP_ + _H_ + r\"+\\.\" + _P_ + r\")\" + _FS_ + r\"?\"\n\nCHARACTER_CONSTANT = _SP_ + 
r\"?'(?P<p1>\\\\.|[^\\\\'])+'\"\nIDENTIFIER = _L_ + _A_ + r\"*\"\n\nescape_sequence_start_in_string = r\"\"\"(\\\\[0-9a-zA-Z._~!=&\\^\\-\\\\?'\"])\"\"\"\nstring_char = r\"\"\"([^\"\\\\\\n]|\"\"\" + escape_sequence_start_in_string + \")\"\nSTRING_LITERAL = '\"' + string_char + '*\"'\n\n# Process line-number directives from the preprocessor\n# See https://gcc.gnu.org/onlinedocs/cpp/Preprocessor-Output.html\nDIRECTIVE = r'\\#\\s+(?P<lineno>\\d+)\\s+\"(?P<filename>[^\"]+)\"[ \\d]*\\n'\n\n\n# --------------------------------------------------------------------------\n# Token value types\n# --------------------------------------------------------------------------\n\n# Numbers represented as int and float types.\n# For all other tokens, type is just str representation.\n\n\nclass StringLiteral(str):\n    def __new__(cls, value):\n        assert value[0] == '\"' and value[-1] == '\"'\n        # Unescaping probably not perfect but close enough.\n        value = value[1:-1]  # .decode('string_escape')\n        return str.__new__(cls, value)\n\n\n# --------------------------------------------------------------------------\n# Token declarations\n# --------------------------------------------------------------------------\n\n\n# Assignment operators\nt_ANY_EQUALS = r\"=\"\nt_ANY_RIGHT_ASSIGN = r\">>=\"\nt_ANY_LEFT_ASSIGN = r\"<<=\"\nt_ANY_ADD_ASSIGN = r\"\\+=\"\nt_ANY_SUB_ASSIGN = r\"-=\"\nt_ANY_MUL_ASSIGN = r\"\\*=\"\nt_ANY_DIV_ASSIGN = r\"/=\"\nt_ANY_MOD_ASSIGN = r\"%=\"\nt_ANY_AND_ASSIGN = r\"&=\"\nt_ANY_XOR_ASSIGN = r\"\\^=\"\nt_ANY_OR_ASSIGN = r\"\\|=\"\n\n# Operators\nt_ANY_PLUS = r\"\\+\"\nt_ANY_MINUS = r\"-\"\nt_ANY_TIMES = r\"\\*\"\nt_ANY_DIVIDE = r\"/\"\nt_ANY_MOD = r\"%\"\nt_ANY_AND = r\"&\"\nt_ANY_OR = r\"\\|\"\nt_ANY_NOT = r\"~\"\nt_ANY_XOR = r\"\\^\"\nt_ANY_RIGHT_OP = r\">>\"\nt_ANY_LEFT_OP = r\"<<\"\nt_ANY_INC_OP = r\"\\+\\+\"\nt_ANY_DEC_OP = r\"--\"\nt_ANY_PTR_OP = r\"->\"\nt_ANY_AND_OP = r\"&&\"\nt_ANY_OR_OP = r\"\\|\\|\"\nt_ANY_LE_OP = r\"<=\"\nt_ANY_GE_OP = 
r\">=\"\nt_ANY_EQ_OP = r\"==\"\nt_ANY_NE_OP = r\"!=\"\nt_ANY_LNOT = r\"!\"\nt_ANY_LT = r\"<\"\nt_ANY_GT = r\">\"\nt_ANY_CONDOP = r\"\\?\"\n\n\n# Delimiters\nt_ANY_PERIOD = r\"\\.\"\nt_ANY_LPAREN = r\"\\(\"\nt_ANY_RPAREN = r\"\\)\"\nt_ANY_ELLIPSIS = r\"\\.\\.\\.\"\nt_ANY_LBRACKET = r\"\\[\"\nt_ANY_RBRACKET = r\"\\]\"\nt_ANY_LBRACE = r\"\\{\"\nt_ANY_RBRACE = r\"\\}\"\nt_ANY_COMMA = r\",\"\nt_ANY_SEMI = r\";\"\nt_ANY_COLON = r\":\"\n\n\n@TOKEN(DIRECTIVE)\ndef t_ANY_directive(t):\n    m = t.lexer.lexmatch\n    t.lexer.filename = m.group(\"filename\")\n    t.lexer.lineno = int(m.group(\"lineno\"))\n    return None\n\n\n@TOKEN(F_CONST_1)\ndef t_ANY_f_const_1(t):\n    t.type = \"F_CONST_1\"\n    m = t.lexer.lexmatch\n    sig = m.group(\"sig\")\n    exp = m.group(\"exp\")\n    t.value = sig + exp\n    return t\n\n\n@TOKEN(F_CONST_2)\ndef t_ANY_f_const_2(t):\n    t.type = \"F_CONST_2\"\n    m = t.lexer.lexmatch\n    sig = m.group(\"sig\")\n    exp = m.group(\"exp\")\n    t.value = sig + exp\n    return t\n\n\n@TOKEN(F_CONST_3)\ndef t_ANY_f_const_3(t):\n    t.type = \"F_CONST_3\"\n    m = t.lexer.lexmatch\n    sig = m.group(\"sig\")\n    exp = m.group(\"exp\")\n    t.value = sig + exp\n    return t\n\n\n@TOKEN(F_CONST_4)\ndef t_ANY_f_const_4(t):\n    t.type = \"F_CONST_4\"\n    m = t.lexer.lexmatch\n    t.value = 'float.fromhex(\"' + m.group(\"hex\") + '\")'\n    return t\n\n\n@TOKEN(F_CONST_5)\ndef t_ANY_f_const_5(t):\n    t.type = \"F_CONST_5\"\n    m = t.lexer.lexmatch\n    t.value = 'float.fromhex(\"' + m.group(\"hex\") + '\")'\n    return t\n\n\n@TOKEN(F_CONST_6)\ndef t_ANY_f_const_6(t):\n    t.type = \"F_CONST_6\"\n    m = t.lexer.lexmatch\n    t.value = 'float.fromhex(\"' + m.group(\"hex\") + '\")'\n    return t\n\n\n@TOKEN(I_CONST_BIN)\ndef t_ANY_i_const_bin(t):\n    t.type = \"I_CONST_BIN\"\n    m = t.lexer.lexmatch\n    t.value = m.group(\"p1\")\n    return t\n\n\n@TOKEN(I_CONST_HEX)\ndef t_ANY_i_const_hex(t):\n    t.type = \"I_CONST_HEX\"\n    m = 
t.lexer.lexmatch\n    t.value = m.group(\"p1\")\n    return t\n\n\n@TOKEN(I_CONST_DEC)\ndef t_ANY_i_const_dec(t):\n    t.type = \"I_CONST_DEC\"\n    m = t.lexer.lexmatch\n    t.value = m.group(\"p1\")\n    return t\n\n\n@TOKEN(I_CONST_OCT)\ndef t_ANY_i_const_oct(t):\n    t.type = \"I_CONST_OCT\"\n    m = t.lexer.lexmatch\n    p1 = m.group(\"p1\")\n    if not p1:\n        t.value = \"0\"\n    else:\n        t.value = \"0o\" + m.group(\"p1\")\n    return t\n\n\n@TOKEN(CHARACTER_CONSTANT)\ndef t_ANY_character_constant(t):\n    t.type = \"CHARACTER_CONSTANT\"\n    m = t.lexer.lexmatch\n    p1 = m.group(\"p1\")\n    t.value = p1\n    return t\n\n\n@TOKEN(STRING_LITERAL)\ndef t_ANY_string_literal(t):\n    t.type = \"STRING_LITERAL\"\n    t.value = StringLiteral(t.value)\n    return t\n\n\n@TOKEN(IDENTIFIER)\ndef t_INITIAL_identifier(t):\n    t.type = \"IDENTIFIER\"\n    return t\n\n\n@TOKEN(IDENTIFIER)\ndef t_DEFINE_identifier(t):\n    if t.lexer.next_is_define_name:\n        # This identifier is the name of a macro\n        # We need to look ahead and see if this macro takes parameters or not.\n        if (\n            t.lexpos + len(t.value) < t.lexer.lexlen\n            and t.lexer.lexdata[t.lexpos + len(t.value)] == \"(\"\n        ):\n            t.type = \"PP_DEFINE_MACRO_NAME\"\n\n            # Look ahead and read macro parameter list\n            lexdata = t.lexer.lexdata\n            pos = t.lexpos + len(t.value) + 1\n            while lexdata[pos] not in \"\\n)\":\n                pos += 1\n            params = lexdata[t.lexpos + len(t.value) + 1 : pos]\n            paramlist = [x.strip() for x in params.split(\",\") if x.strip()]\n            t.lexer.macro_params = paramlist\n\n        else:\n            t.type = \"PP_DEFINE_NAME\"\n\n        t.lexer.next_is_define_name = False\n    elif t.value in t.lexer.macro_params:\n        t.type = \"PP_MACRO_PARAM\"\n    else:\n        t.type = \"IDENTIFIER\"\n    return t\n\n\n@TOKEN(r\"\\n\")\ndef 
t_INITIAL_newline(t):\n    t.lexer.lineno += 1\n    return None\n\n\n@TOKEN(r\"\\#undef\")\ndef t_INITIAL_pp_undefine(t):\n    t.type = \"PP_UNDEFINE\"\n    t.lexer.begin(\"DEFINE\")\n    t.lexer.next_is_define_name = True\n    t.lexer.macro_params = set()\n    return t\n\n\n@TOKEN(r\"\\#define\")\ndef t_INITIAL_pp_define(t):\n    t.type = \"PP_DEFINE\"\n    t.lexer.begin(\"DEFINE\")\n    t.lexer.next_is_define_name = True\n    t.lexer.macro_params = set()\n    return t\n\n\n@TOKEN(r\"\\#pragma\")\ndef t_INITIAL_pragma(t):\n    t.type = \"PRAGMA\"\n    t.lexer.begin(\"PRAGMA\")\n    return t\n\n\n@TOKEN(r\"pack\")\ndef t_PRAGMA_pack(t):\n    t.type = \"PRAGMA_PACK\"\n    return t\n\n\n@TOKEN(r\"\\n\")\ndef t_PRAGMA_newline(t):\n    t.type = \"PRAGMA_END\"\n    t.lexer.begin(\"INITIAL\")\n    t.lexer.lineno += 1\n    return t\n\n\n@TOKEN(IDENTIFIER)\ndef t_PRAGMA_identifier(t):\n    t.type = \"IDENTIFIER\"\n    return t\n\n\ndef t_PRAGMA_error(t):\n    t.type = \"OTHER\"\n    t.value = t.value[0:30]\n    t.lexer.lexpos += 1  # Skip it if it's an error in a #pragma\n    return t\n\n\n@TOKEN(r\"\\n\")\ndef t_DEFINE_newline(t):\n    t.type = \"PP_END_DEFINE\"\n    t.lexer.begin(\"INITIAL\")\n    t.lexer.lineno += 1\n    del t.lexer.macro_params\n\n    # Damage control in case the token immediately after the #define failed\n    # to handle this\n    t.lexer.next_is_define_name = False\n    return t\n\n\n@TOKEN(r\"(\\#\\#)|(\\#)\")\ndef t_DEFINE_pp_param_op(t):\n    if t.value == \"#\":\n        t.type = \"PP_STRINGIFY\"\n    else:\n        t.type = \"PP_IDENTIFIER_PASTE\"\n    return t\n\n\ndef t_INITIAL_error(t):\n    t.type = \"OTHER\"\n    return t\n\n\ndef t_DEFINE_error(t):\n    t.type = \"OTHER\"\n    t.value = t.value[0]\n    t.lexer.lexpos += 1  # Skip it if it's an error in a #define\n    return t\n\n\nt_ANY_ignore = \" \\t\\v\\f\\r\"\n"
  },
  {
    "path": "ctypesgen/parser/preprocessor.py",
    "content": "\"\"\"Preprocess a C source file using gcc and convert the result into\n   a token stream\n\nReference is C99:\n  * http://www.open-std.org/JTC1/SC22/WG14/www/docs/n1124.pdf\n\n\"\"\"\n\n__docformat__ = \"restructuredtext\"\n\nimport os\nimport re\nimport sys\nimport subprocess\n\nfrom ctypesgen.parser import pplexer, lex\nfrom ctypesgen.parser.lex import LexError\n\n\nIS_WINDOWS = sys.platform.startswith(\"win\")\nIS_MAC = sys.platform.startswith(\"darwin\")\n\n# --------------------------------------------------------------------------\n# Lexers\n# --------------------------------------------------------------------------\n\n\nclass PreprocessorLexer(lex.Lexer):\n    def __init__(self):\n        lex.Lexer.__init__(self)\n        self.filename = \"<input>\"\n        self.in_define = False\n\n    def input(self, data, filename=None):\n        if filename:\n            self.filename = filename\n        self.lasttoken = None\n\n        lex.Lexer.input(self, data)\n\n    def token(self):\n        result = lex.Lexer.token(self)\n        if result:\n            self.lasttoken = result.type\n            result.filename = self.filename\n        else:\n            self.lasttoken = None\n\n        return result\n\n\n# --------------------------------------------------------------------------\n# Grammars\n# --------------------------------------------------------------------------\n\n\nclass PreprocessorParser(object):\n    def __init__(self, options, cparser):\n        self.defines = [\n            \"__extension__=\",\n            \"__const=const\",\n            \"__asm__(x)=\",\n            \"__asm(x)=\",\n            \"CTYPESGEN=1\",\n        ]\n\n        # On macOS, explicitly add these defines to keep from getting syntax\n        # errors in the macOS standard headers.\n        if IS_MAC:\n            self.defines += [\n                \"_Nullable=\",\n                \"_Nonnull=\",\n            ]\n\n        self.matches = []\n        self.output = []\n  
      optimize = options.optimize_lexer if hasattr(options, \"optimize_lexer\") else False\n        self.lexer = lex.lex(\n            cls=PreprocessorLexer,\n            optimize=optimize,\n            lextab=\"lextab\",\n            outputdir=os.path.dirname(__file__),\n            module=pplexer,\n        )\n\n        self.options = options\n        self.cparser = cparser  # An instance of CParser\n\n    def parse(self, filename):\n        \"\"\"Parse a file and save its output\"\"\"\n\n        cmd = self.options.cpp\n\n        # Legacy behaviour is to implicitly undefine '__GNUC__'\n        # Continue doing this, unless user explicitly requested to allow it.\n        if self.options.allow_gnu_c:\n            # New behaviour. No implicit override.\n            # (currently NOT enabled by default yet)\n            pass\n        else:\n            # Legacy behaviour. Add an implicit override.\n            # (currently the default)\n            cmd += \" -U __GNUC__\"\n\n        cmd += \" -dD\"\n\n        for undefine in self.options.cpp_undefines:\n            cmd += \" -U%s\" % undefine\n\n        # This fixes Issue #6 where OS X 10.6+ adds a C extension that breaks\n        # the parser.  
Blocks shouldn't be needed for ctypesgen support anyway.\n        if IS_MAC:\n            cmd += \" -U __BLOCKS__\"\n\n        for path in self.options.include_search_paths:\n            cmd += ' -I\"%s\"' % path\n        for define in self.defines + self.options.cpp_defines:\n            cmd += ' \"-D%s\"' % define\n        cmd += ' \"' + filename + '\"'\n\n        self.cparser.handle_status(cmd)\n\n        pp = subprocess.Popen(\n            cmd,\n            shell=True,\n            universal_newlines=False,  # binary\n            stdout=subprocess.PIPE,\n            stderr=subprocess.PIPE,\n        )\n        ppout_data, pperr_data = pp.communicate()\n\n        try:\n            ppout = ppout_data.decode(\"utf-8\")\n        except UnicodeError:\n            if IS_MAC:\n                ppout = ppout_data.decode(\"utf-8\", errors=\"replace\")\n            else:\n                raise UnicodeError\n        pperr = pperr_data.decode(\"utf-8\")\n\n        if IS_WINDOWS:\n            ppout = ppout.replace(\"\\r\\n\", \"\\n\")\n            pperr = pperr.replace(\"\\r\\n\", \"\\n\")\n\n        for line in pperr.split(\"\\n\"):\n            if line:\n                self.cparser.handle_pp_error(line)\n\n        # We separate lines to two groups: directives and c-source.  
Note that\n        # #pragma directives actually belong to the source category for this.\n        # This is necessary because some source files intermix preprocessor\n        # directives with source--this is not tolerated by ctypesgen's single\n        # grammar.\n        # We put all the source lines first, then all the #define lines.\n\n        source_lines = []\n        define_lines = []\n\n        first_token_reg = re.compile(r\"^#\\s*([^ ]+)($|\\s)\")\n\n        for line in ppout.split(\"\\n\"):\n            line += \"\\n\"\n            search = first_token_reg.match(line)\n            hash_token = search.group(1) if search else None\n\n            if (not hash_token) or hash_token == \"pragma\":\n                source_lines.append(line)\n                define_lines.append(\"\\n\")\n\n            elif hash_token.isdigit():\n                # Line number information has to go with both groups\n                source_lines.append(line)\n                define_lines.append(line)\n\n            else:  # hash_token in (\"define\", \"undef\"):\n                source_lines.append(\"\\n\")\n                define_lines.append(line)\n\n        text = \"\".join(source_lines + define_lines)\n\n        if self.options.save_preprocessed_headers:\n            self.cparser.handle_status(\n                \"Saving preprocessed headers to %s.\" % self.options.save_preprocessed_headers\n            )\n            try:\n                with open(self.options.save_preprocessed_headers, \"w\") as f:\n                    f.write(text)\n            except IOError:\n                self.cparser.handle_error(\"Couldn't save headers.\")\n\n        self.lexer.input(text)\n        self.output = []\n\n        try:\n            while True:\n                token = self.lexer.token()\n                if token is not None:\n                    self.output.append(token)\n                else:\n                    break\n        except LexError as e:\n            
self.cparser.handle_error(\"{}; {}\".format(e, e.text.partition(\"\\n\")[0]), filename, 0)\n"
  },
  {
    "path": "ctypesgen/parser/yacc.py",
    "content": "# -----------------------------------------------------------------------------\n# ply: yacc.py\n#\n# Copyright (C) 2001-2018\n# David M. Beazley (Dabeaz LLC)\n# All rights reserved.\n#\n# Redistribution and use in source and binary forms, with or without\n# modification, are permitted provided that the following conditions are\n# met:\n#\n# * Redistributions of source code must retain the above copyright notice,\n#   this list of conditions and the following disclaimer.\n# * Redistributions in binary form must reproduce the above copyright notice,\n#   this list of conditions and the following disclaimer in the documentation\n#   and/or other materials provided with the distribution.\n# * Neither the name of the David Beazley or Dabeaz LLC may be used to\n#   endorse or promote products derived from this software without\n#  specific prior written permission.\n#\n# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n# \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\n# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n# -----------------------------------------------------------------------------\n#\n# This implements an LR parser that is constructed from grammar rules defined\n# as Python functions. The grammar is specified by supplying the BNF inside\n# Python documentation strings.  
The inspiration for this technique was borrowed\n# from John Aycock's Spark parsing system.  PLY might be viewed as cross between\n# Spark and the GNU bison utility.\n#\n# The current implementation is only somewhat object-oriented. The\n# LR parser itself is defined in terms of an object (which allows multiple\n# parsers to co-exist).  However, most of the variables used during table\n# construction are defined in terms of global variables.  Users shouldn't\n# notice unless they are trying to define multiple parsers at the same\n# time using threads (in which case they should have their head examined).\n#\n# This implementation supports both SLR and LALR(1) parsing.  LALR(1)\n# support was originally implemented by Elias Ioup (ezioup@alumni.uchicago.edu),\n# using the algorithm found in Aho, Sethi, and Ullman \"Compilers: Principles,\n# Techniques, and Tools\" (The Dragon Book).  LALR(1) has since been replaced\n# by the more efficient DeRemer and Pennello algorithm.\n#\n# :::::::: WARNING :::::::\n#\n# Construction of LR parsing tables is fairly complicated and expensive.\n# To make this module run fast, a *LOT* of work has been put into\n# optimization---often at the expensive of readability and what might\n# consider to be good Python \"coding style.\"   Modify the code at your\n# own risk!\n# ----------------------------------------------------------------------------\n\nimport re\nimport types\nimport sys\nimport os.path\nimport inspect\nimport warnings\n\n__version__    = '3.11'\n__tabversion__ = '3.10'\n\n#-----------------------------------------------------------------------------\n#                     === User configurable parameters ===\n#\n# Change these to modify the default behavior of yacc (if you wish)\n#-----------------------------------------------------------------------------\n\nyaccdebug   = True             # Debugging mode.  
If set, yacc generates a\n                               # a 'parser.out' file in the current directory\n\ndebug_file  = 'parser.out'     # Default name of the debugging file\ntab_module  = 'parsetab'       # Default name of the table module\ndefault_lr  = 'LALR'           # Default LR table generation method\n\nerror_count = 3                # Number of symbols that must be shifted to leave recovery mode\n\nyaccdevel   = False            # Set to True if developing yacc.  This turns off optimized\n                               # implementations of certain functions.\n\nresultlimit = 40               # Size limit of results when running in debug mode.\n\npickle_protocol = 0            # Protocol to use when writing pickle files\n\n# String type-checking compatibility\nif sys.version_info[0] < 3:\n    string_types = basestring\nelse:\n    string_types = str\n\nMAXINT = sys.maxsize\n\n# This object is a stand-in for a logging object created by the\n# logging module.   PLY will use this by default to create things\n# such as the parser.out file.  If a user wants more detailed\n# information, they can create their own logging object and pass\n# it into PLY.\n\nclass PlyLogger(object):\n    def __init__(self, f):\n        self.f = f\n\n    def debug(self, msg, *args, **kwargs):\n        self.f.write((msg % args) + '\\n')\n\n    info = debug\n\n    def warning(self, msg, *args, **kwargs):\n        self.f.write('WARNING: ' + (msg % args) + '\\n')\n\n    def error(self, msg, *args, **kwargs):\n        self.f.write('ERROR: ' + (msg % args) + '\\n')\n\n    critical = debug\n\n# Null logger is used when no output is generated. 
Does nothing.\nclass NullLogger(object):\n    def __getattribute__(self, name):\n        return self\n\n    def __call__(self, *args, **kwargs):\n        return self\n\n# Exception raised for yacc-related errors\nclass YaccError(Exception):\n    pass\n\n# Format the result message that the parser produces when running in debug mode.\ndef format_result(r):\n    repr_str = repr(r)\n    if '\\n' in repr_str:\n        repr_str = repr(repr_str)\n    if len(repr_str) > resultlimit:\n        repr_str = repr_str[:resultlimit] + ' ...'\n    result = '<%s @ 0x%x> (%s)' % (type(r).__name__, id(r), repr_str)\n    return result\n\n# Format stack entries when the parser is running in debug mode\ndef format_stack_entry(r):\n    repr_str = repr(r)\n    if '\\n' in repr_str:\n        repr_str = repr(repr_str)\n    if len(repr_str) < 16:\n        return repr_str\n    else:\n        return '<%s @ 0x%x>' % (type(r).__name__, id(r))\n\n# Panic mode error recovery support.   This feature is being reworked--much of the\n# code here is to offer a deprecation/backwards compatible transition\n\n_errok = None\n_token = None\n_restart = None\n_warnmsg = '''PLY: Don't use global functions errok(), token(), and restart() in p_error().\nInstead, invoke the methods on the associated parser instance:\n\n    def p_error(p):\n        ...\n        # Use parser.errok(), parser.token(), parser.restart()\n        ...\n\n    parser = yacc.yacc()\n'''\n\ndef errok():\n    warnings.warn(_warnmsg)\n    return _errok()\n\ndef restart():\n    warnings.warn(_warnmsg)\n    return _restart()\n\ndef token():\n    warnings.warn(_warnmsg)\n    return _token()\n\n# Utility function to call the p_error() function with some deprecation hacks\ndef call_errorfunc(errorfunc, token, parser):\n    global _errok, _token, _restart\n    _errok = parser.errok\n    _token = parser.token\n    _restart = parser.restart\n    r = errorfunc(token)\n    try:\n        del _errok, _token, _restart\n    except NameError:\n        pass\n 
   return r\n\n#-----------------------------------------------------------------------------\n#                        ===  LR Parsing Engine ===\n#\n# The following classes are used for the LR parser itself.  These are not\n# used during table construction and are independent of the actual LR\n# table generation algorithm\n#-----------------------------------------------------------------------------\n\n# This class is used to hold non-terminal grammar symbols during parsing.\n# It normally has the following attributes set:\n#        .type       = Grammar symbol type\n#        .value      = Symbol value\n#        .lineno     = Starting line number\n#        .endlineno  = Ending line number (optional, set automatically)\n#        .lexpos     = Starting lex position\n#        .endlexpos  = Ending lex position (optional, set automatically)\n\nclass YaccSymbol:\n    def __str__(self):\n        return self.type\n\n    def __repr__(self):\n        return str(self)\n\n# This class is a wrapper around the objects actually passed to each\n# grammar rule.   Index lookup and assignment actually assign the\n# .value attribute of the underlying YaccSymbol object.\n# The lineno() method returns the line number of a given\n# item (or 0 if not defined).   The linespan() method returns\n# a tuple of (startline,endline) representing the range of lines\n# for a symbol.  
The lexspan() method returns a tuple (lexpos,endlexpos)\n# representing the range of positional information for a symbol.\n\nclass YaccProduction:\n    def __init__(self, s, stack=None):\n        self.slice = s\n        self.stack = stack\n        self.lexer = None\n        self.parser = None\n\n    def __getitem__(self, n):\n        if isinstance(n, slice):\n            return [s.value for s in self.slice[n]]\n        elif n >= 0:\n            return self.slice[n].value\n        else:\n            return self.stack[n].value\n\n    def __setitem__(self, n, v):\n        self.slice[n].value = v\n\n    def __getslice__(self, i, j):\n        return [s.value for s in self.slice[i:j]]\n\n    def __len__(self):\n        return len(self.slice)\n\n    def lineno(self, n):\n        return getattr(self.slice[n], 'lineno', 0)\n\n    def set_lineno(self, n, lineno):\n        self.slice[n].lineno = lineno\n\n    def linespan(self, n):\n        startline = getattr(self.slice[n], 'lineno', 0)\n        endline = getattr(self.slice[n], 'endlineno', startline)\n        return startline, endline\n\n    def lexpos(self, n):\n        return getattr(self.slice[n], 'lexpos', 0)\n\n    def set_lexpos(self, n, lexpos):\n        self.slice[n].lexpos = lexpos\n\n    def lexspan(self, n):\n        startpos = getattr(self.slice[n], 'lexpos', 0)\n        endpos = getattr(self.slice[n], 'endlexpos', startpos)\n        return startpos, endpos\n\n    def error(self):\n        raise SyntaxError\n\n# -----------------------------------------------------------------------------\n#                               == LRParser ==\n#\n# The LR Parsing engine.\n# -----------------------------------------------------------------------------\n\nclass LRParser:\n    def __init__(self, lrtab, errorf):\n        self.productions = lrtab.lr_productions\n        self.action = lrtab.lr_action\n        self.goto = lrtab.lr_goto\n        self.errorfunc = errorf\n        self.set_defaulted_states()\n        self.errorok 
= True\n\n    def errok(self):\n        self.errorok = True\n\n    def restart(self):\n        del self.statestack[:]\n        del self.symstack[:]\n        sym = YaccSymbol()\n        sym.type = '$end'\n        self.symstack.append(sym)\n        self.statestack.append(0)\n\n    # Defaulted state support.\n    # This method identifies parser states where there is only one possible reduction action.\n    # For such states, the parser can make a choose to make a rule reduction without consuming\n    # the next look-ahead token.  This delayed invocation of the tokenizer can be useful in\n    # certain kinds of advanced parsing situations where the lexer and parser interact with\n    # each other or change states (i.e., manipulation of scope, lexer states, etc.).\n    #\n    # See:  http://www.gnu.org/software/bison/manual/html_node/Default-Reductions.html#Default-Reductions\n    def set_defaulted_states(self):\n        self.defaulted_states = {}\n        for state, actions in self.action.items():\n            rules = list(actions.values())\n            if len(rules) == 1 and rules[0] < 0:\n                self.defaulted_states[state] = rules[0]\n\n    def disable_defaulted_states(self):\n        self.defaulted_states = {}\n\n    def parse(self, input=None, lexer=None, debug=False, tracking=False, tokenfunc=None):\n        if debug or yaccdevel:\n            if isinstance(debug, int):\n                debug = PlyLogger(sys.stderr)\n            return self.parsedebug(input, lexer, debug, tracking, tokenfunc)\n        elif tracking:\n            return self.parseopt(input, lexer, debug, tracking, tokenfunc)\n        else:\n            return self.parseopt_notrack(input, lexer, debug, tracking, tokenfunc)\n\n\n    # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n    # parsedebug().\n    #\n    # This is the debugging enabled version of parse().  All changes made to the\n    # parsing engine should be made here.   
Optimized versions of this function\n    # are automatically created by the ply/ygen.py script.  This script cuts out\n    # sections enclosed in markers such as this:\n    #\n    #      #--! DEBUG\n    #      statements\n    #      #--! DEBUG\n    #\n    # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n\n    def parsedebug(self, input=None, lexer=None, debug=False, tracking=False, tokenfunc=None):\n        #--! parsedebug-start\n        lookahead = None                         # Current lookahead symbol\n        lookaheadstack = []                      # Stack of lookahead symbols\n        actions = self.action                    # Local reference to action table (to avoid lookup on self.)\n        goto    = self.goto                      # Local reference to goto table (to avoid lookup on self.)\n        prod    = self.productions               # Local reference to production list (to avoid lookup on self.)\n        defaulted_states = self.defaulted_states # Local reference to defaulted states\n        pslice  = YaccProduction(None)           # Production object passed to grammar rules\n        errorcount = 0                           # Used during error recovery\n\n        #--! DEBUG\n        debug.info('PLY: PARSE DEBUG START')\n        #--! DEBUG\n\n        # If no lexer was given, we will try to use the lex module\n        if not lexer:\n            from . 
import lex\n            lexer = lex.lexer\n\n        # Set up the lexer and parser objects on pslice\n        pslice.lexer = lexer\n        pslice.parser = self\n\n        # If input was supplied, pass to lexer\n        if input is not None:\n            lexer.input(input)\n\n        if tokenfunc is None:\n            # Tokenize function\n            get_token = lexer.token\n        else:\n            get_token = tokenfunc\n\n        # Set the parser() token method (sometimes used in error recovery)\n        self.token = get_token\n\n        # Set up the state and symbol stacks\n\n        statestack = []                # Stack of parsing states\n        self.statestack = statestack\n        symstack   = []                # Stack of grammar symbols\n        self.symstack = symstack\n\n        pslice.stack = symstack         # Put in the production\n        errtoken   = None               # Err token\n\n        # The start state is assumed to be (0,$end)\n\n        statestack.append(0)\n        sym = YaccSymbol()\n        sym.type = '$end'\n        symstack.append(sym)\n        state = 0\n        while True:\n            # Get the next symbol on the input.  If a lookahead symbol\n            # is already set, we just use that. Otherwise, we'll pull\n            # the next token off of the lookaheadstack or from the lexer\n\n            #--! DEBUG\n            debug.debug('')\n            debug.debug('State  : %s', state)\n            #--! 
DEBUG\n\n            if state not in defaulted_states:\n                if not lookahead:\n                    if not lookaheadstack:\n                        lookahead = get_token()     # Get the next token\n                    else:\n                        lookahead = lookaheadstack.pop()\n                    if not lookahead:\n                        lookahead = YaccSymbol()\n                        lookahead.type = '$end'\n\n                # Check the action table\n                ltype = lookahead.type\n                t = actions[state].get(ltype)\n            else:\n                t = defaulted_states[state]\n                #--! DEBUG\n                debug.debug('Defaulted state %s: Reduce using %d', state, -t)\n                #--! DEBUG\n\n            #--! DEBUG\n            debug.debug('Stack  : %s',\n                        ('%s . %s' % (' '.join([xx.type for xx in symstack][1:]), str(lookahead))).lstrip())\n            #--! DEBUG\n\n            if t is not None:\n                if t > 0:\n                    # shift a symbol on the stack\n                    statestack.append(t)\n                    state = t\n\n                    #--! DEBUG\n                    debug.debug('Action : Shift and goto state %s', t)\n                    #--! DEBUG\n\n                    symstack.append(lookahead)\n                    lookahead = None\n\n                    # Decrease error count on successful shift\n                    if errorcount:\n                        errorcount -= 1\n                    continue\n\n                if t < 0:\n                    # reduce a symbol on the stack, emit a production\n                    p = prod[-t]\n                    pname = p.name\n                    plen  = p.len\n\n                    # Get production function\n                    sym = YaccSymbol()\n                    sym.type = pname       # Production name\n                    sym.value = None\n\n                    #--! 
DEBUG\n                    if plen:\n                        debug.info('Action : Reduce rule [%s] with %s and goto state %d', p.str,\n                                   '['+','.join([format_stack_entry(_v.value) for _v in symstack[-plen:]])+']',\n                                   goto[statestack[-1-plen]][pname])\n                    else:\n                        debug.info('Action : Reduce rule [%s] with %s and goto state %d', p.str, [],\n                                   goto[statestack[-1]][pname])\n\n                    #--! DEBUG\n\n                    if plen:\n                        targ = symstack[-plen-1:]\n                        targ[0] = sym\n\n                        #--! TRACKING\n                        if tracking:\n                            t1 = targ[1]\n                            sym.lineno = t1.lineno\n                            sym.lexpos = t1.lexpos\n                            sym.filename = getattr(t1, 'filename', \"\")  # <mod NL>\n                            t1 = targ[-1]\n                            sym.endlineno = getattr(t1, 'endlineno', t1.lineno)\n                            sym.endlexpos = getattr(t1, 'endlexpos', t1.lexpos)\n                        #--! TRACKING\n\n                        # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n                        # The code enclosed in this section is duplicated\n                        # below as a performance optimization.  Make sure\n                        # changes get made in both locations.\n\n                        pslice.slice = targ\n\n                        try:\n                            # Call the grammar rule with our special slice object\n                            del symstack[-plen:]\n                            self.state = state\n                            p.callable(pslice)\n                            del statestack[-plen:]\n                            #--! 
DEBUG\n                            debug.info('Result : %s', format_result(pslice[0]))\n                            #--! DEBUG\n                            symstack.append(sym)\n                            state = goto[statestack[-1]][pname]\n                            statestack.append(state)\n                        except SyntaxError:\n                            # If an error was set. Enter error recovery state\n                            lookaheadstack.append(lookahead)    # Save the current lookahead token\n                            symstack.extend(targ[1:-1])         # Put the production slice back on the stack\n                            statestack.pop()                    # Pop back one state (before the reduce)\n                            state = statestack[-1]\n                            sym.type = 'error'\n                            sym.value = 'error'\n                            lookahead = sym\n                            errorcount = error_count\n                            self.errorok = False\n\n                        continue\n                        # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n\n                    else:\n\n                        #--! TRACKING\n                        if tracking:\n                            sym.lineno = lexer.lineno\n                            sym.lexpos = lexer.lexpos\n                            sym.filename = getattr(lexer, 'filename', \"\")  # <mod NL>\n                        #--! TRACKING\n\n                        targ = [sym]\n\n                        # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n                        # The code enclosed in this section is duplicated\n                        # above as a performance optimization.  
Make sure\n                        # changes get made in both locations.\n\n                        pslice.slice = targ\n\n                        try:\n                            # Call the grammar rule with our special slice object\n                            self.state = state\n                            p.callable(pslice)\n                            #--! DEBUG\n                            debug.info('Result : %s', format_result(pslice[0]))\n                            #--! DEBUG\n                            symstack.append(sym)\n                            state = goto[statestack[-1]][pname]\n                            statestack.append(state)\n                        except SyntaxError:\n                            # If an error was set. Enter error recovery state\n                            lookaheadstack.append(lookahead)    # Save the current lookahead token\n                            statestack.pop()                    # Pop back one state (before the reduce)\n                            state = statestack[-1]\n                            sym.type = 'error'\n                            sym.value = 'error'\n                            lookahead = sym\n                            errorcount = error_count\n                            self.errorok = False\n\n                        continue\n                        # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n\n                if t == 0:\n                    n = symstack[-1]\n                    result = getattr(n, 'value', None)\n                    #--! DEBUG\n                    debug.info('Done   : Returning %s', format_result(result))\n                    debug.info('PLY: PARSE DEBUG END')\n                    #--! DEBUG\n                    return result\n\n            if t is None:\n\n                #--! DEBUG\n                debug.error('Error  : %s',\n                            ('%s . 
%s' % (' '.join([xx.type for xx in symstack][1:]), str(lookahead))).lstrip())\n                #--! DEBUG\n\n                # We have some kind of parsing error here.  To handle\n                # this, we are going to push the current token onto\n                # the tokenstack and replace it with an 'error' token.\n                # If there are any synchronization rules, they may\n                # catch it.\n                #\n                # In addition to pushing the error token, we call call\n                # the user defined p_error() function if this is the\n                # first syntax error.  This function is only called if\n                # errorcount == 0.\n                if errorcount == 0 or self.errorok:\n                    errorcount = error_count\n                    self.errorok = False\n                    errtoken = lookahead\n                    if errtoken.type == '$end':\n                        errtoken = None               # End of file!\n                    if self.errorfunc:\n                        if errtoken and not hasattr(errtoken, 'lexer'):\n                            errtoken.lexer = lexer\n                        self.state = state\n                        tok = call_errorfunc(self.errorfunc, errtoken, self)\n                        if self.errorok:\n                            # User must have done some kind of panic\n                            # mode recovery on their own.  
The\n                            # returned token is the next lookahead\n                            lookahead = tok\n                            errtoken = None\n                            continue\n                    else:\n                        if errtoken:\n                            if hasattr(errtoken, 'lineno'):\n                                lineno = lookahead.lineno\n                            else:\n                                lineno = 0\n                            if lineno:\n                                sys.stderr.write('yacc: Syntax error at line %d, token=%s\\n' % (lineno, errtoken.type))\n                            else:\n                                sys.stderr.write('yacc: Syntax error, token=%s' % errtoken.type)\n                        else:\n                            sys.stderr.write('yacc: Parse error in input. EOF\\n')\n                            return\n\n                else:\n                    errorcount = error_count\n\n                # case 1:  the statestack only has 1 entry on it.  If we're in this state, the\n                # entire parse has been rolled back and we're completely hosed.   The token is\n                # discarded and we just keep going.\n\n                if len(statestack) <= 1 and lookahead.type != '$end':\n                    lookahead = None\n                    errtoken = None\n                    state = 0\n                    # Nuke the pushback stack\n                    del lookaheadstack[:]\n                    continue\n\n                # case 2: the statestack has a couple of entries on it, but we're\n                # at the end of the file. nuke the top entry and generate an error token\n\n                # Start nuking entries on the stack\n                if lookahead.type == '$end':\n                    # Whoa. We're really hosed here. 
Bail out\n                    return\n\n                if lookahead.type != 'error':\n                    sym = symstack[-1]\n                    if sym.type == 'error':\n                        # Hmmm. Error is on top of stack, we'll just nuke input\n                        # symbol and continue\n                        #--! TRACKING\n                        if tracking:\n                            sym.endlineno = getattr(lookahead, 'lineno', sym.lineno)\n                            sym.endlexpos = getattr(lookahead, 'lexpos', sym.lexpos)\n                        #--! TRACKING\n                        lookahead = None\n                        continue\n\n                    # Create the error symbol for the first time and make it the new lookahead symbol\n                    t = YaccSymbol()\n                    t.type = 'error'\n\n                    if hasattr(lookahead, 'lineno'):\n                        t.lineno = t.endlineno = lookahead.lineno\n                    if hasattr(lookahead, 'lexpos'):\n                        t.lexpos = t.endlexpos = lookahead.lexpos\n                    t.value = lookahead\n                    lookaheadstack.append(lookahead)\n                    lookahead = t\n                else:\n                    sym = symstack.pop()\n                    #--! TRACKING\n                    if tracking:\n                        lookahead.lineno = sym.lineno\n                        lookahead.lexpos = sym.lexpos\n                    #--! TRACKING\n                    statestack.pop()\n                    state = statestack[-1]\n\n                continue\n\n            # Call an error function here\n            raise RuntimeError('yacc: internal parser error!!!\\n')\n\n        #--! parsedebug-end\n\n    # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n    # parseopt().\n    #\n    # Optimized version of parse() method.  
DO NOT EDIT THIS CODE DIRECTLY!\n    # This code is automatically generated by the ply/ygen.py script. Make\n    # changes to the parsedebug() method instead.\n    # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n\n    def parseopt(self, input=None, lexer=None, debug=False, tracking=False, tokenfunc=None):\n        #--! parseopt-start\n        lookahead = None                         # Current lookahead symbol\n        lookaheadstack = []                      # Stack of lookahead symbols\n        actions = self.action                    # Local reference to action table (to avoid lookup on self.)\n        goto    = self.goto                      # Local reference to goto table (to avoid lookup on self.)\n        prod    = self.productions               # Local reference to production list (to avoid lookup on self.)\n        defaulted_states = self.defaulted_states # Local reference to defaulted states\n        pslice  = YaccProduction(None)           # Production object passed to grammar rules\n        errorcount = 0                           # Used during error recovery\n\n\n        # If no lexer was given, we will try to use the lex module\n        if not lexer:\n            from . 
import lex\n            lexer = lex.lexer\n\n        # Set up the lexer and parser objects on pslice\n        pslice.lexer = lexer\n        pslice.parser = self\n\n        # If input was supplied, pass to lexer\n        if input is not None:\n            lexer.input(input)\n\n        if tokenfunc is None:\n            # Tokenize function\n            get_token = lexer.token\n        else:\n            get_token = tokenfunc\n\n        # Set the parser() token method (sometimes used in error recovery)\n        self.token = get_token\n\n        # Set up the state and symbol stacks\n\n        statestack = []                # Stack of parsing states\n        self.statestack = statestack\n        symstack   = []                # Stack of grammar symbols\n        self.symstack = symstack\n\n        pslice.stack = symstack         # Put in the production\n        errtoken   = None               # Err token\n\n        # The start state is assumed to be (0,$end)\n\n        statestack.append(0)\n        sym = YaccSymbol()\n        sym.type = '$end'\n        symstack.append(sym)\n        state = 0\n        while True:\n            # Get the next symbol on the input.  If a lookahead symbol\n            # is already set, we just use that. 
Otherwise, we'll pull\n            # the next token off of the lookaheadstack or from the lexer\n\n\n            if state not in defaulted_states:\n                if not lookahead:\n                    if not lookaheadstack:\n                        lookahead = get_token()     # Get the next token\n                    else:\n                        lookahead = lookaheadstack.pop()\n                    if not lookahead:\n                        lookahead = YaccSymbol()\n                        lookahead.type = '$end'\n\n                # Check the action table\n                ltype = lookahead.type\n                t = actions[state].get(ltype)\n            else:\n                t = defaulted_states[state]\n\n\n            if t is not None:\n                if t > 0:\n                    # shift a symbol on the stack\n                    statestack.append(t)\n                    state = t\n\n\n                    symstack.append(lookahead)\n                    lookahead = None\n\n                    # Decrease error count on successful shift\n                    if errorcount:\n                        errorcount -= 1\n                    continue\n\n                if t < 0:\n                    # reduce a symbol on the stack, emit a production\n                    p = prod[-t]\n                    pname = p.name\n                    plen  = p.len\n\n                    # Get production function\n                    sym = YaccSymbol()\n                    sym.type = pname       # Production name\n                    sym.value = None\n\n\n                    if plen:\n                        targ = symstack[-plen-1:]\n                        targ[0] = sym\n\n                        #--! 
TRACKING\n                        if tracking:\n                            t1 = targ[1]\n                            sym.lineno = t1.lineno\n                            sym.lexpos = t1.lexpos\n                            sym.filename = getattr(t1, 'filename', \"\")  # <mod NL>\n                            t1 = targ[-1]\n                            sym.endlineno = getattr(t1, 'endlineno', t1.lineno)\n                            sym.endlexpos = getattr(t1, 'endlexpos', t1.lexpos)\n                        #--! TRACKING\n\n                        # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n                        # The code enclosed in this section is duplicated\n                        # below as a performance optimization.  Make sure\n                        # changes get made in both locations.\n\n                        pslice.slice = targ\n\n                        try:\n                            # Call the grammar rule with our special slice object\n                            del symstack[-plen:]\n                            self.state = state\n                            p.callable(pslice)\n                            del statestack[-plen:]\n                            symstack.append(sym)\n                            state = goto[statestack[-1]][pname]\n                            statestack.append(state)\n                        except SyntaxError:\n                            # If an error was set. 
Enter error recovery state\n                            lookaheadstack.append(lookahead)    # Save the current lookahead token\n                            symstack.extend(targ[1:-1])         # Put the production slice back on the stack\n                            statestack.pop()                    # Pop back one state (before the reduce)\n                            state = statestack[-1]\n                            sym.type = 'error'\n                            sym.value = 'error'\n                            lookahead = sym\n                            errorcount = error_count\n                            self.errorok = False\n\n                        continue\n                        # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n\n                    else:\n\n                        #--! TRACKING\n                        if tracking:\n                            sym.lineno = lexer.lineno\n                            sym.lexpos = lexer.lexpos\n                            sym.filename = getattr(lexer, 'filename', \"\")  # <mod NL>\n                        #--! TRACKING\n\n                        targ = [sym]\n\n                        # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n                        # The code enclosed in this section is duplicated\n                        # above as a performance optimization.  Make sure\n                        # changes get made in both locations.\n\n                        pslice.slice = targ\n\n                        try:\n                            # Call the grammar rule with our special slice object\n                            self.state = state\n                            p.callable(pslice)\n                            symstack.append(sym)\n                            state = goto[statestack[-1]][pname]\n                            statestack.append(state)\n                        except SyntaxError:\n                            # If an error was set. 
Enter error recovery state\n                            lookaheadstack.append(lookahead)    # Save the current lookahead token\n                            statestack.pop()                    # Pop back one state (before the reduce)\n                            state = statestack[-1]\n                            sym.type = 'error'\n                            sym.value = 'error'\n                            lookahead = sym\n                            errorcount = error_count\n                            self.errorok = False\n\n                        continue\n                        # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n\n                if t == 0:\n                    n = symstack[-1]\n                    result = getattr(n, 'value', None)\n                    return result\n\n            if t is None:\n\n\n                # We have some kind of parsing error here.  To handle\n                # this, we are going to push the current token onto\n                # the tokenstack and replace it with an 'error' token.\n                # If there are any synchronization rules, they may\n                # catch it.\n                #\n                # In addition to pushing the error token, we call call\n                # the user defined p_error() function if this is the\n                # first syntax error.  
This function is only called if\n                # errorcount == 0.\n                if errorcount == 0 or self.errorok:\n                    errorcount = error_count\n                    self.errorok = False\n                    errtoken = lookahead\n                    if errtoken.type == '$end':\n                        errtoken = None               # End of file!\n                    if self.errorfunc:\n                        if errtoken and not hasattr(errtoken, 'lexer'):\n                            errtoken.lexer = lexer\n                        self.state = state\n                        tok = call_errorfunc(self.errorfunc, errtoken, self)\n                        if self.errorok:\n                            # User must have done some kind of panic\n                            # mode recovery on their own.  The\n                            # returned token is the next lookahead\n                            lookahead = tok\n                            errtoken = None\n                            continue\n                    else:\n                        if errtoken:\n                            if hasattr(errtoken, 'lineno'):\n                                lineno = lookahead.lineno\n                            else:\n                                lineno = 0\n                            if lineno:\n                                sys.stderr.write('yacc: Syntax error at line %d, token=%s\\n' % (lineno, errtoken.type))\n                            else:\n                                sys.stderr.write('yacc: Syntax error, token=%s' % errtoken.type)\n                        else:\n                            sys.stderr.write('yacc: Parse error in input. EOF\\n')\n                            return\n\n                else:\n                    errorcount = error_count\n\n                # case 1:  the statestack only has 1 entry on it.  If we're in this state, the\n                # entire parse has been rolled back and we're completely hosed.   
The token is\n                # discarded and we just keep going.\n\n                if len(statestack) <= 1 and lookahead.type != '$end':\n                    lookahead = None\n                    errtoken = None\n                    state = 0\n                    # Nuke the pushback stack\n                    del lookaheadstack[:]\n                    continue\n\n                # case 2: the statestack has a couple of entries on it, but we're\n                # at the end of the file. nuke the top entry and generate an error token\n\n                # Start nuking entries on the stack\n                if lookahead.type == '$end':\n                    # Whoa. We're really hosed here. Bail out\n                    return\n\n                if lookahead.type != 'error':\n                    sym = symstack[-1]\n                    if sym.type == 'error':\n                        # Hmmm. Error is on top of stack, we'll just nuke input\n                        # symbol and continue\n                        #--! TRACKING\n                        if tracking:\n                            sym.endlineno = getattr(lookahead, 'lineno', sym.lineno)\n                            sym.endlexpos = getattr(lookahead, 'lexpos', sym.lexpos)\n                        #--! 
TRACKING\n                        lookahead = None\n                        continue\n\n                    # Create the error symbol for the first time and make it the new lookahead symbol\n                    t = YaccSymbol()\n                    t.type = 'error'\n\n                    if hasattr(lookahead, 'lineno'):\n                        t.lineno = t.endlineno = lookahead.lineno\n                    if hasattr(lookahead, 'lexpos'):\n                        t.lexpos = t.endlexpos = lookahead.lexpos\n                    t.value = lookahead\n                    lookaheadstack.append(lookahead)\n                    lookahead = t\n                else:\n                    sym = symstack.pop()\n                    #--! TRACKING\n                    if tracking:\n                        lookahead.lineno = sym.lineno\n                        lookahead.lexpos = sym.lexpos\n                    #--! TRACKING\n                    statestack.pop()\n                    state = statestack[-1]\n\n                continue\n\n            # Call an error function here\n            raise RuntimeError('yacc: internal parser error!!!\\n')\n\n        #--! parseopt-end\n\n    # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n    # parseopt_notrack().\n    #\n    # Optimized version of parseopt() with line number tracking removed.\n    # DO NOT EDIT THIS CODE DIRECTLY. This code is automatically generated\n    # by the ply/ygen.py script. Make changes to the parsedebug() method instead.\n    # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n\n    def parseopt_notrack(self, input=None, lexer=None, debug=False, tracking=False, tokenfunc=None):\n        #--! 
parseopt-notrack-start\n        lookahead = None                         # Current lookahead symbol\n        lookaheadstack = []                      # Stack of lookahead symbols\n        actions = self.action                    # Local reference to action table (to avoid lookup on self.)\n        goto    = self.goto                      # Local reference to goto table (to avoid lookup on self.)\n        prod    = self.productions               # Local reference to production list (to avoid lookup on self.)\n        defaulted_states = self.defaulted_states # Local reference to defaulted states\n        pslice  = YaccProduction(None)           # Production object passed to grammar rules\n        errorcount = 0                           # Used during error recovery\n\n\n        # If no lexer was given, we will try to use the lex module\n        if not lexer:\n            from . import lex\n            lexer = lex.lexer\n\n        # Set up the lexer and parser objects on pslice\n        pslice.lexer = lexer\n        pslice.parser = self\n\n        # If input was supplied, pass to lexer\n        if input is not None:\n            lexer.input(input)\n\n        if tokenfunc is None:\n            # Tokenize function\n            get_token = lexer.token\n        else:\n            get_token = tokenfunc\n\n        # Set the parser() token method (sometimes used in error recovery)\n        self.token = get_token\n\n        # Set up the state and symbol stacks\n\n        statestack = []                # Stack of parsing states\n        self.statestack = statestack\n        symstack   = []                # Stack of grammar symbols\n        self.symstack = symstack\n\n        pslice.stack = symstack         # Put in the production\n        errtoken   = None               # Err token\n\n        # The start state is assumed to be (0,$end)\n\n        statestack.append(0)\n        sym = YaccSymbol()\n        sym.type = '$end'\n        symstack.append(sym)\n        state = 0\n       
 while True:\n            # Get the next symbol on the input.  If a lookahead symbol\n            # is already set, we just use that. Otherwise, we'll pull\n            # the next token off of the lookaheadstack or from the lexer\n\n\n            if state not in defaulted_states:\n                if not lookahead:\n                    if not lookaheadstack:\n                        lookahead = get_token()     # Get the next token\n                    else:\n                        lookahead = lookaheadstack.pop()\n                    if not lookahead:\n                        lookahead = YaccSymbol()\n                        lookahead.type = '$end'\n\n                # Check the action table\n                ltype = lookahead.type\n                t = actions[state].get(ltype)\n            else:\n                t = defaulted_states[state]\n\n\n            if t is not None:\n                if t > 0:\n                    # shift a symbol on the stack\n                    statestack.append(t)\n                    state = t\n\n\n                    symstack.append(lookahead)\n                    lookahead = None\n\n                    # Decrease error count on successful shift\n                    if errorcount:\n                        errorcount -= 1\n                    continue\n\n                if t < 0:\n                    # reduce a symbol on the stack, emit a production\n                    p = prod[-t]\n                    pname = p.name\n                    plen  = p.len\n\n                    # Get production function\n                    sym = YaccSymbol()\n                    sym.type = pname       # Production name\n                    sym.value = None\n\n\n                    if plen:\n                        targ = symstack[-plen-1:]\n                        targ[0] = sym\n\n\n                        # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n                        # The code enclosed in this section is duplicated\n                       
 # below as a performance optimization.  Make sure\n                        # changes get made in both locations.\n\n                        pslice.slice = targ\n\n                        try:\n                            # Call the grammar rule with our special slice object\n                            del symstack[-plen:]\n                            self.state = state\n                            p.callable(pslice)\n                            del statestack[-plen:]\n                            symstack.append(sym)\n                            state = goto[statestack[-1]][pname]\n                            statestack.append(state)\n                        except SyntaxError:\n                            # If an error was set. Enter error recovery state\n                            lookaheadstack.append(lookahead)    # Save the current lookahead token\n                            symstack.extend(targ[1:-1])         # Put the production slice back on the stack\n                            statestack.pop()                    # Pop back one state (before the reduce)\n                            state = statestack[-1]\n                            sym.type = 'error'\n                            sym.value = 'error'\n                            lookahead = sym\n                            errorcount = error_count\n                            self.errorok = False\n\n                        continue\n                        # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n\n                    else:\n\n\n                        targ = [sym]\n\n                        # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n                        # The code enclosed in this section is duplicated\n                        # above as a performance optimization.  
Make sure\n                        # changes get made in both locations.\n\n                        pslice.slice = targ\n\n                        try:\n                            # Call the grammar rule with our special slice object\n                            self.state = state\n                            p.callable(pslice)\n                            symstack.append(sym)\n                            state = goto[statestack[-1]][pname]\n                            statestack.append(state)\n                        except SyntaxError:\n                            # If an error was set. Enter error recovery state\n                            lookaheadstack.append(lookahead)    # Save the current lookahead token\n                            statestack.pop()                    # Pop back one state (before the reduce)\n                            state = statestack[-1]\n                            sym.type = 'error'\n                            sym.value = 'error'\n                            lookahead = sym\n                            errorcount = error_count\n                            self.errorok = False\n\n                        continue\n                        # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n\n                if t == 0:\n                    n = symstack[-1]\n                    result = getattr(n, 'value', None)\n                    return result\n\n            if t is None:\n\n\n                # We have some kind of parsing error here.  To handle\n                # this, we are going to push the current token onto\n                # the tokenstack and replace it with an 'error' token.\n                # If there are any synchronization rules, they may\n                # catch it.\n                #\n                # In addition to pushing the error token, we call call\n                # the user defined p_error() function if this is the\n                # first syntax error.  
This function is only called if\n                # errorcount == 0.\n                if errorcount == 0 or self.errorok:\n                    errorcount = error_count\n                    self.errorok = False\n                    errtoken = lookahead\n                    if errtoken.type == '$end':\n                        errtoken = None               # End of file!\n                    if self.errorfunc:\n                        if errtoken and not hasattr(errtoken, 'lexer'):\n                            errtoken.lexer = lexer\n                        self.state = state\n                        tok = call_errorfunc(self.errorfunc, errtoken, self)\n                        if self.errorok:\n                            # User must have done some kind of panic\n                            # mode recovery on their own.  The\n                            # returned token is the next lookahead\n                            lookahead = tok\n                            errtoken = None\n                            continue\n                    else:\n                        if errtoken:\n                            if hasattr(errtoken, 'lineno'):\n                                lineno = lookahead.lineno\n                            else:\n                                lineno = 0\n                            if lineno:\n                                sys.stderr.write('yacc: Syntax error at line %d, token=%s\\n' % (lineno, errtoken.type))\n                            else:\n                                sys.stderr.write('yacc: Syntax error, token=%s' % errtoken.type)\n                        else:\n                            sys.stderr.write('yacc: Parse error in input. EOF\\n')\n                            return\n\n                else:\n                    errorcount = error_count\n\n                # case 1:  the statestack only has 1 entry on it.  If we're in this state, the\n                # entire parse has been rolled back and we're completely hosed.   
The token is\n                # discarded and we just keep going.\n\n                if len(statestack) <= 1 and lookahead.type != '$end':\n                    lookahead = None\n                    errtoken = None\n                    state = 0\n                    # Nuke the pushback stack\n                    del lookaheadstack[:]\n                    continue\n\n                # case 2: the statestack has a couple of entries on it, but we're\n                # at the end of the file. nuke the top entry and generate an error token\n\n                # Start nuking entries on the stack\n                if lookahead.type == '$end':\n                    # Whoa. We're really hosed here. Bail out\n                    return\n\n                if lookahead.type != 'error':\n                    sym = symstack[-1]\n                    if sym.type == 'error':\n                        # Hmmm. Error is on top of stack, we'll just nuke input\n                        # symbol and continue\n                        lookahead = None\n                        continue\n\n                    # Create the error symbol for the first time and make it the new lookahead symbol\n                    t = YaccSymbol()\n                    t.type = 'error'\n\n                    if hasattr(lookahead, 'lineno'):\n                        t.lineno = t.endlineno = lookahead.lineno\n                    if hasattr(lookahead, 'lexpos'):\n                        t.lexpos = t.endlexpos = lookahead.lexpos\n                    t.value = lookahead\n                    lookaheadstack.append(lookahead)\n                    lookahead = t\n                else:\n                    sym = symstack.pop()\n                    statestack.pop()\n                    state = statestack[-1]\n\n                continue\n\n            # Call an error function here\n            raise RuntimeError('yacc: internal parser error!!!\\n')\n\n        #--! 
parseopt-notrack-end\n\n# -----------------------------------------------------------------------------\n#                          === Grammar Representation ===\n#\n# The following functions, classes, and variables are used to represent and\n# manipulate the rules that make up a grammar.\n# -----------------------------------------------------------------------------\n\n# regex matching identifiers\n_is_identifier = re.compile(r'^[a-zA-Z0-9_-]+$')\n\n# -----------------------------------------------------------------------------\n# class Production:\n#\n# This class stores the raw information about a single production or grammar rule.\n# A grammar rule refers to a specification such as this:\n#\n#       expr : expr PLUS term\n#\n# Here are the basic attributes defined on all productions\n#\n#       name     - Name of the production.  For example 'expr'\n#       prod     - A list of symbols on the right side ['expr','PLUS','term']\n#       prec     - Production precedence level\n#       number   - Production number.\n#       func     - Function that executes on reduce\n#       file     - File where production function is defined\n#       lineno   - Line number where production function is defined\n#\n# The following attributes are defined or optional.\n#\n#       len       - Length of the production (number of symbols on right hand side)\n#       usyms     - Set of unique symbols found in the production\n# -----------------------------------------------------------------------------\n\nclass Production(object):\n    reduced = 0\n    def __init__(self, number, name, prod, precedence=('right', 0), func=None, file='', line=0):\n        self.name     = name\n        self.prod     = tuple(prod)\n        self.number   = number\n        self.func     = func\n        self.callable = None\n        self.file     = file\n        self.line     = line\n        self.prec     = precedence\n\n        # Internal settings used during table construction\n\n        self.len  = 
len(self.prod)   # Length of the production\n\n        # Create a list of unique production symbols used in the production\n        self.usyms = []\n        for s in self.prod:\n            if s not in self.usyms:\n                self.usyms.append(s)\n\n        # List of all LR items for the production\n        self.lr_items = []\n        self.lr_next = None\n\n        # Create a string representation\n        if self.prod:\n            self.str = '%s -> %s' % (self.name, ' '.join(self.prod))\n        else:\n            self.str = '%s -> <empty>' % self.name\n\n    def __str__(self):\n        return self.str\n\n    def __repr__(self):\n        return 'Production(' + str(self) + ')'\n\n    def __len__(self):\n        return len(self.prod)\n\n    def __nonzero__(self):\n        return 1\n\n    def __getitem__(self, index):\n        return self.prod[index]\n\n    # Return the nth lr_item from the production (or None if at the end)\n    def lr_item(self, n):\n        if n > len(self.prod):\n            return None\n        p = LRItem(self, n)\n        # Precompute the list of productions immediately following.\n        try:\n            p.lr_after = self.Prodnames[p.prod[n+1]]\n        except (IndexError, KeyError):\n            p.lr_after = []\n        try:\n            p.lr_before = p.prod[n-1]\n        except IndexError:\n            p.lr_before = None\n        return p\n\n    # Bind the production function name to a callable\n    def bind(self, pdict):\n        if self.func:\n            self.callable = pdict[self.func]\n\n# This class serves as a minimal standin for Production objects when\n# reading table data from files.   
It only contains information\n# actually used by the LR parsing engine, plus some additional\n# debugging information.\nclass MiniProduction(object):\n    def __init__(self, str, name, len, func, file, line):\n        self.name     = name\n        self.len      = len\n        self.func     = func\n        self.callable = None\n        self.file     = file\n        self.line     = line\n        self.str      = str\n\n    def __str__(self):\n        return self.str\n\n    def __repr__(self):\n        return 'MiniProduction(%s)' % self.str\n\n    # Bind the production function name to a callable\n    def bind(self, pdict):\n        if self.func:\n            self.callable = pdict[self.func]\n\n\n# -----------------------------------------------------------------------------\n# class LRItem\n#\n# This class represents a specific stage of parsing a production rule.  For\n# example:\n#\n#       expr : expr . PLUS term\n#\n# In the above, the \".\" represents the current location of the parse.  Here\n# basic attributes:\n#\n#       name       - Name of the production.  For example 'expr'\n#       prod       - A list of symbols on the right side ['expr','.', 'PLUS','term']\n#       number     - Production number.\n#\n#       lr_next      Next LR item. Example, if we are ' expr -> expr . PLUS term'\n#                    then lr_next refers to 'expr -> expr PLUS . 
term'\n#       lr_index   - LR item index (location of the \".\") in the prod list.\n#       lookaheads - LALR lookahead symbols for this item\n#       len        - Length of the production (number of symbols on right hand side)\n#       lr_after    - List of all productions that immediately follow\n#       lr_before   - Grammar symbol immediately before\n# -----------------------------------------------------------------------------\n\nclass LRItem(object):\n    def __init__(self, p, n):\n        self.name       = p.name\n        self.prod       = list(p.prod)\n        self.number     = p.number\n        self.lr_index   = n\n        self.lookaheads = {}\n        self.prod.insert(n, '.')\n        self.prod       = tuple(self.prod)\n        self.len        = len(self.prod)\n        self.usyms      = p.usyms\n\n    def __str__(self):\n        if self.prod:\n            s = '%s -> %s' % (self.name, ' '.join(self.prod))\n        else:\n            s = '%s -> <empty>' % self.name\n        return s\n\n    def __repr__(self):\n        return 'LRItem(' + str(self) + ')'\n\n# -----------------------------------------------------------------------------\n# rightmost_terminal()\n#\n# Return the rightmost terminal from a list of symbols.  
# Used in add_production()
# -----------------------------------------------------------------------------
def rightmost_terminal(symbols, terminals):
    """Return the right-most symbol in 'symbols' that is also a terminal
    (i.e. a key of the 'terminals' mapping), or None if no terminal appears.

    Used by Grammar.add_production() to determine a rule's default precedence.
    """
    i = len(symbols) - 1
    while i >= 0:
        if symbols[i] in terminals:
            return symbols[i]
        i -= 1
    return None

# -----------------------------------------------------------------------------
#                           === GRAMMAR CLASS ===
#
# The following class represents the contents of the specified grammar along
# with various computed properties such as first sets, follow sets, LR items, etc.
# This data is used for critical parts of the table generation process later.
# -----------------------------------------------------------------------------

class GrammarError(YaccError):
    """Raised for errors in the grammar specification (bad rule names,
    duplicate rules, misuse of %prec, undefined start symbol, ...)."""
    pass

class Grammar(object):
    """Container for a context-free grammar plus the derived data (FIRST and
    FOLLOW sets, LR items) needed to build LR parsing tables.

    Productions must be added with add_production() after any precedence
    declarations (set_precedence()), then set_start() creates the augmented
    grammar rule 0: S' -> start.
    """

    def __init__(self, terminals):
        self.Productions  = [None]  # A list of all of the productions.  The first
                                    # entry is always reserved for the purpose of
                                    # building an augmented grammar

        self.Prodnames    = {}      # A dictionary mapping the names of nonterminals to a list of all
                                    # productions of that nonterminal.

        self.Prodmap      = {}      # A dictionary that is only used to detect duplicate
                                    # productions.

        self.Terminals    = {}      # A dictionary mapping the names of terminal symbols to a
                                    # list of the rules where they are used.

        for term in terminals:
            self.Terminals[term] = []

        # 'error' is always a valid terminal (yacc error recovery token).
        self.Terminals['error'] = []

        self.Nonterminals = {}      # A dictionary mapping names of nonterminals to a list
                                    # of rule numbers where they are used.

        self.First        = {}      # A dictionary of precomputed FIRST(x) symbols

        self.Follow       = {}      # A dictionary of precomputed FOLLOW(x) symbols

        self.Precedence   = {}      # Precedence rules for each terminal. Contains tuples of the
                                    # form ('right',level) or ('nonassoc', level) or ('left',level)

        self.UsedPrecedence = set() # Precedence rules that were actually used by the grammar.
                                    # This is only used to provide error checking and to generate
                                    # a warning about unused precedence rules.

        self.Start = None           # Starting symbol for the grammar


    def __len__(self):
        return len(self.Productions)

    def __getitem__(self, index):
        return self.Productions[index]

    # -----------------------------------------------------------------------------
    # set_precedence()
    #
    # Sets the precedence for a given terminal. assoc is the associativity such as
    # 'left','right', or 'nonassoc'.  level is a numeric level.
    #
    # -----------------------------------------------------------------------------

    def set_precedence(self, term, assoc, level):
        """Record the precedence of terminal 'term'.  Must be called before
        any productions are added; raises GrammarError on duplicates or on
        an invalid associativity."""
        assert self.Productions == [None], 'Must call set_precedence() before add_production()'
        if term in self.Precedence:
            raise GrammarError('Precedence already specified for terminal %r' % term)
        if assoc not in ['left', 'right', 'nonassoc']:
            raise GrammarError("Associativity must be one of 'left','right', or 'nonassoc'")
        self.Precedence[term] = (assoc, level)

    # -----------------------------------------------------------------------------
    # add_production()
    #
    # Given an action function, this function assembles a production rule and
    # computes its precedence level.
    #
    # The production rule is supplied as a list of symbols.  For example,
    # a rule such as 'expr : expr PLUS term' has a production name of 'expr' and
    # symbols ['expr','PLUS','term'].
    #
    # Precedence is determined by the precedence of the right-most non-terminal
    # or the precedence of a terminal specified by %prec.
    #
    # A variety of error checks are performed to make sure production symbols
    # are valid and that %prec is used correctly.
    # -----------------------------------------------------------------------------

    def add_production(self, prodname, syms, func=None, file='', line=0):
        """Add one production 'prodname : syms...' with action function 'func'.

        NOTE: 'syms' is modified in place (literal tokens are replaced by their
        character value and a trailing ['%prec', name] pair is removed).
        Raises GrammarError for any invalid rule.
        """

        if prodname in self.Terminals:
            raise GrammarError('%s:%d: Illegal rule name %r. Already defined as a token' % (file, line, prodname))
        if prodname == 'error':
            raise GrammarError('%s:%d: Illegal rule name %r. error is a reserved word' % (file, line, prodname))
        if not _is_identifier.match(prodname):
            raise GrammarError('%s:%d: Illegal rule name %r' % (file, line, prodname))

        # Look for literal tokens
        for n, s in enumerate(syms):
            if s[0] in "'\"":
                try:
                    # eval() turns the quoted literal into its character value.
                    c = eval(s)
                    if (len(c) > 1):
                        raise GrammarError('%s:%d: Literal token %s in rule %r may only be a single character' %
                                           (file, line, s, prodname))
                    if c not in self.Terminals:
                        self.Terminals[c] = []
                    syms[n] = c
                    continue
                except SyntaxError:
                    pass
            if not _is_identifier.match(s) and s != '%prec':
                raise GrammarError('%s:%d: Illegal name %r in rule %r' % (file, line, s, prodname))

        # Determine the precedence level
        if '%prec' in syms:
            if syms[-1] == '%prec':
                raise GrammarError('%s:%d: Syntax error. Nothing follows %%prec' % (file, line))
            if syms[-2] != '%prec':
                raise GrammarError('%s:%d: Syntax error. %%prec can only appear at the end of a grammar rule' %
                                   (file, line))
            precname = syms[-1]
            prodprec = self.Precedence.get(precname)
            if not prodprec:
                raise GrammarError('%s:%d: Nothing known about the precedence of %r' % (file, line, precname))
            else:
                self.UsedPrecedence.add(precname)
            del syms[-2:]     # Drop %prec from the rule
        else:
            # If no %prec, precedence is determined by the rightmost terminal symbol
            precname = rightmost_terminal(syms, self.Terminals)
            prodprec = self.Precedence.get(precname, ('right', 0))

        # See if the rule is already in the rulemap
        map = '%s -> %s' % (prodname, syms)
        if map in self.Prodmap:
            m = self.Prodmap[map]
            raise GrammarError('%s:%d: Duplicate rule %s. ' % (file, line, m) +
                               'Previous definition at %s:%d' % (m.file, m.line))

        # From this point on, everything is valid.  Create a new Production instance
        pnumber  = len(self.Productions)
        if prodname not in self.Nonterminals:
            self.Nonterminals[prodname] = []

        # Add the production number to Terminals and Nonterminals
        for t in syms:
            if t in self.Terminals:
                self.Terminals[t].append(pnumber)
            else:
                if t not in self.Nonterminals:
                    self.Nonterminals[t] = []
                self.Nonterminals[t].append(pnumber)

        # Create a production and add it to the list of productions
        p = Production(pnumber, prodname, syms, prodprec, func, file, line)
        self.Productions.append(p)
        self.Prodmap[map] = p

        # Add to the global productions list
        try:
            self.Prodnames[prodname].append(p)
        except KeyError:
            self.Prodnames[prodname] = [p]

    # -----------------------------------------------------------------------------
    # set_start()
    #
    # Sets the starting symbol and creates the augmented grammar.  Production
    # rule 0 is S' -> start where start is the start symbol.
    # -----------------------------------------------------------------------------

    def set_start(self, start=None):
        """Set the start symbol (defaults to the name of the first production)
        and install the augmented rule 0: S' -> start."""
        if not start:
            start = self.Productions[1].name
        if start not in self.Nonterminals:
            raise GrammarError('start symbol %s undefined' % start)
        self.Productions[0] = Production(0, "S'", [start])
        self.Nonterminals[start].append(0)
        self.Start = start

    # -----------------------------------------------------------------------------
    # find_unreachable()
    #
    # Find all of the nonterminal symbols that can't be reached from the starting
    # symbol.  Returns a list of nonterminals that can't be reached.
    # -----------------------------------------------------------------------------

    def find_unreachable(self):
        """Return the list of nonterminals unreachable from the start symbol."""

        # Mark all symbols that are reachable from a symbol s
        # (depth-first walk over the productions of each visited nonterminal).
        def mark_reachable_from(s):
            if s in reachable:
                return
            reachable.add(s)
            for p in self.Prodnames.get(s, []):
                for r in p.prod:
                    mark_reachable_from(r)

        reachable = set()
        mark_reachable_from(self.Productions[0].prod[0])
        return [s for s in self.Nonterminals if s not in reachable]

    # -----------------------------------------------------------------------------
    # infinite_cycles()
    #
    # This function looks at the various parsing rules and tries to detect
    # infinite recursion cycles (grammar rules where there is no possible way
    # to derive a string of only terminals).
    # -----------------------------------------------------------------------------

    def infinite_cycles(self):
        """Return a list of symbols that can never derive a terminal-only
        string (i.e. rules that would recurse forever)."""
        terminates = {}

        # Terminals: a terminal trivially terminates.
        for t in self.Terminals:
            terminates[t] = True

        terminates['$end'] = True

        # Nonterminals:

        # Initialize to false:
        for n in self.Nonterminals:
            terminates[n] = False

        # Then propagate termination until no change (fixed-point iteration):
        while True:
            some_change = False
            for (n, pl) in self.Prodnames.items():
                # Nonterminal n terminates iff any of its productions terminates.
                for p in pl:
                    # Production p terminates iff all of its rhs symbols terminate.
                    for s in p.prod:
                        if not terminates[s]:
                            # The symbol s does not terminate,
                            # so production p does not terminate.
                            p_terminates = False
                            break
                    else:
                        # didn't break from the loop,
                        # so every symbol s terminates
                        # so production p terminates.
                        p_terminates = True

                    if p_terminates:
                        # symbol n terminates!
                        if not terminates[n]:
                            terminates[n] = True
                            some_change = True
                        # Don't need to consider any more productions for this n.
                        break

            if not some_change:
                break

        infinite = []
        for (s, term) in terminates.items():
            if not term:
                if s not in self.Prodnames and s not in self.Terminals and s != 'error':
                    # s is used-but-not-defined, and we've already warned of that,
                    # so it would be overkill to say that it's also non-terminating.
                    pass
                else:
                    infinite.append(s)

        return infinite

    # -----------------------------------------------------------------------------
    # undefined_symbols()
    #
    # Find all symbols that were used the grammar, but not defined as tokens or
    # grammar rules.  Returns a list of tuples (sym, prod) where sym in the symbol
    # and prod is the production where the symbol was used.
    # -----------------------------------------------------------------------------
    def undefined_symbols(self):
        """Return [(sym, prod), ...] for symbols used but never defined."""
        result = []
        for p in self.Productions:
            if not p:
                # Skip the reserved slot 0 before set_start() fills it in.
                continue

            for s in p.prod:
                if s not in self.Prodnames and s not in self.Terminals and s != 'error':
                    result.append((s, p))
        return result

    # -----------------------------------------------------------------------------
    # unused_terminals()
    #
    # Find all terminals that were defined, but not used by the grammar.  Returns
    # a list of all symbols.
    # -----------------------------------------------------------------------------
    def unused_terminals(self):
        """Return the list of terminals declared but never used in a rule."""
        unused_tok = []
        for s, v in self.Terminals.items():
            # v is the list of rule numbers using s; empty means unused.
            if s != 'error' and not v:
                unused_tok.append(s)

        return unused_tok

    # ------------------------------------------------------------------------------
    # unused_rules()
    #
    # Find all grammar rules that were defined,  but not used (maybe not reachable)
    # Returns a list of productions.
    # ------------------------------------------------------------------------------

    def unused_rules(self):
        """Return one representative Production for each nonterminal that is
        never referenced on the right-hand side of any rule."""
        unused_prod = []
        for s, v in self.Nonterminals.items():
            if not v:
                p = self.Prodnames[s][0]
                unused_prod.append(p)
        return unused_prod

    # -----------------------------------------------------------------------------
    # unused_precedence()
    #
    # Returns a list of tuples (term,precedence) corresponding to precedence
    # rules that were never used by the grammar.  term is the name of the terminal
    # on which precedence was applied and precedence is a string such as 'left' or
    # 'right' corresponding to the type of precedence.
    # -----------------------------------------------------------------------------

    def unused_precedence(self):
        """Return [(term, assoc), ...] for precedence declarations never used."""
        unused = []
        for termname in self.Precedence:
            if not (termname in self.Terminals or termname in self.UsedPrecedence):
                unused.append((termname, self.Precedence[termname][0]))

        return unused

    # -------------------------------------------------------------------------
    # _first()
    #
    # Compute the value of FIRST1(beta) where beta is a tuple of symbols.
    #
    # During execution of compute_first1, the result may be incomplete.
    # Afterward (e.g., when called from compute_follow()), it will be complete.
    # -------------------------------------------------------------------------
    def _first(self, beta):
        """Return FIRST1(beta) as a list of terminals, possibly including
        the sentinel '<empty>' if all of beta can derive the empty string."""

        # We are computing First(x1,x2,x3,...,xn)
        result = []
        for x in beta:
            x_produces_empty = False

            # Add all the non-<empty> symbols of First[x] to the result.
            for f in self.First[x]:
                if f == '<empty>':
                    x_produces_empty = True
                else:
                    if f not in result:
                        result.append(f)

            if x_produces_empty:
                # We have to consider the next x in beta,
                # i.e. stay in the loop.
                pass
            else:
                # We don't have to consider any further symbols in beta.
                break
        else:
            # There was no 'break' from the loop,
            # so x_produces_empty was true for all x in beta,
            # so beta produces empty as well.
            result.append('<empty>')

        return result

    # -------------------------------------------------------------------------
    # compute_first()
    #
    # Compute the value of FIRST1(X) for all symbols
    # -------------------------------------------------------------------------
    def compute_first(self):
        """Compute (and cache in self.First) FIRST1(X) for every symbol."""
        if self.First:
            return self.First

        # Terminals: FIRST of a terminal is itself.
        for t in self.Terminals:
            self.First[t] = [t]

        self.First['$end'] = ['$end']

        # Nonterminals:

        # Initialize to the empty set:
        for n in self.Nonterminals:
            self.First[n] = []

        # Then propagate symbols until no change (fixed-point iteration):
        while True:
            some_change = False
            for n in self.Nonterminals:
                for p in self.Prodnames[n]:
                    for f in self._first(p.prod):
                        if f not in self.First[n]:
                            self.First[n].append(f)
                            some_change = True
            if not some_change:
                break

        return self.First

    # ---------------------------------------------------------------------
    # compute_follow()
    #
    # Computes all of the follow sets for every non-terminal symbol.  The
    # follow set is the set of all symbols that might follow a given
    # non-terminal.  See the Dragon book, 2nd Ed. p. 189.
    # ---------------------------------------------------------------------
    def compute_follow(self, start=None):
        """Compute (and cache in self.Follow) FOLLOW(X) for every nonterminal."""
        # If already computed, return the result
        if self.Follow:
            return self.Follow

        # If first sets not computed yet, do that first.
        if not self.First:
            self.compute_first()

        # Add '$end' to the follow list of the start symbol
        for k in self.Nonterminals:
            self.Follow[k] = []

        if not start:
            start = self.Productions[1].name

        self.Follow[start] = ['$end']

        while True:
            didadd = False
            for p in self.Productions[1:]:
                # Here is the production set
                for i, B in enumerate(p.prod):
                    if B in self.Nonterminals:
                        # Okay. We got a non-terminal in a production
                        fst = self._first(p.prod[i+1:])
                        hasempty = False
                        for f in fst:
                            if f != '<empty>' and f not in self.Follow[B]:
                                self.Follow[B].append(f)
                                didadd = True
                            if f == '<empty>':
                                hasempty = True
                        if hasempty or i == (len(p.prod)-1):
                            # Add elements of follow(a) to follow(b)
                            for f in self.Follow[p.name]:
                                if f not in self.Follow[B]:
                                    self.Follow[B].append(f)
                                    didadd = True
            if not didadd:
                break
        return self.Follow


    # -----------------------------------------------------------------------------
    # build_lritems()
    #
    # This function walks the list of productions and builds a complete set of the
    # LR items.  The LR items are stored in two ways:  First, they are uniquely
    # numbered and placed in the list _lritems.  Second, a linked list of LR items
    # is built for each production.  For example:
    #
    #   E -> E PLUS E
    #
    # Creates the list
    #
    #  [E -> . E PLUS E, E -> E . PLUS E, E -> E PLUS . E, E -> E PLUS E . ]
    # -----------------------------------------------------------------------------

    def build_lritems(self):
        """Attach to every production its chain of LRItem objects (one per
        dot position), linked via lr_next and collected in p.lr_items."""
        for p in self.Productions:
            lastlri = p
            i = 0
            lr_items = []
            while True:
                if i > len(p):
                    lri = None
                else:
                    lri = LRItem(p, i)
                    # Precompute the list of productions immediately following
                    try:
                        lri.lr_after = self.Prodnames[lri.prod[i+1]]
                    except (IndexError, KeyError):
                        lri.lr_after = []
                    try:
                        lri.lr_before = lri.prod[i-1]
                    except IndexError:
                        lri.lr_before = None

                lastlri.lr_next = lri
                if not lri:
                    break
                lr_items.append(lri)
                lastlri = lri
                i += 1
            p.lr_items = lr_items

# -----------------------------------------------------------------------------
#                            == Class LRTable ==
#
# This basic class represents a basic table of LR parsing information.
# Methods for generating the tables are not defined here.
# They are defined
# in the derived class LRGeneratedTable.
# -----------------------------------------------------------------------------

class VersionError(YaccError):
    """Raised when saved parser tables were produced by a different
    table-format version (__tabversion__) than the running code."""
    pass

class LRTable(object):
    """Holds a table of LR parsing information (action table, goto table,
    productions, and the method used to generate them).  This base class only
    knows how to load tables; generation lives in LRGeneratedTable."""

    def __init__(self):
        self.lr_action = None        # Action table
        self.lr_goto = None          # Goto table
        self.lr_productions = None   # List of MiniProduction objects
        self.lr_method = None        # Table generation method ('LALR'/'SLR')

    def read_table(self, module):
        """Load tables from a parsetab module (a module object or an
        importable module name).  Returns the table signature.

        Raises VersionError if the module's table version is stale.
        """
        if isinstance(module, types.ModuleType):
            parsetab = module
        else:
            # importlib.import_module replaces the original
            # exec('import %s' % module) — same effect, no string exec.
            import importlib
            parsetab = importlib.import_module(module)

        if parsetab._tabversion != __tabversion__:
            raise VersionError('yacc table file version is out of date')

        self.lr_action = parsetab._lr_action
        self.lr_goto = parsetab._lr_goto

        self.lr_productions = []
        for p in parsetab._lr_productions:
            self.lr_productions.append(MiniProduction(*p))

        self.lr_method = parsetab._lr_method
        return parsetab._lr_signature

    def read_pickle(self, filename):
        """Load tables from a pickle file.  Returns the table signature.

        Raises ImportError if the file does not exist and VersionError if
        the pickled table version is stale.
        """
        try:
            import cPickle as pickle      # Python 2 fast path
        except ImportError:
            import pickle

        if not os.path.exists(filename):
            raise ImportError

        # Use a context manager so the file handle is released even when
        # the version check raises VersionError (the previous code leaked
        # the open file in that case).
        with open(filename, 'rb') as in_f:
            tabversion = pickle.load(in_f)
            if tabversion != __tabversion__:
                raise VersionError('yacc table file version is out of date')
            self.lr_method = pickle.load(in_f)
            signature = pickle.load(in_f)
            self.lr_action = pickle.load(in_f)
            self.lr_goto = pickle.load(in_f)
            productions = pickle.load(in_f)

        self.lr_productions = [MiniProduction(*p) for p in productions]

        return signature

    # Bind all production function names to callable objects in pdict
    def bind_callables(self, pdict):
        for p in self.lr_productions:
            p.bind(pdict)


# -----------------------------------------------------------------------------
#                           === LR Generator ===
#
# The following classes and functions are used to generate LR parsing tables on
# a grammar.
# -----------------------------------------------------------------------------

# -----------------------------------------------------------------------------
# digraph()
# traverse()
#
# The following two functions are used to compute set valued functions
# of the form:
#
#     F(x) = F'(x) U U{F(y) | x R y}
#
# This is used to compute the values of Read() sets as well as FOLLOW sets
# in LALR(1) generation.
#
# Inputs:  X    - An input set
#          R    - A relation
#          FP   - Set-valued function
# ------------------------------------------------------------------------------

def digraph(X, R, FP):
    """Compute F(x) for every x in X via Tarjan-style traversal.  Returns a
    dict mapping each x to its resulting list of values."""
    N = {}
    for x in X:
        N[x] = 0                 # 0 means "not yet visited"
    stack = []
    F = {}
    for x in X:
        if N[x] == 0:
            traverse(x, N, stack, F, X, R, FP)
    return F

def traverse(x, N, stack, F, X, R, FP):
    """Helper for digraph(): depth-first traversal that merges F values
    around cycles (all members of a strongly connected component end up
    sharing the same F set)."""
    stack.append(x)
    d = len(stack)
    N[x] = d
    F[x] = FP(x)             # F(X) <- F'(x)

    rel = R(x)               # Get y's related to x
    for y in rel:
        if N[y] == 0:
            traverse(y, N, stack, F, X, R, FP)
        N[x] = min(N[x], N[y])
        for a in F.get(y, []):
            if a not in F[x]:
                F[x].append(a)
    if N[x] == d:
        # x is the root of a strongly connected component: mark every member
        # as finished (MAXINT) and give it the component's merged F set.
        N[stack[-1]] = MAXINT
        F[stack[-1]] = F[x]
        element = stack.pop()
        while element != x:
            N[stack[-1]] = MAXINT
            F[stack[-1]] = F[x]
            element = stack.pop()

class LALRError(YaccError):
    """Raised for errors during LALR table generation."""
    pass

# -----------------------------------------------------------------------------
#                             == LRGeneratedTable ==
#
# This class implements the LR table generation algorithm.
There are no\n# public methods except for write()\n# -----------------------------------------------------------------------------\n\nclass LRGeneratedTable(LRTable):\n    def __init__(self, grammar, method='LALR', log=None):\n        if method not in ['SLR', 'LALR']:\n            raise LALRError('Unsupported method %s' % method)\n\n        self.grammar = grammar\n        self.lr_method = method\n\n        # Set up the logger\n        if not log:\n            log = NullLogger()\n        self.log = log\n\n        # Internal attributes\n        self.lr_action     = {}        # Action table\n        self.lr_goto       = {}        # Goto table\n        self.lr_productions  = grammar.Productions    # Copy of grammar Production array\n        self.lr_goto_cache = {}        # Cache of computed gotos\n        self.lr0_cidhash   = {}        # Cache of closures\n\n        self._add_count    = 0         # Internal counter used to detect cycles\n\n        # Diagonistic information filled in by the table generator\n        self.sr_conflict   = 0\n        self.rr_conflict   = 0\n        self.conflicts     = []        # List of conflicts\n\n        self.sr_conflicts  = []\n        self.rr_conflicts  = []\n\n        # Build the tables\n        self.grammar.build_lritems()\n        self.grammar.compute_first()\n        self.grammar.compute_follow()\n        self.lr_parse_table()\n\n    # Compute the LR(0) closure operation on I, where I is a set of LR(0) items.\n\n    def lr0_closure(self, I):\n        self._add_count += 1\n\n        # Add everything in I to J\n        J = I[:]\n        didadd = True\n        while didadd:\n            didadd = False\n            for j in J:\n                for x in j.lr_after:\n                    if getattr(x, 'lr0_added', 0) == self._add_count:\n                        continue\n                    # Add B --> .G to J\n                    J.append(x.lr_next)\n                    x.lr0_added = self._add_count\n                    didadd = 
True\n\n        return J\n\n    # Compute the LR(0) goto function goto(I,X) where I is a set\n    # of LR(0) items and X is a grammar symbol.   This function is written\n    # in a way that guarantees uniqueness of the generated goto sets\n    # (i.e. the same goto set will never be returned as two different Python\n    # objects).  With uniqueness, we can later do fast set comparisons using\n    # id(obj) instead of element-wise comparison.\n\n    def lr0_goto(self, I, x):\n        # First we look for a previously cached entry\n        g = self.lr_goto_cache.get((id(I), x))\n        if g:\n            return g\n\n        # Now we generate the goto set in a way that guarantees uniqueness\n        # of the result\n\n        s = self.lr_goto_cache.get(x)\n        if not s:\n            s = {}\n            self.lr_goto_cache[x] = s\n\n        gs = []\n        for p in I:\n            n = p.lr_next\n            if n and n.lr_before == x:\n                s1 = s.get(id(n))\n                if not s1:\n                    s1 = {}\n                    s[id(n)] = s1\n                gs.append(n)\n                s = s1\n        g = s.get('$end')\n        if not g:\n            if gs:\n                g = self.lr0_closure(gs)\n                s['$end'] = g\n            else:\n                s['$end'] = gs\n        self.lr_goto_cache[(id(I), x)] = g\n        return g\n\n    # Compute the LR(0) sets of item function\n    def lr0_items(self):\n        C = [self.lr0_closure([self.grammar.Productions[0].lr_next])]\n        i = 0\n        for I in C:\n            self.lr0_cidhash[id(I)] = i\n            i += 1\n\n        # Loop over the items in C and each grammar symbols\n        i = 0\n        while i < len(C):\n            I = C[i]\n            i += 1\n\n            # Collect all of the symbols that could possibly be in the goto(I,X) sets\n            asyms = {}\n            for ii in I:\n                for s in ii.usyms:\n                    asyms[s] = None\n\n            
for x in asyms:\n                g = self.lr0_goto(I, x)\n                if not g or id(g) in self.lr0_cidhash:\n                    continue\n                self.lr0_cidhash[id(g)] = len(C)\n                C.append(g)\n\n        return C\n\n    # -----------------------------------------------------------------------------\n    #                       ==== LALR(1) Parsing ====\n    #\n    # LALR(1) parsing is almost exactly the same as SLR except that instead of\n    # relying upon Follow() sets when performing reductions, a more selective\n    # lookahead set that incorporates the state of the LR(0) machine is utilized.\n    # Thus, we mainly just have to focus on calculating the lookahead sets.\n    #\n    # The method used here is due to DeRemer and Pennelo (1982).\n    #\n    # DeRemer, F. L., and T. J. Pennelo: \"Efficient Computation of LALR(1)\n    #     Lookahead Sets\", ACM Transactions on Programming Languages and Systems,\n    #     Vol. 4, No. 4, Oct. 1982, pp. 615-649\n    #\n    # Further details can also be found in:\n    #\n    #  J. Tremblay and P. 
Sorenson, \"The Theory and Practice of Compiler Writing\",\n    #      McGraw-Hill Book Company, (1985).\n    #\n    # -----------------------------------------------------------------------------\n\n    # -----------------------------------------------------------------------------\n    # compute_nullable_nonterminals()\n    #\n    # Creates a dictionary containing all of the non-terminals that might produce\n    # an empty production.\n    # -----------------------------------------------------------------------------\n\n    def compute_nullable_nonterminals(self):\n        nullable = set()\n        num_nullable = 0\n        while True:\n            for p in self.grammar.Productions[1:]:\n                if p.len == 0:\n                    nullable.add(p.name)\n                    continue\n                for t in p.prod:\n                    if t not in nullable:\n                        break\n                else:\n                    nullable.add(p.name)\n            if len(nullable) == num_nullable:\n                break\n            num_nullable = len(nullable)\n        return nullable\n\n    # -----------------------------------------------------------------------------\n    # find_nonterminal_trans(C)\n    #\n    # Given a set of LR(0) items, this functions finds all of the non-terminal\n    # transitions.    These are transitions in which a dot appears immediately before\n    # a non-terminal.   
Returns a list of tuples of the form (state,N) where state\n    # is the state number and N is the nonterminal symbol.\n    #\n    # The input C is the set of LR(0) items.\n    # -----------------------------------------------------------------------------\n\n    def find_nonterminal_transitions(self, C):\n        trans = []\n        for stateno, state in enumerate(C):\n            for p in state:\n                if p.lr_index < p.len - 1:\n                    t = (stateno, p.prod[p.lr_index+1])\n                    if t[1] in self.grammar.Nonterminals:\n                        if t not in trans:\n                            trans.append(t)\n        return trans\n\n    # -----------------------------------------------------------------------------\n    # dr_relation()\n    #\n    # Computes the DR(p,A) relationships for non-terminal transitions.  The input\n    # is a tuple (state,N) where state is a number and N is a nonterminal symbol.\n    #\n    # Returns a list of terminals.\n    # -----------------------------------------------------------------------------\n\n    def dr_relation(self, C, trans, nullable):\n        state, N = trans\n        terms = []\n\n        g = self.lr0_goto(C[state], N)\n        for p in g:\n            if p.lr_index < p.len - 1:\n                a = p.prod[p.lr_index+1]\n                if a in self.grammar.Terminals:\n                    if a not in terms:\n                        terms.append(a)\n\n        # This extra bit is to handle the start state\n        if state == 0 and N == self.grammar.Productions[0].prod[0]:\n            terms.append('$end')\n\n        return terms\n\n    # -----------------------------------------------------------------------------\n    # reads_relation()\n    #\n    # Computes the READS() relation (p,A) READS (t,C).\n    # -----------------------------------------------------------------------------\n\n    def reads_relation(self, C, trans, empty):\n        # Look for empty transitions\n        rel = 
[]\n        state, N = trans\n\n        g = self.lr0_goto(C[state], N)\n        j = self.lr0_cidhash.get(id(g), -1)\n        for p in g:\n            if p.lr_index < p.len - 1:\n                a = p.prod[p.lr_index + 1]\n                if a in empty:\n                    rel.append((j, a))\n\n        return rel\n\n    # -----------------------------------------------------------------------------\n    # compute_lookback_includes()\n    #\n    # Determines the lookback and includes relations\n    #\n    # LOOKBACK:\n    #\n    # This relation is determined by running the LR(0) state machine forward.\n    # For example, starting with a production \"N : . A B C\", we run it forward\n    # to obtain \"N : A B C .\"   We then build a relationship between this final\n    # state and the starting state.   These relationships are stored in a dictionary\n    # lookdict.\n    #\n    # INCLUDES:\n    #\n    # Computes the INCLUDE() relation (p,A) INCLUDES (p',B).\n    #\n    # This relation is used to determine non-terminal transitions that occur\n    # inside of other non-terminal transition states.   (p,A) INCLUDES (p', B)\n    # if the following holds:\n    #\n    #       B -> LAT, where T -> epsilon and p' -L-> p\n    #\n    # L is essentially a prefix (which may be empty), T is a suffix that must be\n    # able to derive an empty string.  
State p' must lead to state p with the string L.
    #
    # -----------------------------------------------------------------------------

    def compute_lookback_includes(self, C, trans, nullable):
        lookdict = {}          # Dictionary of lookback relations
        includedict = {}       # Dictionary of include relations

        # Make a dictionary of non-terminal transitions
        dtrans = {}
        for t in trans:
            dtrans[t] = 1

        # Loop over all transitions and compute lookbacks and includes
        for state, N in trans:
            lookb = []
            includes = []
            for p in C[state]:
                if p.name != N:
                    continue

                # Okay, we have a name match.  We now follow the production all the way
                # through the state machine until we get the . on the right hand side

                lr_index = p.lr_index
                j = state
                while lr_index < p.len - 1:
                    lr_index = lr_index + 1
                    t = p.prod[lr_index]

                    # Check to see if this symbol and state are a non-terminal transition
                    if (j, t) in dtrans:
                        # Yes.  Okay, there is some chance that this is an includes relation
                        # the only way to know for certain is whether the rest of the
                        # production derives empty

                        li = lr_index + 1
                        while li < p.len:
                            if p.prod[li] in self.grammar.Terminals:
                                break      # No forget it
                            if p.prod[li] not in nullable:
                                break
                            li = li + 1
                        else:
                            # Appears to be a relation between (j,t) and (state,N)
                            includes.append((j, t))

                    g = self.lr0_goto(C[j], t)               # Go to next set
                    j = self.lr0_cidhash.get(id(g), -1)      # Go to next state

                # When we get here, j is the final state, now we have to locate the production
                for r in C[j]:
                    if r.name != p.name:
                        continue
                    if r.len != p.len:
                        continue
                    i = 0
                    # This loop is comparing a production ". A B C" with "A B C ."
                    while i < r.lr_index:
                        if r.prod[i] != p.prod[i+1]:
                            break
                        i = i + 1
                    else:
                        # Every symbol matched: r is p run to completion, so the
                        # final state j "looks back" to the transition (state, N).
                        lookb.append((j, r))
            for i in includes:
                if i not in includedict:
                    includedict[i] = []
                includedict[i].append((state, N))
            lookdict[(state, N)] = lookb

        return lookdict, includedict

    # -----------------------------------------------------------------------------
    # compute_read_sets()
    #
    # Given a set of LR(0) items, this function computes the read sets.
    #
    # Inputs:  C        =  Set of LR(0) items
    #          ntrans   = Set of nonterminal transitions
    #          nullable = Set of empty transitions
    #
    # Returns a set containing the read sets
    # -----------------------------------------------------------------------------

    def compute_read_sets(self, C, ntrans, nullable):
        # Read(p,A) = DR(p,A) U { Read(t,C) | (p,A) READS (t,C) }, solved with
        # the digraph traversal below.  Note that `nullable` is passed as the
        # `empty` argument of reads_relation().
        FP = lambda x: self.dr_relation(C, x, nullable)
        R =  lambda x: self.reads_relation(C, x, nullable)
        F = digraph(ntrans, R, FP)
        return F

    # -----------------------------------------------------------------------------
    # compute_follow_sets()
    #
    # Given a set of LR(0) items, a set of non-terminal transitions, a readset,
    # and an include set, this function computes the follow sets
    #
    # Follow(p,A) = Read(p,A) U U {Follow(p',B) | (p,A) INCLUDES (p',B)}
    #
    # Inputs:
    #            ntrans     = Set of nonterminal transitions
    #            readsets   = Readset (previously computed)
    #            inclsets   = Include sets (previously computed)
    #
    # Returns a set containing the follow sets
    # -----------------------------------------------------------------------------

    def compute_follow_sets(self, ntrans, readsets, 
inclsets):
        FP = lambda x: readsets[x]
        R  = lambda x: inclsets.get(x, [])
        F = digraph(ntrans, R, FP)
        return F

    # -----------------------------------------------------------------------------
    # add_lookaheads()
    #
    # Attaches the lookahead symbols to grammar rules.
    #
    # Inputs:    lookbacks         -  Set of lookback relations
    #            followset         -  Computed follow set
    #
    # This function directly attaches the lookaheads to productions contained
    # in the lookbacks set
    # -----------------------------------------------------------------------------

    def add_lookaheads(self, lookbacks, followset):
        for trans, lb in lookbacks.items():
            # Loop over productions in lookback
            for state, p in lb:
                if state not in p.lookaheads:
                    p.lookaheads[state] = []
                f = followset.get(trans, [])
                # Merge the follow set for this transition into the
                # production's per-state lookahead list, without duplicates.
                for a in f:
                    if a not in p.lookaheads[state]:
                        p.lookaheads[state].append(a)

    # -----------------------------------------------------------------------------
    # add_lalr_lookaheads()
    #
    # This function does all of the work of adding lookahead information for use
    # with LALR parsing
    # -----------------------------------------------------------------------------

    def add_lalr_lookaheads(self, C):
        # Determine all of the nullable nonterminals
        nullable = self.compute_nullable_nonterminals()

        # Find all non-terminal transitions
        trans = self.find_nonterminal_transitions(C)

        # Compute read sets
        readsets = self.compute_read_sets(C, trans, nullable)

        # Compute lookback/includes relations
        lookd, included = self.compute_lookback_includes(C, trans, nullable)

        # Compute LALR FOLLOW sets
        followsets = self.compute_follow_sets(trans, readsets, included)

        # Add all of the lookaheads
        self.add_lookaheads(lookd, followsets)

    # -----------------------------------------------------------------------------
    # lr_parse_table()
    #
    # This function constructs the parse tables for SLR or LALR
    # -----------------------------------------------------------------------------
    def lr_parse_table(self):
        Productions = self.grammar.Productions
        Precedence  = self.grammar.Precedence
        goto   = self.lr_goto         # Goto array
        action = self.lr_action       # Action array
        log    = self.log             # Logger for output

        actionp = {}                  # Action production array (temporary)

        log.info('Parsing method: %s', self.lr_method)

        # Step 1: Construct C = { I0, I1, ... IN}, collection of LR(0) items
        # This determines the number of states

        C = self.lr0_items()

        if self.lr_method == 'LALR':
            self.add_lalr_lookaheads(C)

        # Build the parser table, state by state
        st = 0
        for I in C:
            # Loop over each production in I
            actlist = []              # List of actions
            st_action  = {}
            st_actionp = {}
            st_goto    = {}
            log.info('')
            log.info('state %d', st)
            log.info('')
            for p in I:
                log.info('    (%d) %s', p.number, p)
            log.info('')

            # st_action encoding: 0 = accept, +N = shift and go to state N,
            # -N = reduce using rule N.
            for p in I:
                    if p.len == p.lr_index + 1:
                        if p.name == "S'":
                            # Start symbol. Accept!
                            st_action['$end'] = 0
                            st_actionp['$end'] = p
                        else:
                            # We are at the end of a production.  
Reduce!
                            if self.lr_method == 'LALR':
                                laheads = p.lookaheads[st]
                            else:
                                laheads = self.grammar.Follow[p.name]
                            for a in laheads:
                                actlist.append((a, p, 'reduce using rule %d (%s)' % (p.number, p)))
                                r = st_action.get(a)
                                if r is not None:
                                    # Whoa. Have a shift/reduce or reduce/reduce conflict
                                    if r > 0:
                                        # Need to decide on shift or reduce here
                                        # By default we favor shifting. Need to add
                                        # some precedence rules here.

                                        # Shift precedence comes from the token
                                        sprec, slevel = Precedence.get(a, ('right', 0))

                                        # Reduce precedence comes from rule being reduced (p)
                                        rprec, rlevel = Productions[p.number].prec

                                        if (slevel < rlevel) or ((slevel == rlevel) and (rprec == 'left')):
                                            # We really need to reduce here.
                                            st_action[a] = -p.number
                                            st_actionp[a] = p
                                            if not slevel and not rlevel:
                                                log.info('  ! shift/reduce conflict for %s resolved as reduce', a)
                                                self.sr_conflicts.append((st, a, 'reduce'))
                                            Productions[p.number].reduced += 1
                                        elif (slevel == rlevel) and (rprec == 'nonassoc'):
                                            st_action[a] = None
                                        else:
                                            # Hmmm. Guess we'll keep the shift
                                            if not rlevel:
                                                log.info('  ! shift/reduce conflict for %s resolved as shift', a)
                                                self.sr_conflicts.append((st, a, 'shift'))
                                    elif r < 0:
                                        # Reduce/reduce conflict.   In this case, we favor the rule
                                        # that was defined first in the grammar file
                                        oldp = Productions[-r]
                                        pp = Productions[p.number]
                                        if oldp.line > pp.line:
                                            st_action[a] = -p.number
                                            st_actionp[a] = p
                                            chosenp, rejectp = pp, oldp
                                            Productions[p.number].reduced += 1
                                            Productions[oldp.number].reduced -= 1
                                        else:
                                            chosenp, rejectp = oldp, pp
                                        self.rr_conflicts.append((st, chosenp, rejectp))
                                        log.info('  ! reduce/reduce conflict for %s resolved using rule %d (%s)',
                                                 a, st_actionp[a].number, st_actionp[a])
                                    else:
                                        raise LALRError('Unknown conflict in state %d' % st)
                                else:
                                    st_action[a] = -p.number
                                    st_actionp[a] = p
                                    Productions[p.number].reduced += 1
                    else:
                        i = p.lr_index
                        a = p.prod[i+1]       # Get symbol right after the "."
                        if a in self.grammar.Terminals:
                            g = self.lr0_goto(I, a)
                            j = self.lr0_cidhash.get(id(g), -1)
                            if j >= 0:
                                # We are in a shift state
                                actlist.append((a, p, 'shift and go to state %d' % j))
                                r = st_action.get(a)
                                if r is not None:
                                    # Whoa have a shift/reduce or shift/shift conflict
                                    if r > 0:
                                        if r != j:
                                            raise LALRError('Shift/shift conflict in state %d' % st)
                                    elif r < 0:
                                        # Do a precedence check.
                                        #   -  if precedence of reduce rule is higher, we reduce.
                                        #   -  if precedence of reduce is same and left assoc, we reduce.
                                        #   -  otherwise we shift

                                        # Shift precedence comes from the token
                                        sprec, slevel = Precedence.get(a, ('right', 0))

                                        # Reduce precedence comes from the rule that could have been reduced
                                        rprec, rlevel = Productions[st_actionp[a].number].prec

                                        if (slevel > rlevel) or ((slevel == rlevel) and (rprec == 'right')):
                                            # We decide to shift here... highest precedence to shift
                                            Productions[st_actionp[a].number].reduced -= 1
                                            st_action[a] = j
                                            st_actionp[a] = p
                                            if not rlevel:
                                                log.info('  ! shift/reduce conflict for %s resolved as shift', a)
                                                self.sr_conflicts.append((st, a, 'shift'))
                                        elif (slevel == rlevel) and (rprec == 'nonassoc'):
                                            st_action[a] = None
                                        else:
                                            # Hmmm. Guess we'll keep the reduce
                                            if not slevel and not rlevel:
                                                log.info('  ! shift/reduce conflict for %s resolved as reduce', a)
                                                self.sr_conflicts.append((st, a, 'reduce'))

                                    else:
                                        raise LALRError('Unknown conflict in state %d' % st)
                                else:
                                    st_action[a] = j
                                    st_actionp[a] = p

            # Print the actions associated with each terminal
            _actprint = {}
            for a, p, m in actlist:
                if a in st_action:
                    if p is st_actionp[a]:
                        log.info('    %-15s %s', a, m)
                        _actprint[(a, m)] = 1
            log.info('')
            # Print the actions that were not used. (debugging)
            not_used = 0
            for a, p, m in actlist:
                if a in st_action:
                    if p is not st_actionp[a]:
                        if not (a, m) in _actprint:
                            log.debug('  ! %-15s [ %s ]', a, m)
                            not_used = 1
                            _actprint[(a, m)] = 1
            if not_used:
                log.debug('')

            # Construct the goto table for this state

            nkeys = {}
            for ii in I:
                for s in ii.usyms:
                    if s in self.grammar.Nonterminals:
                        nkeys[s] = None
            for n in nkeys:
                g = self.lr0_goto(I, n)
                j = self.lr0_cidhash.get(id(g), -1)
                if j >= 0:
                    st_goto[n] = j
                    log.info('    %-30s shift and go to state %d', n, j)

            action[st] = st_action
            actionp[st] = st_actionp
            goto[st] = st_goto
            st += 1

    # -----------------------------------------------------------------------------
    # write()
    #
    # This function writes the LR parsing tables to a file
    # -----------------------------------------------------------------------------

    def write_table(self, tabmodule, outputdir='', signature=''):
        if isinstance(tabmodule, types.ModuleType):
            raise IOError("Won't overwrite existing tabmodule")

        basemodulename = tabmodule.split('.')[-1]
        filename = os.path.join(outputdir, basemodulename) + '.py'
        try:
            f = open(filename, 'w')

            f.write('''
# %s
# This file is automatically generated. Do not edit.
# pylint: disable=W,C,R
_tabversion = %r

_lr_method = %r

_lr_signature = %r
    ''' % (os.path.basename(filename), __tabversion__, self.lr_method, signature))

            # Change smaller to 0 to go back to original tables
            smaller = 1

            # Factor out names to try and make smaller
            if smaller:
                items = {}

                for s, nd in self.lr_action.items():
                    for name, v in nd.items():
                        i = items.get(name)
                        if not i:
                            i = ([], [])
                            items[name] = i
                        i[0].append(s)
                        i[1].append(v)

                f.write('\n_lr_action_items = {')
                for k, v in items.items():
                    f.write('%r:([' % k)
                    for i in v[0]:
                        f.write('%r,' % i)
                    f.write('],[')
                    for i in v[1]:
                        f.write('%r,' % i)

                    f.write(']),')
                f.write('}\n')

                f.write('''
_lr_action = {}
for _k, _v in _lr_action_items.items():
   for _x,_y in zip(_v[0],_v[1]):
      if not _x in _lr_action:  _lr_action[_x] = {}
      _lr_action[_x][_k] = _y
del _lr_action_items
''')

            else:
                f.write('\n_lr_action = { ')
                for k, v in self.lr_action.items():
                    f.write('(%r,%r):%r,' % (k[0], k[1], v))
                f.write('}\n')

            if smaller:
                # Factor out names to try and make smaller
                items = {}

                for s, nd in self.lr_goto.items():
                    for name, v in nd.items():
                        i = items.get(name)
                        if not i:
                            i = ([], [])
                            items[name] = i
                        i[0].append(s)
                        i[1].append(v)

                f.write('\n_lr_goto_items = {')
                for k, v in items.items():
                    f.write('%r:([' % k)
                    for i in v[0]:
                        f.write('%r,' % i)
                    f.write('],[')
                    for i in v[1]:
                        f.write('%r,' % i)

                    f.write(']),')
                f.write('}\n')

                f.write('''
_lr_goto = {}
for _k, _v in _lr_goto_items.items():
   for _x, _y in zip(_v[0], _v[1]):
       if not _x in _lr_goto: _lr_goto[_x] = {}
       _lr_goto[_x][_k] = _y
del _lr_goto_items
''')
            else:
                f.write('\n_lr_goto = { ')
                for k, v in self.lr_goto.items():
                    f.write('(%r,%r):%r,' % (k[0], k[1], v))
                f.write('}\n')

            # Write production table
            f.write('_lr_productions = [\n')
            for p in self.lr_productions:
                if p.func:
                    f.write('  (%r,%r,%d,%r,%r,%d),\n' % (p.str, p.name, p.len,
                                                          p.func, os.path.basename(p.file), p.line))
                else:
                    f.write('  (%r,%r,%d,None,None,None),\n' % (str(p), p.name, p.len))
            f.write(']\n')
            f.close()

        # NOTE(review): this except clause just re-raises and 'e' is unused,
        # so the try/except is effectively a no-op; also, f is not closed on a
        # write failure (no with-statement / finally here).
        except IOError as e:
            raise


    # -----------------------------------------------------------------------------
    # pickle_table()
    #
    # This function pickles the LR parsing tables to a supplied file object
    # -----------------------------------------------------------------------------

    def pickle_table(self, filename, signature=''):
        # Python 2 compatibility: prefer the C pickler when it is available.
        try:
            import cPickle as pickle
        except ImportError:
            import pickle
        with open(filename, 'wb') as outf:
            pickle.dump(__tabversion__, outf, pickle_protocol)
            pickle.dump(self.lr_method, outf, pickle_protocol)
            pickle.dump(signature, outf, pickle_protocol)
            pickle.dump(self.lr_action, outf, pickle_protocol)
            pickle.dump(self.lr_goto, outf, pickle_protocol)

            outp = []
            for p in self.lr_productions:
                if p.func:
                    outp.append((p.str, p.name, p.len, p.func, os.path.basename(p.file), p.line))
                else:
                    outp.append((str(p), p.name, p.len, None, None, None))
            pickle.dump(outp, outf, pickle_protocol)

# -----------------------------------------------------------------------------
#                            === INTROSPECTION ===
#
# The following functions and classes are used to implement the PLY
# introspection features followed by the yacc() function itself.
# -----------------------------------------------------------------------------

# -----------------------------------------------------------------------------
# get_caller_module_dict()
#
# This function returns a dictionary containing all of the symbols defined within
# a caller further down the call stack.  
This is used to get the environment
# associated with the yacc() call if none was provided.
# -----------------------------------------------------------------------------

def get_caller_module_dict(levels):
    f = sys._getframe(levels)
    # Start from the caller's globals; locals shadow globals when the two
    # namespaces differ (i.e. when not called at module level).
    ldict = f.f_globals.copy()
    if f.f_globals != f.f_locals:
        ldict.update(f.f_locals)
    return ldict

# -----------------------------------------------------------------------------
# parse_grammar()
#
# This takes a raw grammar rule string and parses it into production data
# -----------------------------------------------------------------------------
def parse_grammar(doc, file, line):
    grammar = []
    # Split the doc string into lines
    pstrings = doc.splitlines()
    lastp = None
    # dline tracks the source line number of the docstring line being parsed.
    dline = line
    for ps in pstrings:
        dline += 1
        p = ps.split()
        if not p:
            continue
        try:
            if p[0] == '|':
                # This is a continuation of a previous rule
                if not lastp:
                    raise SyntaxError("%s:%d: Misplaced '|'" % (file, dline))
                prodname = lastp
                syms = p[1:]
            else:
                prodname = p[0]
                lastp = prodname
                syms   = p[2:]
                assign = p[1]
                if assign != ':' and assign != '::=':
                    raise SyntaxError("%s:%d: Syntax error. Expected ':'" % (file, dline))

            grammar.append((file, dline, prodname, syms))
        except SyntaxError:
            raise
        except Exception:
            raise SyntaxError('%s:%d: Syntax error in rule %r' % (file, dline, ps.strip()))

    return grammar

# -----------------------------------------------------------------------------
# ParserReflect()
#
# This class represents information extracted for building a parser including
# start symbol, error function, tokens, precedence list, action functions,
# etc.
# -----------------------------------------------------------------------------
class ParserReflect(object):
    def __init__(self, pdict, log=None):
        self.pdict      = pdict
        self.start      = None
        self.error_func = None
        self.tokens     = None
        self.modules    = set()
        self.grammar    = []
        self.error      = False

        if log is None:
            self.log = PlyLogger(sys.stderr)
        else:
            self.log = log

    # Get all of the basic information
    def get_all(self):
        self.get_start()
        self.get_error_func()
        self.get_tokens()
        self.get_precedence()
        self.get_pfunctions()

    # Validate all of the information
    def validate_all(self):
        self.validate_start()
        self.validate_error_func()
        self.validate_tokens()
        self.validate_precedence()
        self.validate_pfunctions()
        self.validate_modules()
        return self.error

    # Compute a signature over the grammar
    def signature(self):
        parts = []
        try:
            if self.start:
                parts.append(self.start)
            if self.prec:
                parts.append(''.join([''.join(p) for p in self.prec]))
            if self.tokens:
                parts.append(' '.join(self.tokens))
            for f in self.pfuncs:
                if f[3]:
                    parts.append(f[3])
        except (TypeError, ValueError):
            pass
        return ''.join(parts)

    # -----------------------------------------------------------------------------
    # validate_modules()
    #
    # This method checks to see if there are duplicated p_rulename() functions
    # in the parser module file.  Without this function, it is really easy for
    # users to make mistakes by cutting and pasting code fragments (and it's a real
    # bugger to try and figure out why the resulting parser doesn't work).  Therefore,
    # we just do a little regular expression pattern matching of def statements
    # to try and detect duplicates.
    # -----------------------------------------------------------------------------

    def validate_modules(self):
        # Match def p_funcname(
        fre = re.compile(r'\s*def\s+(p_[a-zA-Z_0-9]*)\(')

        for module in self.modules:
            try:
                lines, linen = inspect.getsourcelines(module)
            except IOError:
                # Skip modules whose source cannot be retrieved.
                continue

            counthash = {}
            for linen, line in enumerate(lines):
                linen += 1
                m = fre.match(line)
                if m:
                    name = m.group(1)
                    prev = counthash.get(name)
                    if not prev:
                        counthash[name] = linen
                    else:
                        filename = inspect.getsourcefile(module)
                        self.log.warning('%s:%d: Function %s redefined. Previously defined on line %d',
                                         filename, linen, name, prev)

    # Get the start symbol
    def get_start(self):
        self.start = self.pdict.get('start')

    # Validate the start symbol
    def validate_start(self):
        if self.start is not None:
            if not isinstance(self.start, string_types):
                self.log.error("'start' must be a string")

    # Look for error handler
    def get_error_func(self):
        self.error_func = self.pdict.get('p_error')

    # Validate the error function
    def validate_error_func(self):
        if self.error_func:
            if isinstance(self.error_func, types.FunctionType):
                ismethod = 0
            elif isinstance(self.error_func, types.MethodType):
                ismethod = 1
            else:
                self.log.error("'p_error' defined, but is not a function or method")
                self.error = True
                return

            eline = self.error_func.__code__.co_firstlineno
            efile = self.error_func.__code__.co_filename
            module = inspect.getmodule(self.error_func)
            self.modules.add(module)

            # Bound methods carry an implicit self, hence the ismethod offset.
            argcount = self.error_func.__code__.co_argcount - ismethod
            if argcount != 1:
                self.log.error('%s:%d: p_error() requires 1 argument', efile, eline)
                self.error = True

    # Get the tokens map
    def get_tokens(self):
        tokens = self.pdict.get('tokens')
        if not tokens:
            self.log.error('No token list is defined')
            self.error = True
            return

        if not isinstance(tokens, (list, tuple)):
            self.log.error('tokens must be a list or tuple')
            self.error = True
            return

        if not tokens:
            self.log.error('tokens is empty')
            self.error = True
            return

        self.tokens = sorted(tokens)

    # Validate the tokens
    def validate_tokens(self):
        # Validate the tokens.
        if 'error' in self.tokens:
            self.log.error("Illegal token name 'error'. Is a reserved word")
            self.error = True
            return

        terminals = set()
        for n in self.tokens:
            if n in terminals:
                self.log.warning('Token %r multiply defined', n)
            terminals.add(n)

    # Get the precedence map (if any)
    def get_precedence(self):
        self.prec = self.pdict.get('precedence')

    # Validate and parse the precedence map
    def validate_precedence(self):
        preclist = []
        if self.prec:
            if not isinstance(self.prec, (list, tuple)):
                self.log.error('precedence must be a list or tuple')
                self.error = True
                return
            for level, p in enumerate(self.prec):
                if not isinstance(p, (list, tuple)):
                    self.log.error('Bad precedence table')
                    self.error = True
                    return

                if len(p) < 2:
                    self.log.error('Malformed precedence entry %s. Must be (assoc, term, ..., term)', p)
                    self.error = True
                    return
                assoc = p[0]
                if not isinstance(assoc, string_types):
                    self.log.error('precedence associativity must be a string')
                    self.error = True
                    return
                for term in p[1:]:
                    if not isinstance(term, string_types):
                        self.log.error('precedence items must be strings')
                        self.error = True
                        return
                    preclist.append((term, assoc, level+1))
        self.preclist = preclist

    # Get all p_functions from the grammar
    def get_pfunctions(self):
        p_functions = []
        for name, item in self.pdict.items():
            if not name.startswith('p_') or name == 'p_error':
                continue
            if isinstance(item, (types.FunctionType, types.MethodType)):
                # NOTE(review): functions do not normally expose
                # 'co_firstlineno' directly, so the getattr default
                # (item.__code__.co_firstlineno) is what's used in practice.
                line = getattr(item, 'co_firstlineno', item.__code__.co_firstlineno)
                module = inspect.getmodule(item)
                p_functions.append((line, module, name, item.__doc__))

        # Sort all of the actions by line number; make sure to stringify
        # modules to make them sortable, since `line` may not uniquely sort all
        # p functions
        p_functions.sort(key=lambda p_function: (
            p_function[0],
            str(p_function[1]),
            p_function[2],
            p_function[3]))
        self.pfuncs = p_functions

    # Validate all of the p_functions
    def validate_pfunctions(self):
        grammar = []
        # Check for non-empty symbols
        if len(self.pfuncs) == 0:
            self.log.error('no rules of the form p_rulename are defined')
            self.error = True
            return

        for line, module, name, doc in self.pfuncs:
            file = inspect.getsourcefile(module)
            func = self.pdict[name]
            if isinstance(func, types.MethodType):
                reqargs = 2
            else:
                reqargs = 1
            if func.__code__.co_argcount > reqargs:
                self.log.error('%s:%d: Rule %r has too many arguments', file, line, func.__name__)
                self.error = True
            elif func.__code__.co_argcount < reqargs:
                self.log.error('%s:%d: Rule %r requires an argument', file, line, func.__name__)
                self.error = True
            elif not func.__doc__:
                self.log.warning('%s:%d: No documentation string specified in function %r (ignored)',
                                 file, line, func.__name__)
            else:
                try:
                    parsed_g = parse_grammar(doc, file, line)
                    for g in parsed_g:
                        grammar.append((name, g))
                except SyntaxError as e:
                    self.log.error(str(e))
                    self.error = True

                # Looks like a valid grammar rule
                # Mark the file in which defined.
                self.modules.add(module)

        # Secondary validation step that looks for p_ definitions that are not functions
        # or functions that look like they might be grammar rules.

        for n, v in self.pdict.items():
            if n.startswith('p_') and isinstance(v, (types.FunctionType, types.MethodType)):
                continue
            if n.startswith('t_'):
                continue
            if n.startswith('p_') and n != 'p_error':
                self.log.warning('%r not defined as a function', n)
            if ((isinstance(v, types.FunctionType) and v.__code__.co_argcount == 1) or
                   (isinstance(v, types.MethodType) and v.__func__.__code__.co_argcount == 2)):
                if v.__doc__:
                    try:
                        doc = v.__doc__.split(' 
')\n                        if doc[1] == ':':\n                            self.log.warning('%s:%d: Possible grammar rule %r defined without p_ prefix',\n                                             v.__code__.co_filename, v.__code__.co_firstlineno, n)\n                    except IndexError:\n                        pass\n\n        self.grammar = grammar\n\n# -----------------------------------------------------------------------------\n# yacc(module)\n#\n# Build a parser\n# -----------------------------------------------------------------------------\n\ndef yacc(method='LALR', debug=yaccdebug, module=None, tabmodule=tab_module, start=None,\n         check_recursion=True, optimize=False, write_tables=True, debugfile=debug_file,\n         outputdir=None, debuglog=None, errorlog=None, picklefile=None):\n\n    if tabmodule is None:\n        tabmodule = tab_module\n\n    # Reference to the parsing method of the last built parser\n    global parse\n\n    # If pickling is enabled, table files are not created\n    if picklefile:\n        write_tables = 0\n\n    if errorlog is None:\n        errorlog = PlyLogger(sys.stderr)\n\n    # Get the module dictionary used for the parser\n    if module:\n        _items = [(k, getattr(module, k)) for k in dir(module)]\n        pdict = dict(_items)\n        # If no __file__ or __package__ attributes are available, try to obtain them\n        # from the __module__ instead\n        if '__file__' not in pdict:\n            pdict['__file__'] = sys.modules[pdict['__module__']].__file__\n        if '__package__' not in pdict and '__module__' in pdict:\n            if hasattr(sys.modules[pdict['__module__']], '__package__'):\n                pdict['__package__'] = sys.modules[pdict['__module__']].__package__\n    else:\n        pdict = get_caller_module_dict(2)\n\n    if outputdir is None:\n        # If no output directory is set, the location of the output files\n        # is determined according to the following rules:\n        #     - If 
tabmodule specifies a package, files go into that package directory\n        #     - Otherwise, files go in the same directory as the specifying module\n        if isinstance(tabmodule, types.ModuleType):\n            srcfile = tabmodule.__file__\n        else:\n            if '.' not in tabmodule:\n                srcfile = pdict['__file__']\n            else:\n                parts = tabmodule.split('.')\n                pkgname = '.'.join(parts[:-1])\n                exec('import %s' % pkgname)\n                srcfile = getattr(sys.modules[pkgname], '__file__', '')\n        outputdir = os.path.dirname(srcfile)\n\n    # Determine if the module is package of a package or not.\n    # If so, fix the tabmodule setting so that tables load correctly\n    pkg = pdict.get('__package__')\n    if pkg and isinstance(tabmodule, str):\n        if '.' not in tabmodule:\n            tabmodule = pkg + '.' + tabmodule\n\n\n\n    # Set start symbol if it's specified directly using an argument\n    if start is not None:\n        pdict['start'] = start\n\n    # Collect parser information from the dictionary\n    pinfo = ParserReflect(pdict, log=errorlog)\n    pinfo.get_all()\n\n    if pinfo.error:\n        raise YaccError('Unable to build parser')\n\n    # Check signature against table files (if any)\n    signature = pinfo.signature()\n\n    # Read the tables\n    try:\n        lr = LRTable()\n        if picklefile:\n            read_signature = lr.read_pickle(picklefile)\n        else:\n            read_signature = lr.read_table(tabmodule)\n        if optimize or (read_signature == signature):\n            try:\n                lr.bind_callables(pinfo.pdict)\n                parser = LRParser(lr, pinfo.error_func)\n                parse = parser.parse\n                return parser\n            except Exception as e:\n                errorlog.warning('There was a problem loading the table file: %r', e)\n    except VersionError as e:\n        errorlog.warning(str(e))\n    except 
ImportError:\n        pass\n\n    if debuglog is None:\n        if debug:\n            try:\n                debuglog = PlyLogger(open(os.path.join(outputdir, debugfile), 'w'))\n            except IOError as e:\n                errorlog.warning(\"Couldn't open %r. %s\" % (debugfile, e))\n                debuglog = NullLogger()\n        else:\n            debuglog = NullLogger()\n\n    debuglog.info('Created by PLY version %s (http://www.dabeaz.com/ply)', __version__)\n\n    errors = False\n\n    # Validate the parser information\n    if pinfo.validate_all():\n        raise YaccError('Unable to build parser')\n\n    if not pinfo.error_func:\n        errorlog.warning('no p_error() function is defined')\n\n    # Create a grammar object\n    grammar = Grammar(pinfo.tokens)\n\n    # Set precedence level for terminals\n    for term, assoc, level in pinfo.preclist:\n        try:\n            grammar.set_precedence(term, assoc, level)\n        except GrammarError as e:\n            errorlog.warning('%s', e)\n\n    # Add productions to the grammar\n    for funcname, gram in pinfo.grammar:\n        file, line, prodname, syms = gram\n        try:\n            grammar.add_production(prodname, syms, funcname, file, line)\n        except GrammarError as e:\n            errorlog.error('%s', e)\n            errors = True\n\n    # Set the grammar start symbols\n    try:\n        if start is None:\n            grammar.set_start(pinfo.start)\n        else:\n            grammar.set_start(start)\n    except GrammarError as e:\n        errorlog.error(str(e))\n        errors = True\n\n    if errors:\n        raise YaccError('Unable to build parser')\n\n    # Verify the grammar structure\n    undefined_symbols = grammar.undefined_symbols()\n    for sym, prod in undefined_symbols:\n        errorlog.error('%s:%d: Symbol %r used, but not defined as a token or a rule', prod.file, prod.line, sym)\n        errors = True\n\n    unused_terminals = grammar.unused_terminals()\n    if 
unused_terminals:\n        debuglog.info('')\n        debuglog.info('Unused terminals:')\n        debuglog.info('')\n        for term in unused_terminals:\n            errorlog.warning('Token %r defined, but not used', term)\n            debuglog.info('    %s', term)\n\n    # Print out all productions to the debug log\n    if debug:\n        debuglog.info('')\n        debuglog.info('Grammar')\n        debuglog.info('')\n        for n, p in enumerate(grammar.Productions):\n            debuglog.info('Rule %-5d %s', n, p)\n\n    # Find unused non-terminals\n    unused_rules = grammar.unused_rules()\n    for prod in unused_rules:\n        errorlog.warning('%s:%d: Rule %r defined, but not used', prod.file, prod.line, prod.name)\n\n    if len(unused_terminals) == 1:\n        errorlog.warning('There is 1 unused token')\n    if len(unused_terminals) > 1:\n        errorlog.warning('There are %d unused tokens', len(unused_terminals))\n\n    if len(unused_rules) == 1:\n        errorlog.warning('There is 1 unused rule')\n    if len(unused_rules) > 1:\n        errorlog.warning('There are %d unused rules', len(unused_rules))\n\n    if debug:\n        debuglog.info('')\n        debuglog.info('Terminals, with rules where they appear')\n        debuglog.info('')\n        terms = list(grammar.Terminals)\n        terms.sort()\n        for term in terms:\n            debuglog.info('%-20s : %s', term, ' '.join([str(s) for s in grammar.Terminals[term]]))\n\n        debuglog.info('')\n        debuglog.info('Nonterminals, with rules where they appear')\n        debuglog.info('')\n        nonterms = list(grammar.Nonterminals)\n        nonterms.sort()\n        for nonterm in nonterms:\n            debuglog.info('%-20s : %s', nonterm, ' '.join([str(s) for s in grammar.Nonterminals[nonterm]]))\n        debuglog.info('')\n\n    if check_recursion:\n        unreachable = grammar.find_unreachable()\n        for u in unreachable:\n            errorlog.warning('Symbol %r is unreachable', u)\n\n    
    infinite = grammar.infinite_cycles()\n        for inf in infinite:\n            errorlog.error('Infinite recursion detected for symbol %r', inf)\n            errors = True\n\n    unused_prec = grammar.unused_precedence()\n    for term, assoc in unused_prec:\n        errorlog.error('Precedence rule %r defined for unknown symbol %r', assoc, term)\n        errors = True\n\n    if errors:\n        raise YaccError('Unable to build parser')\n\n    # Run the LRGeneratedTable on the grammar\n    if debug:\n        errorlog.debug('Generating %s tables', method)\n\n    lr = LRGeneratedTable(grammar, method, debuglog)\n\n    if debug:\n        num_sr = len(lr.sr_conflicts)\n\n        # Report shift/reduce and reduce/reduce conflicts\n        if num_sr == 1:\n            errorlog.warning('1 shift/reduce conflict')\n        elif num_sr > 1:\n            errorlog.warning('%d shift/reduce conflicts', num_sr)\n\n        num_rr = len(lr.rr_conflicts)\n        if num_rr == 1:\n            errorlog.warning('1 reduce/reduce conflict')\n        elif num_rr > 1:\n            errorlog.warning('%d reduce/reduce conflicts', num_rr)\n\n    # Write out conflicts to the output file\n    if debug and (lr.sr_conflicts or lr.rr_conflicts):\n        debuglog.warning('')\n        debuglog.warning('Conflicts:')\n        debuglog.warning('')\n\n        for state, tok, resolution in lr.sr_conflicts:\n            debuglog.warning('shift/reduce conflict for %s in state %d resolved as %s',  tok, state, resolution)\n\n        already_reported = set()\n        for state, rule, rejected in lr.rr_conflicts:\n            if (state, id(rule), id(rejected)) in already_reported:\n                continue\n            debuglog.warning('reduce/reduce conflict in state %d resolved using rule (%s)', state, rule)\n            debuglog.warning('rejected rule (%s) in state %d', rejected, state)\n            errorlog.warning('reduce/reduce conflict in state %d resolved using rule (%s)', state, rule)\n            
errorlog.warning('rejected rule (%s) in state %d', rejected, state)\n            already_reported.add((state, id(rule), id(rejected)))\n\n        warned_never = []\n        for state, rule, rejected in lr.rr_conflicts:\n            if not rejected.reduced and (rejected not in warned_never):\n                debuglog.warning('Rule (%s) is never reduced', rejected)\n                errorlog.warning('Rule (%s) is never reduced', rejected)\n                warned_never.append(rejected)\n\n    # Write the table file if requested\n    if write_tables:\n        try:\n            lr.write_table(tabmodule, outputdir, signature)\n            if tabmodule in sys.modules:\n                del sys.modules[tabmodule]\n        except IOError as e:\n            errorlog.warning(\"Couldn't create %r. %s\" % (tabmodule, e))\n\n    # Write a pickled version of the tables\n    if picklefile:\n        try:\n            lr.pickle_table(picklefile, signature)\n        except IOError as e:\n            errorlog.warning(\"Couldn't create %r. %s\" % (picklefile, e))\n\n    # Build the parser\n    lr.bind_callables(pinfo.pdict)\n    parser = LRParser(lr, pinfo.error_func)\n\n    parse = parser.parse\n    return parser\n"
  },
  {
    "path": "ctypesgen/printer_json/__init__.py",
    "content": "\"\"\"\nThis module is the backend to ctypesgen; it contains classes to\nproduce the final .py output files.\n\"\"\"\n\nfrom .printer import WrapperPrinter\n\n__all__ = [\"WrapperPrinter\"]\n"
  },
  {
    "path": "ctypesgen/printer_json/printer.py",
    "content": "import os\nimport sys\nimport json\n\nfrom ctypesgen.ctypedescs import CtypesBitfield\nfrom ctypesgen.messages import status_message\n\n\n# From:\n# http://stackoverflow.com/questions/1036409/recursively-convert-python-object-graph-to-dictionary\ndef todict(obj, classkey=\"Klass\"):\n    if isinstance(obj, dict):\n        for k in obj.keys():\n            obj[k] = todict(obj[k], classkey)\n        return obj\n    elif isinstance(obj, str) or isinstance(obj, bytes):\n        # must handle strings before __iter__ test, since they now have\n        # __iter__ in Python3\n        return obj\n    elif hasattr(obj, \"__iter__\"):\n        return [todict(v, classkey) for v in obj]\n    elif hasattr(obj, \"__dict__\"):\n        data = dict(\n            [\n                (key, todict(value, classkey))\n                for key, value in obj.__dict__.items()\n                if not callable(value) and not key.startswith(\"_\")\n            ]\n        )\n        if classkey is not None and hasattr(obj, \"__class__\"):\n            data[classkey] = obj.__class__.__name__\n        return data\n    else:\n        return obj\n\n\nclass WrapperPrinter:\n    def __init__(self, outpath, options, data):\n        status_message(\"Writing to %s.\" % (outpath or \"stdout\"))\n\n        self.file = open(outpath, \"w\") if outpath else sys.stdout\n        self.options = options\n\n        if self.options.strip_build_path and self.options.strip_build_path[-1] != os.path.sep:\n            self.options.strip_build_path += os.path.sep\n\n        self.print_group(self.options.libraries, \"libraries\", self.print_library)\n\n        method_table = {\n            \"function\": self.print_function,\n            \"macro\": self.print_macro,\n            \"struct\": self.print_struct,\n            \"struct-body\": self.print_struct_members,\n            \"typedef\": self.print_typedef,\n            \"variable\": self.print_variable,\n            \"enum\": self.print_enum,\n         
   \"constant\": self.print_constant,\n            \"undef\": self.print_undef,\n        }\n\n        res = []\n        for kind, desc in data.output_order:\n            if desc.included:\n                item = method_table[kind](desc)\n                if item:\n                    res.append(item)\n        self.file.write(json.dumps(res, sort_keys=True, indent=4))\n        self.file.write(\"\\n\")\n\n    def __del__(self):\n        self.file.close()\n\n    def print_group(self, list, name, function):\n        if list:\n            return [function(obj) for obj in list]\n\n    def print_library(self, library):\n        return {\"load_library\": library}\n\n    def print_constant(self, constant):\n        return {\"type\": \"constant\", \"name\": constant.name, \"value\": constant.value.py_string(False)}\n\n    def print_undef(self, undef):\n        return {\"type\": \"undef\", \"value\": undef.macro.py_string(False)}\n\n    def print_typedef(self, typedef):\n        return {\"type\": \"typedef\", \"name\": typedef.name, \"ctype\": todict(typedef.ctype)}\n\n    def print_struct(self, struct):\n        res = {\"type\": struct.variety, \"name\": struct.tag, \"attrib\": struct.attrib}\n        if not struct.opaque:\n            res[\"fields\"] = []\n            for name, ctype in struct.members:\n                field = {\"name\": name, \"ctype\": todict(ctype)}\n                if isinstance(ctype, CtypesBitfield):\n                    field[\"bitfield\"] = ctype.bitfield.py_string(False)\n                res[\"fields\"].append(field)\n        return res\n\n    def print_struct_members(self, struct):\n        pass\n\n    def print_enum(self, enum):\n        res = {\"type\": \"enum\", \"name\": enum.tag}\n\n        if not enum.opaque:\n            res[\"fields\"] = []\n            for name, ctype in enum.members:\n                field = {\"name\": name, \"ctype\": todict(ctype)}\n                res[\"fields\"].append(field)\n        return res\n\n    def 
print_function(self, function):\n        res = {\n            \"type\": \"function\",\n            \"name\": function.c_name(),\n            \"variadic\": function.variadic,\n            \"args\": todict(function.argtypes),\n            \"return\": todict(function.restype),\n            \"attrib\": function.attrib,\n        }\n        if function.source_library:\n            res[\"source\"] = function.source_library\n        return res\n\n    def print_variable(self, variable):\n        res = {\"type\": \"variable\", \"ctype\": todict(variable.ctype), \"name\": variable.c_name()}\n        if variable.source_library:\n            res[\"source\"] = variable.source_library\n        return res\n\n    def print_macro(self, macro):\n        if macro.params:\n            return {\n                \"type\": \"macro_function\",\n                \"name\": macro.name,\n                \"args\": macro.params,\n                \"body\": macro.expr.py_string(True),\n            }\n        else:\n            # The macro translator makes heroic efforts but it occasionally fails.\n            # Beware the contents of the value!\n            return {\"type\": \"macro\", \"name\": macro.name, \"value\": macro.expr.py_string(True)}\n"
  },
  {
    "path": "ctypesgen/printer_python/__init__.py",
    "content": "\"\"\"\nThis module is the backend to ctypesgen; it contains classes to\nproduce the final .py output files.\n\"\"\"\n\nfrom .printer import WrapperPrinter\n\n__all__ = [\"WrapperPrinter\"]\n"
  },
  {
    "path": "ctypesgen/printer_python/defaultheader.py",
    "content": "r\"\"\"Wrapper for %(name)s\n\nGenerated with:\n%(argv)s\n\nDo not modify this file.\n\"\"\"\n\n__docformat__ = \"restructuredtext\"\n"
  },
  {
    "path": "ctypesgen/printer_python/preamble.py",
    "content": "import ctypes\nimport sys\nfrom ctypes import *  # noqa: F401, F403\n\n_int_types = (ctypes.c_int16, ctypes.c_int32)\nif hasattr(ctypes, \"c_int64\"):\n    # Some builds of ctypes apparently do not have ctypes.c_int64\n    # defined; it's a pretty good bet that these builds do not\n    # have 64-bit pointers.\n    _int_types += (ctypes.c_int64,)\nfor t in _int_types:\n    if ctypes.sizeof(t) == ctypes.sizeof(ctypes.c_size_t):\n        c_ptrdiff_t = t\ndel t\ndel _int_types\n\n\n# ~POINTER~\nclass UserString:\n    def __init__(self, seq):\n        if isinstance(seq, bytes):\n            self.data = seq\n        elif isinstance(seq, UserString):\n            self.data = seq.data[:]\n        else:\n            self.data = str(seq).encode()\n\n    def __bytes__(self):\n        return self.data\n\n    def __str__(self):\n        return self.data.decode()\n\n    def __repr__(self):\n        return repr(self.data)\n\n    def __int__(self):\n        return int(self.data.decode())\n\n    def __long__(self):\n        return int(self.data.decode())\n\n    def __float__(self):\n        return float(self.data.decode())\n\n    def __complex__(self):\n        return complex(self.data.decode())\n\n    def __hash__(self):\n        return hash(self.data)\n\n    def __le__(self, string):\n        if isinstance(string, UserString):\n            return self.data <= string.data\n        else:\n            return self.data <= string\n\n    def __lt__(self, string):\n        if isinstance(string, UserString):\n            return self.data < string.data\n        else:\n            return self.data < string\n\n    def __ge__(self, string):\n        if isinstance(string, UserString):\n            return self.data >= string.data\n        else:\n            return self.data >= string\n\n    def __gt__(self, string):\n        if isinstance(string, UserString):\n            return self.data > string.data\n        else:\n            return self.data > string\n\n    def 
__eq__(self, string):\n        if isinstance(string, UserString):\n            return self.data == string.data\n        else:\n            return self.data == string\n\n    def __ne__(self, string):\n        if isinstance(string, UserString):\n            return self.data != string.data\n        else:\n            return self.data != string\n\n    def __contains__(self, char):\n        return char in self.data\n\n    def __len__(self):\n        return len(self.data)\n\n    def __getitem__(self, index):\n        return self.__class__(self.data[index])\n\n    def __getslice__(self, start, end):\n        start = max(start, 0)\n        end = max(end, 0)\n        return self.__class__(self.data[start:end])\n\n    def __add__(self, other):\n        if isinstance(other, UserString):\n            return self.__class__(self.data + other.data)\n        elif isinstance(other, bytes):\n            return self.__class__(self.data + other)\n        else:\n            return self.__class__(self.data + str(other).encode())\n\n    def __radd__(self, other):\n        if isinstance(other, bytes):\n            return self.__class__(other + self.data)\n        else:\n            return self.__class__(str(other).encode() + self.data)\n\n    def __mul__(self, n):\n        return self.__class__(self.data * n)\n\n    __rmul__ = __mul__\n\n    def __mod__(self, args):\n        return self.__class__(self.data % args)\n\n    # the following methods are defined in alphabetical order:\n    def capitalize(self):\n        return self.__class__(self.data.capitalize())\n\n    def center(self, width, *args):\n        return self.__class__(self.data.center(width, *args))\n\n    def count(self, sub, start=0, end=sys.maxsize):\n        return self.data.count(sub, start, end)\n\n    def decode(self, encoding=None, errors=None):  # XXX improve this?\n        if encoding:\n            if errors:\n                return self.__class__(self.data.decode(encoding, errors))\n            else:\n                
return self.__class__(self.data.decode(encoding))\n        else:\n            return self.__class__(self.data.decode())\n\n    def encode(self, encoding=None, errors=None):  # XXX improve this?\n        if encoding:\n            if errors:\n                return self.__class__(self.data.encode(encoding, errors))\n            else:\n                return self.__class__(self.data.encode(encoding))\n        else:\n            return self.__class__(self.data.encode())\n\n    def endswith(self, suffix, start=0, end=sys.maxsize):\n        return self.data.endswith(suffix, start, end)\n\n    def expandtabs(self, tabsize=8):\n        return self.__class__(self.data.expandtabs(tabsize))\n\n    def find(self, sub, start=0, end=sys.maxsize):\n        return self.data.find(sub, start, end)\n\n    def index(self, sub, start=0, end=sys.maxsize):\n        return self.data.index(sub, start, end)\n\n    def isalpha(self):\n        return self.data.isalpha()\n\n    def isalnum(self):\n        return self.data.isalnum()\n\n    def isdecimal(self):\n        return self.data.isdecimal()\n\n    def isdigit(self):\n        return self.data.isdigit()\n\n    def islower(self):\n        return self.data.islower()\n\n    def isnumeric(self):\n        return self.data.isnumeric()\n\n    def isspace(self):\n        return self.data.isspace()\n\n    def istitle(self):\n        return self.data.istitle()\n\n    def isupper(self):\n        return self.data.isupper()\n\n    def join(self, seq):\n        return self.data.join(seq)\n\n    def ljust(self, width, *args):\n        return self.__class__(self.data.ljust(width, *args))\n\n    def lower(self):\n        return self.__class__(self.data.lower())\n\n    def lstrip(self, chars=None):\n        return self.__class__(self.data.lstrip(chars))\n\n    def partition(self, sep):\n        return self.data.partition(sep)\n\n    def replace(self, old, new, maxsplit=-1):\n        return self.__class__(self.data.replace(old, new, maxsplit))\n\n    def 
rfind(self, sub, start=0, end=sys.maxsize):\n        return self.data.rfind(sub, start, end)\n\n    def rindex(self, sub, start=0, end=sys.maxsize):\n        return self.data.rindex(sub, start, end)\n\n    def rjust(self, width, *args):\n        return self.__class__(self.data.rjust(width, *args))\n\n    def rpartition(self, sep):\n        return self.data.rpartition(sep)\n\n    def rstrip(self, chars=None):\n        return self.__class__(self.data.rstrip(chars))\n\n    def split(self, sep=None, maxsplit=-1):\n        return self.data.split(sep, maxsplit)\n\n    def rsplit(self, sep=None, maxsplit=-1):\n        return self.data.rsplit(sep, maxsplit)\n\n    def splitlines(self, keepends=0):\n        return self.data.splitlines(keepends)\n\n    def startswith(self, prefix, start=0, end=sys.maxsize):\n        return self.data.startswith(prefix, start, end)\n\n    def strip(self, chars=None):\n        return self.__class__(self.data.strip(chars))\n\n    def swapcase(self):\n        return self.__class__(self.data.swapcase())\n\n    def title(self):\n        return self.__class__(self.data.title())\n\n    def translate(self, *args):\n        return self.__class__(self.data.translate(*args))\n\n    def upper(self):\n        return self.__class__(self.data.upper())\n\n    def zfill(self, width):\n        return self.__class__(self.data.zfill(width))\n\n\nclass MutableString(UserString):\n    \"\"\"mutable string objects\n\n    Python strings are immutable objects.  This has the advantage, that\n    strings may be used as dictionary keys.  If this property isn't needed\n    and you insist on changing string values in place instead, you may cheat\n    and use MutableString.\n\n    But the purpose of this class is an educational one: to prevent\n    people from inventing their own mutable string class derived\n    from UserString and than forget thereby to remove (override) the\n    __hash__ method inherited from UserString.  
This would lead to\n    errors that would be very hard to track down.\n\n    A faster and better solution is to rewrite your program using lists.\"\"\"\n\n    def __init__(self, string=\"\"):\n        self.data = string\n\n    def __hash__(self):\n        raise TypeError(\"unhashable type (it is mutable)\")\n\n    def __setitem__(self, index, sub):\n        if index < 0:\n            index += len(self.data)\n        if index < 0 or index >= len(self.data):\n            raise IndexError\n        self.data = self.data[:index] + sub + self.data[index + 1 :]\n\n    def __delitem__(self, index):\n        if index < 0:\n            index += len(self.data)\n        if index < 0 or index >= len(self.data):\n            raise IndexError\n        self.data = self.data[:index] + self.data[index + 1 :]\n\n    def __setslice__(self, start, end, sub):\n        start = max(start, 0)\n        end = max(end, 0)\n        if isinstance(sub, UserString):\n            self.data = self.data[:start] + sub.data + self.data[end:]\n        elif isinstance(sub, bytes):\n            self.data = self.data[:start] + sub + self.data[end:]\n        else:\n            self.data = self.data[:start] + str(sub).encode() + self.data[end:]\n\n    def __delslice__(self, start, end):\n        start = max(start, 0)\n        end = max(end, 0)\n        self.data = self.data[:start] + self.data[end:]\n\n    def immutable(self):\n        return UserString(self.data)\n\n    def __iadd__(self, other):\n        if isinstance(other, UserString):\n            self.data += other.data\n        elif isinstance(other, bytes):\n            self.data += other\n        else:\n            self.data += str(other).encode()\n        return self\n\n    def __imul__(self, n):\n        self.data *= n\n        return self\n\n\nclass String(MutableString, ctypes.Union):\n    _fields_ = [(\"raw\", ctypes.POINTER(ctypes.c_char)), (\"data\", ctypes.c_char_p)]\n\n    def __init__(self, obj=b\"\"):\n        if isinstance(obj, (bytes, 
UserString)):\n            self.data = bytes(obj)\n        else:\n            self.raw = obj\n\n    def __len__(self):\n        return self.data and len(self.data) or 0\n\n    def from_param(cls, obj):\n        # Convert None or 0\n        if obj is None or obj == 0:\n            return cls(ctypes.POINTER(ctypes.c_char)())\n\n        # Convert from String\n        elif isinstance(obj, String):\n            return obj\n\n        # Convert from bytes\n        elif isinstance(obj, bytes):\n            return cls(obj)\n\n        # Convert from str\n        elif isinstance(obj, str):\n            return cls(obj.encode())\n\n        # Convert from c_char_p\n        elif isinstance(obj, ctypes.c_char_p):\n            return obj\n\n        # Convert from POINTER(ctypes.c_char)\n        elif isinstance(obj, ctypes.POINTER(ctypes.c_char)):\n            return obj\n\n        # Convert from raw pointer\n        elif isinstance(obj, int):\n            return cls(ctypes.cast(obj, ctypes.POINTER(ctypes.c_char)))\n\n        # Convert from ctypes.c_char array\n        elif isinstance(obj, ctypes.c_char * len(obj)):\n            return obj\n\n        # Convert from object\n        else:\n            return String.from_param(obj._as_parameter_)\n\n    from_param = classmethod(from_param)\n\n\ndef ReturnString(obj, func=None, arguments=None):\n    return String.from_param(obj)\n\n\n# As of ctypes 1.0, ctypes does not support custom error-checking\n# functions on callbacks, nor does it support custom datatypes on\n# callbacks, so we must ensure that all callbacks return\n# primitive datatypes.\n#\n# Non-primitive return values wrapped with UNCHECKED won't be\n# typechecked, and will be converted to ctypes.c_void_p.\ndef UNCHECKED(type):\n    if hasattr(type, \"_type_\") and isinstance(type._type_, str) and type._type_ != \"P\":\n        return type\n    else:\n        return ctypes.c_void_p\n\n\n# ctypes doesn't have direct support for variadic functions, so we have to write\n# our own 
wrapper class\nclass _variadic_function(object):\n    def __init__(self, func, restype, argtypes, errcheck):\n        self.func = func\n        self.func.restype = restype\n        self.argtypes = argtypes\n        if errcheck:\n            self.func.errcheck = errcheck\n\n    def _as_parameter_(self):\n        # So we can pass this variadic function as a function pointer\n        return self.func\n\n    def __call__(self, *args):\n        fixed_args = []\n        i = 0\n        for argtype in self.argtypes:\n            # Typecheck what we can\n            fixed_args.append(argtype.from_param(args[i]))\n            i += 1\n        return self.func(*fixed_args + list(args[i:]))\n\n\ndef ord_if_char(value):\n    \"\"\"\n    Simple helper used for casts to simple builtin types:  if the argument is a\n    string type, it will be converted to it's ordinal value.\n\n    This function will raise an exception if the argument is string with more\n    than one characters.\n    \"\"\"\n    return ord(value) if (isinstance(value, bytes) or isinstance(value, str)) else value\n"
  },
  {
    "path": "ctypesgen/printer_python/printer.py",
    "content": "import os\nimport os.path\nimport sys\nimport time\nimport shutil\n\nfrom ctypesgen.ctypedescs import CtypesBitfield, CtypesStruct\nfrom ctypesgen.expressions import ExpressionNode\nfrom ctypesgen.messages import error_message, status_message\n\n\nTHIS_DIR = os.path.dirname(__file__)\nCTYPESGEN_DIR = os.path.join(THIS_DIR, os.path.pardir)\nPREAMBLE_PATH = os.path.join(THIS_DIR, \"preamble.py\")\nDEFAULTHEADER_PATH = os.path.join(THIS_DIR, \"defaultheader.py\")\nLIBRARYLOADER_PATH = os.path.join(CTYPESGEN_DIR, \"libraryloader.py\")\n\n\nclass WrapperPrinter:\n    def __init__(self, outpath, options, data):\n        status_message(\"Writing to %s.\" % (outpath or \"stdout\"))\n\n        self.file = open(outpath, \"w\") if outpath else sys.stdout\n        self.options = options\n\n        if self.options.strip_build_path and self.options.strip_build_path[-1] != os.path.sep:\n            self.options.strip_build_path += os.path.sep\n\n        if not self.options.embed_preamble and outpath:\n            self._copy_preamble_loader_files(outpath)\n\n        self.print_header()\n        self.file.write(\"\\n\")\n\n        self.print_preamble()\n        self.file.write(\"\\n\")\n\n        self.print_loader()\n        self.file.write(\"\\n\")\n\n        self.print_group(self.options.libraries, \"libraries\", self.print_library)\n        self.print_group(self.options.modules, \"modules\", self.print_module)\n\n        method_table = {\n            \"function\": self.print_function,\n            \"macro\": self.print_macro,\n            \"struct\": self.print_struct,\n            \"struct-body\": self.print_struct_members,\n            \"typedef\": self.print_typedef,\n            \"variable\": self.print_variable,\n            \"enum\": self.print_enum,\n            \"constant\": self.print_constant,\n            \"undef\": self.print_undef,\n        }\n\n        for kind, desc in data.output_order:\n            if desc.included:\n                
method_table[kind](desc)\n                self.file.write(\"\\n\")\n\n        self.print_group(self.options.inserted_files, \"inserted files\", self.insert_file)\n        self.strip_prefixes()\n\n    def __del__(self):\n        self.file.close()\n\n    def print_group(self, list, name, function):\n        if list:\n            self.file.write(\"# Begin %s\\n\" % name)\n            for obj in list:\n                function(obj)\n            self.file.write(\"\\n\")\n            self.file.write(\"# %d %s\\n\" % (len(list), name))\n            self.file.write(\"# End %s\\n\" % name)\n        else:\n            self.file.write(\"# No %s\\n\" % name)\n        self.file.write(\"\\n\")\n\n    def srcinfo(self, src):\n        if src is None:\n            self.file.write(\"\\n\")\n        else:\n            filename, lineno = src\n            if filename in (\"<built-in>\", \"<command line>\"):\n                self.file.write(\"# %s\\n\" % filename)\n            else:\n                if self.options.strip_build_path and filename.startswith(\n                    self.options.strip_build_path\n                ):\n                    filename = filename[len(self.options.strip_build_path) :]\n                self.file.write(\"# %s: %s\\n\" % (filename, lineno))\n\n    def template_subs(self):\n        template_subs = {\n            \"date\": time.ctime(),\n            \"argv\": \" \".join([x for x in sys.argv if not x.startswith(\"--strip-build-path\")]),\n            \"name\": os.path.basename(self.options.headers[0]),\n        }\n\n        for opt, value in self.options.__dict__.items():\n            if type(value) == str:\n                template_subs[opt] = value\n            elif isinstance(value, (list, tuple)):\n                template_subs[opt] = (os.path.sep).join(value)\n            else:\n                template_subs[opt] = repr(value)\n\n        return template_subs\n\n    def print_header(self):\n        template_file = None\n\n        if 
self.options.header_template:\n            path = self.options.header_template\n            try:\n                template_file = open(path, \"r\")\n            except IOError:\n                error_message(\n                    'Cannot load header template from file \"%s\" '\n                    \" - using default template.\" % path,\n                    cls=\"missing-file\",\n                )\n\n        if not template_file:\n            template_file = open(DEFAULTHEADER_PATH, \"r\")\n\n        template_subs = self.template_subs()\n        self.file.write(template_file.read() % template_subs)\n\n        template_file.close()\n\n    def print_preamble(self):\n        self.file.write(\"# Begin preamble for Python\\n\\n\")\n        if self.options.embed_preamble:\n            with open(PREAMBLE_PATH, \"r\") as preamble_file:\n                preamble_file_content = preamble_file.read()\n                filecontent = preamble_file_content.replace(\"# ~POINTER~\", \"\")\n                self.file.write(filecontent)\n        else:\n            self.file.write(\"from .ctypes_preamble import *\\n\")\n            self.file.write(\"from .ctypes_preamble import _variadic_function\\n\")\n\n        self.file.write(\"\\n# End preamble\\n\")\n\n    def _copy_preamble_loader_files(self, path):\n        if os.path.isfile(path):\n            abspath = os.path.abspath(path)\n            dst = os.path.dirname(abspath)\n        else:\n            error_message(\n                \"Cannot copy preamble and loader files\",\n                cls=\"missing-file\",\n            )\n            return\n\n        c_preamblefile = f\"{dst}/ctypes_preamble.py\"\n        if os.path.isfile(c_preamblefile):\n            return\n\n        pointer = \"\"\"def POINTER(obj):\n    p = ctypes.POINTER(obj)\n\n    # Convert None to a real NULL pointer to work around bugs\n    # in how ctypes handles None on 64-bit platforms\n    if not isinstance(p.from_param, classmethod):\n\n        def 
from_param(cls, x):\n            if x is None:\n                return cls()\n            else:\n                return x\n\n        p.from_param = classmethod(from_param)\n\n    return p\n\n\"\"\"\n\n        with open(PREAMBLE_PATH) as preamble_file:\n            preamble_file_content = preamble_file.read()\n            filecontent = preamble_file_content.replace(\"# ~POINTER~\", pointer)\n\n        with open(c_preamblefile, \"w\") as f:\n            f.write(filecontent)\n\n        shutil.copy(LIBRARYLOADER_PATH, f\"{dst}\")\n        os.rename(f\"{dst}/libraryloader.py\", f\"{dst}/ctypes_loader.py\")\n\n    def print_loader(self):\n        self.file.write(\"_libs = {}\\n\")\n        self.file.write(\"_libdirs = %s\\n\\n\" % self.options.compile_libdirs)\n        self.file.write(\"# Begin loader\\n\\n\")\n        if self.options.embed_preamble:\n            with open(LIBRARYLOADER_PATH, \"r\") as loader_file:\n                self.file.write(loader_file.read())\n        else:\n            self.file.write(\"from .ctypes_loader import *\\n\")\n        self.file.write(\"\\n# End loader\\n\\n\")\n        self.file.write(\n            \"add_library_search_dirs([%s])\"\n            % \", \".join([repr(d) for d in self.options.runtime_libdirs])\n        )\n        self.file.write(\"\\n\")\n\n    def print_library(self, library):\n        self.file.write('_libs[\"%s\"] = load_library(\"%s\")\\n' % (library, library))\n\n    def print_module(self, module):\n        self.file.write(\"from %s import *\\n\" % module)\n\n    def print_constant(self, constant):\n        self.file.write(\"%s = %s\" % (constant.name, constant.value.py_string(False)))\n        self.srcinfo(constant.src)\n\n    def print_undef(self, undef):\n        self.srcinfo(undef.src)\n        self.file.write(\n            \"# #undef {macro}\\n\"\n            \"try:\\n\"\n            \"    del {macro}\\n\"\n            \"except NameError:\\n\"\n            \"    
pass\\n\".format(macro=undef.macro.py_string(False))\n        )\n\n    def print_typedef(self, typedef):\n        self.file.write(\"%s = %s\" % (typedef.name, typedef.ctype.py_string()))\n        self.srcinfo(typedef.src)\n\n    def print_struct(self, struct):\n        self.srcinfo(struct.src)\n        base = {\"union\": \"Union\", \"struct\": \"Structure\"}[struct.variety]\n        self.file.write(\"class %s_%s(%s):\\n\" \"    pass\\n\" % (struct.variety, struct.tag, base))\n\n    def print_struct_members(self, struct):\n        if struct.opaque:\n            return\n\n        # is this supposed to be packed?\n        if struct.attrib.get(\"packed\", False):\n            aligned = struct.attrib.get(\"aligned\", [1])\n            assert len(aligned) == 1, \"cgrammar gave more than one arg for aligned attribute\"\n            aligned = aligned[0]\n            if isinstance(aligned, ExpressionNode):\n                # TODO: for non-constant expression nodes, this will fail:\n                aligned = aligned.evaluate(None)\n            self.file.write(\"{}_{}._pack_ = {}\\n\".format(struct.variety, struct.tag, aligned))\n\n        # handle unnamed fields.\n        unnamed_fields = []\n        names = set([x[0] for x in struct.members])\n        anon_prefix = \"unnamed_\"\n        n = 1\n        for mi in range(len(struct.members)):\n            mem = list(struct.members[mi])\n            if mem[0] is None:\n                while True:\n                    name = \"%s%i\" % (anon_prefix, n)\n                    n += 1\n                    if name not in names:\n                        break\n                mem[0] = name\n                names.add(name)\n                if type(mem[1]) is CtypesStruct:\n                    unnamed_fields.append(name)\n                struct.members[mi] = mem\n\n        self.file.write(\"%s_%s.__slots__ = [\\n\" % (struct.variety, struct.tag))\n        for name, ctype in struct.members:\n            self.file.write(\"    '%s',\\n\" % 
name)\n        self.file.write(\"]\\n\")\n\n        if len(unnamed_fields) > 0:\n            self.file.write(\"%s_%s._anonymous_ = [\\n\" % (struct.variety, struct.tag))\n            for name in unnamed_fields:\n                self.file.write(\"    '%s',\\n\" % name)\n            self.file.write(\"]\\n\")\n\n        self.file.write(\"%s_%s._fields_ = [\\n\" % (struct.variety, struct.tag))\n        for name, ctype in struct.members:\n            if isinstance(ctype, CtypesBitfield):\n                self.file.write(\n                    \"    ('%s', %s, %s),\\n\"\n                    % (name, ctype.py_string(), ctype.bitfield.py_string(False))\n                )\n            else:\n                self.file.write(\"    ('%s', %s),\\n\" % (name, ctype.py_string()))\n        self.file.write(\"]\\n\")\n\n    def print_enum(self, enum):\n        self.file.write(\"enum_%s = c_int\" % enum.tag)\n        self.srcinfo(enum.src)\n        # Values of enumerator are output as constants.\n\n    def print_function(self, function):\n        if function.variadic:\n            self.print_variadic_function(function)\n        else:\n            self.print_fixed_function(function)\n\n    def print_fixed_function(self, function):\n        self.srcinfo(function.src)\n\n        CC = \"stdcall\" if function.attrib.get(\"stdcall\", False) else \"cdecl\"\n\n        # If we know what library the function lives in, or we have only a single library,\n        # look there. 
Otherwise, check all the libraries.\n        use_single_lib = function.source_library or len(self.options.libraries) == 1\n        if use_single_lib:\n            lib = function.source_library if function.source_library else self.options.libraries[0]\n            self.file.write(\n                'if _libs[\"{L}\"].has(\"{CN}\", \"{CC}\"):\\n'\n                '    {PN} = _libs[\"{L}\"].get(\"{CN}\", \"{CC}\")\\n'.format(\n                    L=lib, CN=function.c_name(), PN=function.py_name(), CC=CC\n                )\n            )\n        else:\n            self.file.write(\n                \"for _lib in _libs.values():\\n\"\n                '    if not _lib.has(\"{CN}\", \"{CC}\"):\\n'\n                \"        continue\\n\"\n                '    {PN} = _lib.get(\"{CN}\", \"{CC}\")\\n'.format(\n                    CN=function.c_name(), PN=function.py_name(), CC=CC\n                )\n            )\n\n        # Argument types\n        self.file.write(\n            \"    %s.argtypes = [%s]\\n\"\n            % (function.py_name(), \", \".join([a.py_string() for a in function.argtypes]))\n        )\n\n        # Return value\n        if function.restype.py_string() == \"String\":\n            self.file.write(\n                \"    if sizeof(c_int) == sizeof(c_void_p):\\n\"\n                \"        {PN}.restype = ReturnString\\n\"\n                \"    else:\\n\"\n                \"        {PN}.restype = {RT}\\n\"\n                \"        {PN}.errcheck = ReturnString\\n\".format(\n                    PN=function.py_name(), RT=function.restype.py_string()\n                )\n            )\n        else:\n            self.file.write(\n                \"    %s.restype = %s\\n\" % (function.py_name(), function.restype.py_string())\n            )\n            if function.errcheck:\n                self.file.write(\n                    \"    %s.errcheck = %s\\n\" % (function.py_name(), function.errcheck.py_string())\n                )\n\n        if not 
use_single_lib:\n            self.file.write(\"    break\\n\")\n\n    def print_variadic_function(self, function):\n        CC = \"stdcall\" if function.attrib.get(\"stdcall\", False) else \"cdecl\"\n\n        self.srcinfo(function.src)\n        if function.source_library:\n            self.file.write(\n                'if _libs[\"{L}\"].has(\"{CN}\", \"{CC}\"):\\n'\n                '    _func = _libs[\"{L}\"].get(\"{CN}\", \"{CC}\")\\n'\n                \"    _restype = {RT}\\n\"\n                \"    _errcheck = {E}\\n\"\n                \"    _argtypes = [{t0}]\\n\"\n                \"    {PN} = _variadic_function(_func,_restype,_argtypes,_errcheck)\\n\".format(\n                    L=function.source_library,\n                    CN=function.c_name(),\n                    RT=function.restype.py_string(),\n                    E=function.errcheck.py_string(),\n                    t0=\", \".join([a.py_string() for a in function.argtypes]),\n                    PN=function.py_name(),\n                    CC=CC,\n                )\n            )\n        else:\n            self.file.write(\n                \"for _lib in _libs.values():\\n\"\n                '    if _lib.has(\"{CN}\", \"{CC}\"):\\n'\n                '        _func = _lib.get(\"{CN}\", \"{CC}\")\\n'\n                \"        _restype = {RT}\\n\"\n                \"        _errcheck = {E}\\n\"\n                \"        _argtypes = [{t0}]\\n\"\n                \"        {PN} = _variadic_function(_func,_restype,_argtypes,_errcheck)\\n\".format(\n                    CN=function.c_name(),\n                    RT=function.restype.py_string(),\n                    E=function.errcheck.py_string(),\n                    t0=\", \".join([a.py_string() for a in function.argtypes]),\n                    PN=function.py_name(),\n                    CC=CC,\n                )\n            )\n\n    def print_variable(self, variable):\n        self.srcinfo(variable.src)\n        if variable.source_library:\n            
self.file.write(\n                \"try:\\n\"\n                '    {PN} = ({PS}).in_dll(_libs[\"{L}\"], \"{CN}\")\\n'\n                \"except:\\n\"\n                \"    pass\\n\".format(\n                    PN=variable.py_name(),\n                    PS=variable.ctype.py_string(),\n                    L=variable.source_library,\n                    CN=variable.c_name(),\n                )\n            )\n        else:\n            self.file.write(\n                \"for _lib in _libs.values():\\n\"\n                \"    try:\\n\"\n                '        {PN} = ({PS}).in_dll(_lib, \"{CN}\")\\n'\n                \"        break\\n\"\n                \"    except:\\n\"\n                \"        pass\\n\".format(\n                    PN=variable.py_name(), PS=variable.ctype.py_string(), CN=variable.c_name()\n                )\n            )\n\n    def print_macro(self, macro):\n        # important: must check precisely against None because params may be\n        # an empty list for a func macro\n        if macro.params is None:\n            self.print_simple_macro(macro)\n        else:\n            self.print_func_macro(macro)\n\n    def print_simple_macro(self, macro):\n        # The macro translator makes heroic efforts but it occasionally fails.\n        # We want to contain the failures as much as possible.\n        # Hence the try statement.\n        self.srcinfo(macro.src)\n        self.file.write(\n            \"try:\\n\"\n            \"    {MN} = {ME}\\n\"\n            \"except:\\n\"\n            \"    pass\\n\".format(MN=macro.name, ME=macro.expr.py_string(True))\n        )\n\n    def print_func_macro(self, macro):\n        self.srcinfo(macro.src)\n        self.file.write(\n            \"def {MN}({MP}):\\n\"\n            \"    return {ME}\\n\".format(\n                MN=macro.name, MP=\", \".join(macro.params), ME=macro.expr.py_string(True)\n            )\n        )\n\n    def strip_prefixes(self):\n        if not self.options.strip_prefixes:\n      
      self.file.write(\"# No prefix-stripping\\n\\n\")\n            return\n\n        self.file.write(\n            \"# Begin prefix-stripping\\n\"\n            \"\\n\"\n            \"# Strip prefixes from all symbols following regular expression:\\n\"\n            \"# {expr}\\n\"\n            \"\\n\"\n            \"import re as __re_module\\n\"\n            \"\\n\"\n            \"__strip_expr = __re_module.compile('{expr}')\\n\"\n            \"for __k, __v in globals().copy().items():\\n\"\n            \"    __m = __strip_expr.match(__k)\\n\"\n            \"    if __m:\\n\"\n            \"        globals()[__k[__m.end():]] = __v\\n\"\n            \"        # remove symbol with prefix(?)\\n\"\n            \"        # globals().pop(__k)\\n\"\n            \"del __re_module, __k, __v, __m, __strip_expr\\n\"\n            \"\\n\"\n            \"# End prefix-stripping\\n\"\n            \"\\n\".format(expr=\"({})\".format(\"|\".join(self.options.strip_prefixes)))\n        )\n\n    def insert_file(self, filename):\n        try:\n            inserted_file = open(filename, \"r\")\n        except IOError:\n            error_message('Cannot open file \"%s\". Skipped it.' % filename, cls=\"missing-file\")\n\n        self.file.write(\n            '# Begin \"{filename}\"\\n'\n            \"\\n{file}\\n\"\n            '# End \"{filename}\"\\n'.format(filename=filename, file=inserted_file.read())\n        )\n\n        inserted_file.close()\n"
  },
  {
    "path": "ctypesgen/processor/__init__.py",
    "content": "\"\"\"\nThis module contains functions to operate on the DeclarationCollection produced\nby the parser module and prepare it for output.\n\nA convenience function, process(), calls everything else.\n\"\"\"\n\n__all__ = [\"process\"]\n\nfrom .pipeline import process\n"
  },
  {
    "path": "ctypesgen/processor/dependencies.py",
    "content": "\"\"\"\nThe dependencies module determines which descriptions depend on which other\ndescriptions.\n\"\"\"\n\nfrom ctypesgen.descriptions import MacroDescription, UndefDescription\nfrom ctypesgen.ctypedescs import visit_type_and_collect_info\n\n\ndef find_dependencies(data, opts):\n    \"\"\"Visit each description in `data` and figure out which other descriptions\n    it depends on, putting the results in desc.requirements. Also find errors in\n    ctypedecls or expressions attached to the description and transfer them to the\n    description.\"\"\"\n\n    struct_names = {}\n    enum_names = {}\n    typedef_names = {}\n    ident_names = {}\n\n    # Start the lookup tables with names from imported modules\n\n    for name in opts.other_known_names:\n        typedef_names[name] = None\n        ident_names[name] = None\n        if name.startswith(\"struct_\") or name.startswith(\"enum_\"):\n            variety = name.split(\"_\")[0]\n            tag = \"_\".join(name.split(\"_\")[1:])\n            struct_names[(variety, tag)] = None\n        if name.startswith(\"enum_\"):\n            enum_names[name] = None\n\n    def depend(desc, nametable, name):\n        \"\"\"Try to add `name` as a requirement for `desc`, looking `name` up in\n        `nametable`. Returns True if found.\"\"\"\n\n        if name in nametable:\n            requirement = nametable[name]\n            if requirement:\n                desc.add_requirements([requirement])\n            return True\n        else:\n            return False\n\n    def co_depend(desc, nametable, name):\n        \"\"\"\n        Try to add `name` as a requirement for `desc`, looking `name` up in\n        `nametable`.  
Also try to add desc as a requirement for `name`.\n\n        Returns Description of `name` if found.\n        \"\"\"\n\n        requirement = nametable.get(name, None)\n        if requirement is None:\n            return\n\n        desc.add_requirements([requirement])\n        requirement.add_requirements([desc])\n        return requirement\n\n    def find_dependencies_for(desc, kind):\n        \"\"\"Find all the descriptions that `desc` depends on and add them as\n        dependencies for `desc`. Also collect error messages regarding `desc` and\n        convert unlocateable descriptions into error messages.\"\"\"\n\n        if kind == \"constant\":\n            roots = [desc.value]\n        elif kind == \"struct\":\n            roots = []\n        elif kind == \"struct-body\":\n            roots = [desc.ctype]\n        elif kind == \"enum\":\n            roots = []\n        elif kind == \"typedef\":\n            roots = [desc.ctype]\n        elif kind == \"function\":\n            roots = desc.argtypes + [desc.restype]\n        elif kind == \"variable\":\n            roots = [desc.ctype]\n        elif kind == \"macro\":\n            if desc.expr:\n                roots = [desc.expr]\n            else:\n                roots = []\n        elif kind == \"undef\":\n            roots = [desc.macro]\n\n        cstructs, cenums, ctypedefs, errors, identifiers = [], [], [], [], []\n\n        for root in roots:\n            s, e, t, errs, i = visit_type_and_collect_info(root)\n            cstructs.extend(s)\n            cenums.extend(e)\n            ctypedefs.extend(t)\n            errors.extend(errs)\n            identifiers.extend(i)\n\n        unresolvables = []\n\n        for cstruct in cstructs:\n            if kind == \"struct\" and desc.variety == cstruct.variety and desc.tag == cstruct.tag:\n                continue\n            if not depend(desc, struct_names, (cstruct.variety, cstruct.tag)):\n                unresolvables.append('%s \"%s\"' % (cstruct.variety, 
cstruct.tag))\n\n        for cenum in cenums:\n            if kind == \"enum\" and desc.tag == cenum.tag:\n                continue\n            if not depend(desc, enum_names, cenum.tag):\n                unresolvables.append('enum \"%s\"' % cenum.tag)\n\n        for ctypedef in ctypedefs:\n            if not depend(desc, typedef_names, ctypedef):\n                unresolvables.append('typedef \"%s\"' % ctypedef)\n\n        for ident in identifiers:\n            if isinstance(desc, MacroDescription) and desc.params and ident in desc.params:\n                continue\n\n            elif opts.include_undefs and isinstance(desc, UndefDescription):\n                macro_desc = None\n                if ident == desc.macro.name:\n                    macro_desc = co_depend(desc, ident_names, ident)\n                if macro_desc is None or not isinstance(macro_desc, MacroDescription):\n                    unresolvables.append('identifier \"%s\"' % ident)\n\n            elif not depend(desc, ident_names, ident):\n                unresolvables.append('identifier \"%s\"' % ident)\n\n        for u in unresolvables:\n            errors.append((\"%s depends on an unknown %s.\" % (desc.casual_name(), u), None))\n\n        for err, cls in errors:\n            err += \" %s will not be output\" % desc.casual_name()\n            desc.error(err, cls=cls)\n\n    def add_to_lookup_table(desc, kind):\n        \"\"\"Add `desc` to the lookup table so that other descriptions that use\n        it can find it.\"\"\"\n        if kind == \"struct\":\n            if (desc.variety, desc.tag) not in struct_names:\n                struct_names[(desc.variety, desc.tag)] = desc\n        if kind == \"enum\":\n            if desc.tag not in enum_names:\n                enum_names[desc.tag] = desc\n        if kind == \"typedef\":\n            if desc.name not in typedef_names:\n                typedef_names[desc.name] = desc\n        if kind in (\"function\", \"constant\", \"variable\", \"macro\"):\n 
           if desc.name not in ident_names:\n                ident_names[desc.name] = desc\n\n    # Macros are handled differently from everything else because macros can\n    # call other macros that are referenced after them in the input file, but\n    # no other type of description can look ahead like that.\n\n    for kind, desc in data.output_order:\n        add_to_lookup_table(desc, kind)\n        if kind != \"macro\":\n            find_dependencies_for(desc, kind)\n\n    for kind, desc in data.output_order:\n        if kind == \"macro\":\n            find_dependencies_for(desc, kind)\n"
  },
  {
    "path": "ctypesgen/processor/operations.py",
    "content": "\"\"\"\nThe operations module contains various functions to process the\nDescriptionCollection and prepare it for output.\nctypesgen.processor.pipeline calls the operations module.\n\"\"\"\n\nimport re\nimport os\nimport keyword\n\nfrom ctypesgen import libraryloader\nfrom ctypesgen.descriptions import (\n    EnumDescription,\n    StructDescription,\n    TypedefDescription,\n)\nfrom ctypesgen.messages import warning_message, status_message\n\n\n# Processor functions\n\n\ndef automatically_typedef_structs(data, options):\n    \"\"\"automatically_typedef_structs() aliases \"struct_<tag>\" to \"<tag>\" for\n    every struct and union.\"\"\"\n    # XXX Check if it has already been aliased in the C code.\n\n    for struct in data.structs:\n        if not struct.ctype.anonymous:  # Don't alias anonymous structs\n            typedef = TypedefDescription(struct.tag, struct.ctype, src=struct.src)\n            typedef.add_requirements(set([struct]))\n\n            data.typedefs.append(typedef)\n            data.all.insert(data.all.index(struct) + 1, typedef)\n            data.output_order.append((\"typedef\", typedef))\n\n\ndef remove_NULL(data, options):\n    \"\"\"remove_NULL() removes any NULL definitions from the C headers because\n    ctypesgen supplies its own NULL definition.\"\"\"\n\n    for macro in data.macros:\n        if macro.name == \"NULL\":\n            macro.include_rule = \"never\"\n\n\ndef remove_descriptions_in_system_headers(data, opts):\n    \"\"\"remove_descriptions_in_system_headers() removes descriptions if they came\n    from files outside of the header files specified from the command line.\"\"\"\n\n    known_headers = [os.path.basename(x) for x in opts.headers]\n\n    for description in data.all:\n        if description.src is not None:\n            if description.src[0] == \"<command line>\":\n                description.include_rule = \"if_needed\"\n            elif description.src[0] == \"<built-in>\":\n                if not 
opts.builtin_symbols:\n                    description.include_rule = \"if_needed\"\n            elif os.path.basename(description.src[0]) not in known_headers:\n                if not opts.all_headers:\n                    # If something else requires this, include it even though\n                    # it is in a system header file.\n                    description.include_rule = \"if_needed\"\n\n\ndef remove_macros(data, opts):\n    \"\"\"remove_macros() removes macros if --no-macros is set.\"\"\"\n    if not opts.include_macros:\n        for macro in data.macros:\n            macro.include_rule = \"never\"\n\n\ndef filter_by_regexes_exclude(data, opts):\n    \"\"\"filter_by_regexes_exclude() uses regular expressions specified by options\n    dictionary to filter symbols.\"\"\"\n    if opts.exclude_symbols:\n        expr = re.compile(\"({})\".format(\"|\".join(opts.exclude_symbols)))\n        for object in data.all:\n            if expr.match(object.py_name()):\n                object.include_rule = \"never\"\n\n\ndef filter_by_regexes_include(data, opts):\n    \"\"\"filter_by_regexes_include() uses regular expressions specified by options\n    dictionary to re-include symbols previously rejected by other operations.\"\"\"\n    if opts.include_symbols:\n        expr = re.compile(\"({})\".format(\"|\".join(opts.include_symbols)))\n        for object in data.all:\n            if object.include_rule != \"never\":\n                if expr.match(object.py_name()):\n                    object.include_rule = \"yes\"\n\n\ndef fix_conflicting_names(data, opts):\n    \"\"\"If any descriptions from the C code would overwrite Python builtins or\n    other important names, fix_conflicting_names() adds underscores to resolve\n    the name conflict.\"\"\"\n\n    # This is the order of priority for names\n    descriptions = (\n        data.functions\n        + data.variables\n        + data.structs\n        + data.typedefs\n        + data.enums\n        + data.constants\n        
+ data.macros\n    )\n\n    # This dictionary maps names to a string representing where the name\n    # came from.\n    important_names = {}\n\n    preamble_names = set()\n    preamble_names = preamble_names.union(\n        [\n            \"DarwinLibraryLoader\",\n            \"LibraryLoader\",\n            \"LinuxLibraryLoader\",\n            \"WindowsLibraryLoader\",\n            \"_WindowsLibrary\",\n            \"add_library_search_dirs\",\n            \"_environ_path\",\n            \"ctypes\",\n            \"load_library\",\n            \"loader\",\n            \"os\",\n            \"re\",\n            \"sys\",\n        ]\n    )\n    preamble_names = preamble_names.union(\n        [\n            \"ArgumentError\",\n            \"CFUNCTYPE\",\n            \"POINTER\",\n            \"ReturnString\",\n            \"String\",\n            \"Structure\",\n            \"UNCHECKED\",\n            \"Union\",\n            \"UserString\",\n            \"_variadic_function\",\n            \"addressof\",\n            \"c_buffer\",\n            \"c_byte\",\n            \"c_char\",\n            \"c_char_p\",\n            \"c_double\",\n            \"c_float\",\n            \"c_int\",\n            \"c_int16\",\n            \"c_int32\",\n            \"c_int64\",\n            \"c_int8\",\n            \"c_long\",\n            \"c_longlong\",\n            \"c_ptrdiff_t\",\n            \"c_short\",\n            \"c_size_t\",\n            \"c_ubyte\",\n            \"c_uint\",\n            \"c_uint16\",\n            \"c_uint32\",\n            \"c_uint64\",\n            \"c_uint8\",\n            \"c_ulong\",\n            \"c_ulonglong\",\n            \"c_ushort\",\n            \"c_void\",\n            \"c_void_p\",\n            \"c_voidp\",\n            \"c_wchar\",\n            \"c_wchar_p\",\n            \"cast\",\n            \"ctypes\",\n            \"os\",\n            \"pointer\",\n            \"sizeof\",\n        ]\n    )\n    for name in preamble_names:\n        
important_names[name] = \"a name needed by ctypes or ctypesgen\"\n    for name in dir(__builtins__):\n        important_names[name] = \"a Python builtin\"\n    for name in opts.other_known_names:\n        important_names[name] = \"a name from an included Python module\"\n    for name in keyword.kwlist:\n        important_names[name] = \"a Python keyword\"\n\n    for description in descriptions:\n        if description.py_name() in important_names:\n            conflict_name = important_names[description.py_name()]\n\n            original_name = description.casual_name()\n            while description.py_name() in important_names:\n                if isinstance(description, (StructDescription, EnumDescription)):\n                    description.tag += \"_\"\n                else:\n                    description.name = \"_\" + description.name\n\n            if not description.dependents:\n                description.warning(\n                    \"%s has been renamed to %s due to a name \"\n                    \"conflict with %s.\" % (original_name, description.casual_name(), conflict_name),\n                    cls=\"rename\",\n                )\n            else:\n                description.warning(\n                    \"%s has been renamed to %s due to a name \"\n                    \"conflict with %s. 
Other objects depend on %s - those \"\n                    \"objects will be skipped.\"\n                    % (original_name, description.casual_name(), conflict_name, original_name),\n                    cls=\"rename\",\n                )\n\n                for dependent in description.dependents:\n                    dependent.include_rule = \"never\"\n\n            if description.include_rule == \"yes\":\n                important_names[description.py_name()] = description.casual_name()\n\n    # Names of struct members don't conflict with much, but they can conflict\n    # with Python keywords.\n\n    for struct in data.structs:\n        if not struct.opaque:\n            for i, (name, type) in enumerate(struct.members):\n                if name in keyword.kwlist:\n                    struct.members[i] = (\"_\" + name, type)\n                    struct.warning(\n                        'Member \"%s\" of %s has been renamed to '\n                        '\"%s\" because it has the same name as a Python '\n                        \"keyword.\" % (name, struct.casual_name(), \"_\" + name),\n                        cls=\"rename\",\n                    )\n\n    # Macro arguments may be have names that conflict with Python keywords.\n    # In a perfect world, this would simply rename the parameter instead\n    # of throwing an error message.\n\n    for macro in data.macros:\n        if macro.params:\n            for param in macro.params:\n                if param in keyword.kwlist:\n                    macro.error(\n                        'One of the parameters to %s, \"%s\" has the '\n                        \"same name as a Python keyword. 
%s will be skipped.\"\n                        % (macro.casual_name(), param, macro.casual_name()),\n                        cls=\"name-conflict\",\n                    )\n\n\ndef find_source_libraries(data, opts):\n    \"\"\"find_source_libraries() determines which library contains each function\n    and variable.\"\"\"\n\n    all_symbols = data.functions + data.variables\n\n    for symbol in all_symbols:\n        symbol.source_library = None\n\n    libraryloader.add_library_search_dirs(opts.compile_libdirs)\n\n    for library_name in opts.libraries:\n        if opts.no_load_library:\n            status_message(\"Bypass load_library %s\" % library_name)\n            continue\n\n        try:\n            library = libraryloader.load_library(library_name)\n        except ImportError:\n            warning_message(\n                'Could not load library \"%s\". Okay, I\\'ll '\n                \"try to load it at runtime instead. \" % (library_name),\n                cls=\"missing-library\",\n            )\n            continue\n        for symbol in all_symbols:\n            if symbol.source_library is None:\n                if hasattr(library, symbol.c_name()):\n                    symbol.source_library = library_name\n"
  },
  {
    "path": "ctypesgen/processor/pipeline.py",
    "content": "\"\"\"\nA brief explanation of the processing steps:\n1. The dependencies module builds a dependency graph for the descriptions.\n\n2. Operation functions are called to perform various operations on the\ndescriptions. The operation functions are found in operations.py.\n\n3. If an operation function decides to exclude a description from the output, it\nsets 'description.include_rule' to \"never\"; if an operation function decides not\nto include a description by default, but to allow if required, it sets\n'description.include_rule' to \"if_needed\".\n\n4. If an operation function encounters an error that makes a description unfit\nfor output, it appends a string error message to 'description.errors'.\n'description.warnings' is a list of warning messages that will be displayed but\nwill not prevent the description from being output.\n\n5. Based on 'description.include_rule', calculate_final_inclusion() decides\nwhich descriptions to include in the output. It sets 'description.included' to\nTrue or False.\n\n6. For each description, print_errors_encountered() checks if there are error\nmessages in 'description.errors'. If so, print_errors_encountered() prints the\nerror messages, but only if 'description.included' is True - it doesn't bother\nthe user with error messages regarding descriptions that would not be in the\noutput anyway. It also prints 'description.warnings'.\n\n7. 
calculate_final_inclusion() is called again to recalculate based on\nthe errors that print_errors_encountered() has flagged.\n\n\"\"\"\n\nfrom ctypesgen.descriptions import MacroDescription\nfrom ctypesgen.messages import (\n    error_message,\n    status_message,\n    warning_message,\n)\nfrom ctypesgen.processor.dependencies import find_dependencies\nfrom ctypesgen.processor.operations import (\n    automatically_typedef_structs,\n    filter_by_regexes_exclude,\n    filter_by_regexes_include,\n    find_source_libraries,\n    fix_conflicting_names,\n    remove_descriptions_in_system_headers,\n    remove_macros,\n    remove_NULL,\n)\n\n\ndef process(data, options):\n    status_message(\"Processing description list.\")\n\n    find_dependencies(data, options)\n\n    automatically_typedef_structs(data, options)\n    remove_NULL(data, options)\n    remove_descriptions_in_system_headers(data, options)\n    filter_by_regexes_exclude(data, options)\n    filter_by_regexes_include(data, options)\n    remove_macros(data, options)\n    if options.output_language.startswith(\"py\"):\n        # this function is python specific\n        fix_conflicting_names(data, options)\n    find_source_libraries(data, options)\n\n    calculate_final_inclusion(data, options)\n    print_errors_encountered(data, options)\n    calculate_final_inclusion(data, options)\n\n\ndef calculate_final_inclusion(data, opts):\n    \"\"\"Calculates which descriptions will be included in the output library.\n\n    An object with include_rule=\"never\" is never included.\n    An object with include_rule=\"yes\" is included if its requirements can be included.\n    An object with include_rule=\"if_needed\" is included if an object to be included\n        requires it and if its requirements can be included.\n    \"\"\"\n\n    def can_include_desc(desc):\n        if desc.can_include is None:\n            if desc.include_rule == \"no\":\n                desc.can_include = False\n            elif desc.include_rule 
== \"yes\" or desc.include_rule == \"if_needed\":\n                desc.can_include = True\n                for req in desc.requirements:\n                    if not can_include_desc(req):\n                        desc.can_include = False\n        return desc.can_include\n\n    def do_include_desc(desc):\n        if desc.included:\n            return  # We've already been here\n        desc.included = True\n        for req in desc.requirements:\n            do_include_desc(req)\n\n    for desc in data.all:\n        desc.can_include = None  # None means \"Not Yet Decided\"\n        desc.included = False\n\n    for desc in data.all:\n        if desc.include_rule == \"yes\":\n            if can_include_desc(desc):\n                do_include_desc(desc)\n\n\ndef print_errors_encountered(data, opts):\n    # See descriptions.py for an explanation of the error-handling mechanism\n    for desc in data.all:\n        # If description would not have been included, dont bother user by\n        # printing warnings.\n        if desc.included or opts.show_all_errors:\n            if opts.show_long_errors or len(desc.errors) + len(desc.warnings) <= 2:\n                for error, cls in desc.errors:\n                    # Macro errors will always be displayed as warnings.\n                    if isinstance(desc, MacroDescription):\n                        if opts.show_macro_warnings:\n                            warning_message(error, cls)\n                    else:\n                        error_message(error, cls)\n                for warning, cls in desc.warnings:\n                    warning_message(warning, cls)\n\n            else:\n                if desc.errors:\n                    error1, cls1 = desc.errors[0]\n                    error_message(error1, cls1)\n                    numerrs = len(desc.errors) - 1\n                    numwarns = len(desc.warnings)\n                    if numwarns:\n                        error_message(\n                            \"%d more 
errors and %d more warnings \"\n                            \"for %s\" % (numerrs, numwarns, desc.casual_name())\n                        )\n                    else:\n                        error_message(\"%d more errors for %s \" % (numerrs, desc.casual_name()))\n                else:\n                    warning1, cls1 = desc.warnings[0]\n                    warning_message(warning1, cls1)\n                    warning_message(\n                        \"%d more errors for %s\" % (len(desc.warnings) - 1, desc.casual_name())\n                    )\n        if desc.errors:\n            # process() will recalculate to take this into account\n            desc.include_rule = \"never\"\n"
  },
  {
    "path": "ctypesgen/version.py",
    "content": "#!/usr/bin/env python3\n\nfrom subprocess import Popen, PIPE\nimport os\nfrom os import path\n\nTHIS_DIR = path.dirname(__file__)\nVERSION_FILE = path.join(THIS_DIR, \"VERSION\")\nDEFAULT_PREFIX = \"ctypesgen\"\n\n__all__ = [\"VERSION\", \"version_tuple\", \"version\", \"compatible\"]\n\n\ndef version_tuple(v):\n    try:\n        vs = v.split(\"-\")\n        t = tuple(int(i) for i in vs[1].split(\".\"))\n        if len(vs) > 2:\n            t += (int(vs[2]),)\n        return t\n    except Exception:\n        return (-1, -1, -1, v)\n\n\ndef read_file_version():\n    f = open(VERSION_FILE)\n    v = f.readline()\n    f.close()\n    return v.strip()\n\n\ndef version():\n    try:\n        args = {\"cwd\": THIS_DIR}\n        devnull = open(os.devnull, \"w\")\n        p = Popen([\"git\", \"describe\"], stdout=PIPE, stderr=devnull, **args)\n        out, err = p.communicate()\n        if p.returncode:\n            raise RuntimeError(\"no version defined?\")\n        git_tag = out.strip().decode()\n        return f\"{DEFAULT_PREFIX}-{git_tag}\"\n    except Exception:\n        # failover is to try VERSION_FILE instead\n        try:\n            return f\"{DEFAULT_PREFIX}-{read_file_version()}\"\n        except Exception:\n            return f\"{DEFAULT_PREFIX}-0.0.0\"\n\n\ndef version_number():\n    return version().partition(\"-\")[-1]\n\n\ndef compatible(v0, v1):\n    v0 = version_tuple(v0)\n    v1 = version_tuple(v1)\n    return v0[:2] == v1[:2]\n\n\ndef write_version_file(v=None):\n    if v is None:\n        v = version()\n    f = open(VERSION_FILE, \"w\")\n    f.write(v)\n    f.close()\n\n\nVERSION = version()\nVERSION_NUMBER = version_number()\n\n\nif __name__ == \"__main__\":\n    import argparse\n\n    p = argparse.ArgumentParser()\n    p.add_argument(\"--save\", action=\"store_true\", help=f\"Store version to {VERSION_FILE}\")\n    p.add_argument(\n        \"--read-file-version\",\n        action=\"store_true\",\n        help=f\"Read the version 
stored in {VERSION_FILE}\",\n    )\n    args = p.parse_args()\n\n    v = version()\n    if args.save:\n        write_version_file(v)\n    if args.read_file_version:\n        v = read_file_version()\n    print(v)\n"
  },
  {
    "path": "debian/.gitignore",
    "content": "changelog\ncopyright\nctypesgen/\n*.log\n*.substvars\nfiles\ndebhelper-build-stamp\n*.postinst.debhelper\n*.prerm.debhelper\n*.swp\nctypesgen.1\n"
  },
  {
    "path": "debian/compat",
    "content": "9\n"
  },
  {
    "path": "debian/control",
    "content": "Source: ctypesgen\nSection: universe/python\nPriority: optional\nBuild-Depends: debhelper (>= 9), dh-python,\n python3-all (>=3.2), python3-setuptools, help2man\n#python-all (>=2.6.6-3~)\nMaintainer: Spencer E. Olson <olsonse@umich.edu>\nHomepage: https://github.com/davidjamesca/ctypesgen\nVcs-Git: https://github.com/davidjamesca/ctypesgen.git\nStandards-Version: 3.9.2\n#X-Python-Version: >= 2.7\n#XS-Python-Version: >= 2.7\nX-Python3-Version: >= 3.2\nXS-Python3-Version: >= 3.2\n\nPackage: ctypesgen\nArchitecture: all\nDepends: ${python3:Depends}, ${misc:Depends}\nDescription: Python ctypes wrapper generator\n ctypesgen reads parses c header files and creates a wrapper for libraries base\n on what it finds.  Preprocessor macros are handled in a manner consistent with\n typical c code.  Preprocessor macro functions are translated into Python\n functions that are then made available to the user of the newly-generated\n Python wrapper library.\n .\n ctypesgen can also output JSON, which can be used with Mork, which generates\n bindings for Lua, using the alien module (which binds libffi to Lua).\n"
  },
  {
    "path": "debian/ctypesgen.docs",
    "content": "demo/\n"
  },
  {
    "path": "debian/ctypesgen.manpages",
    "content": "debian/ctypesgen.1\n"
  },
  {
    "path": "debian/mk_changelog",
    "content": "#!/usr/bin/env python3\n# vim: ts=2:sw=2:et:tw=80:nowrap\n\nfrom subprocess import Popen, PIPE\nimport re, io\nfrom datetime import datetime\n\nclass GitVersion(object):\n  PACKAGE_FMT = '(?P<name>(-[^0-9]|[^-])*)-(?P<version>[0-9].*)'\n  PACKAGE_NAME= 'ctypesgen'\n\n  # basic `git describe` command (add --tags to include un-annotated tags)\n  GIT_DESCRIBE = 'git describe'.split()\n\n  def __init__(self, tag=None):\n    self.date = None\n\n    if tag is None:\n      p = Popen(self.GIT_DESCRIBE, stdout=PIPE)\n      self.current_git_version = p.communicate()[0].decode().strip()\n\n      p = Popen(self.GIT_DESCRIBE + ['--abbrev=0'], stdout=PIPE)\n      self.current_git_tag = p.communicate()[0].decode().strip()\n\n      # let's set the date for this version to the last commit\n      p = Popen(['git','log','-1','--format=%cD',self.current_git_version],\n                stdout=PIPE)\n      self.date = p.communicate()[0].decode().strip()\n    else:\n      self.current_git_version = self.current_git_tag = tag\n\n    p = Popen(self.GIT_DESCRIBE + [self.current_git_version+'~1', '--abbrev=0'],\n              stdout=PIPE, stderr=PIPE)\n    self.last_git_version = p.communicate()[0].decode().strip()\n\n    p = Popen(['git','show',self.current_git_tag], stdout=PIPE)\n    tag_commit = p.communicate()[0].decode()\n    if 'Tagger:' in tag_commit:\n      # this is an annotated tag\n      m = re.search('\\nTagger:\\s*(?P<name>[^\\n]*)\\nDate:\\s*(?P<date>[^\\n]*)\\n',\n                    tag_commit)\n    else:\n      # this is a non-annotated tag\n      m = re.search('\\nAuthor:\\s*(?P<name>[^\\n]*)\\nDate:\\s*(?P<date>[^\\n]*)\\n',\n                    tag_commit)\n\n    self.maintainer = m.groupdict()['name']\n    if not self.date:\n      date = datetime.strptime(m.groupdict()['date'], '%a %b %d %H:%M:%S %Y %z')\n      self.date = date.strftime('%a, %d %b %Y %H:%M:%S %z')\n\n  def match_version(self):\n    try:\n      D = re.match(self.PACKAGE_FMT, 
self.current_git_version).groupdict()\n      D.setdefault('name', self.PACKAGE_NAME)\n      return D\n    except:\n      return dict(name='unknown', version='0.0.0')\n\n  @property\n  def version_number(self):\n    m = self.match_version()\n    if '-g' in m['version']:\n      return m['version'].rpartition('-g')[0]\n    else:\n      return m['version']\n\n  @property\n  def package_name(self):\n    return self.match_version()['name']\n\n  @property\n  def changes(self):\n    if self.last_git_version:\n      p = Popen(['git', 'log',\n                 self.last_git_version + '..' + self.current_git_version,\n                 '--format=<log-header>* %s%n%b'], stdout=PIPE)\n      psed = Popen(['sed','-e','s/^/    /', '-e', 's/\\\\s*<log-header>\\\\*/  */'],\n                   stdin=p.stdout, stdout=PIPE)\n      return psed.communicate()[0].decode()\n    else:\n      return '  * Initial version\\n'\n\n  @property\n  def chlog_first_line(self):\n    return '{} ({}) unstable; urgency=low' \\\n      .format(self.package_name, self.version_number)\n\n  @property\n  def chlog_last_line(self):\n    return ' -- {}  {}\\n'.format(self.maintainer, self.date)\n\n  def __call__(self):\n    return '{}\\n{}{}\\n' \\\n      .format(self.chlog_first_line, self.changes, self.chlog_last_line)\n\n\nclass GitVersionCollection(object):\n  def __init__(self):\n    p = Popen(['git', 'tag'], stdout=PIPE)\n    self.tags = p.communicate()[0].decode().splitlines()\n    self.tags.reverse()\n\n    self.has_untagged_changes = False\n    p = Popen(GitVersion.GIT_DESCRIBE, stdout=PIPE)\n    current = p.communicate()[0].decode().strip()\n    if current not in self.tags:\n      self.has_untagged_changes = True\n\n  def __call__(self):\n    out = io.StringIO()\n    if self.has_untagged_changes:\n      out.write(GitVersion()())\n    for tag in self.tags:\n      out.write(GitVersion(tag)())\n\n    return out.getvalue()\n\n\ndef main():\n  c = GitVersionCollection()\n  print(c())\n\nif 
__name__=='__main__':\n  main()\n"
  },
  {
    "path": "debian/mk_manpage",
    "content": "#!/usr/bin/env python3\n\"\"\"\nThis script generates a manual page.\n\"\"\"\n\nfrom os import path, system, unlink, mkdir, rmdir\nimport debian.deb822\n\nTHIS_DIR = path.dirname(__file__)\n\nSECTIONS = \"\"\"\\\n[DESCRIPTION]\n{description}\n\n[AVAILABILITY]\n{homepage}\n\n[COPYRIGHT]\n{LICENSE}\n\"\"\"\n\ndef mkman():\n  D = dict()\n\n  with open(path.join(THIS_DIR, 'control')) as f:\n    Source, P = debian.deb822.Packages.iter_paragraphs(f)\n  desc_paragraphs = P['Description'].splitlines()\n\n  D['homepage'] = Source['Homepage']\n  D['longname'] = desc_paragraphs[0]\n\n  desc = []\n  for di in desc_paragraphs[1:]:\n    dis = di.strip()\n    desc.append('' if dis == '.' else dis)\n\n  D['description'] = '\\n'.join(desc)\n\n  with open(path.join(THIS_DIR, path.pardir, 'LICENSE')) as f:\n    D['LICENSE'] = f.read()\n\n  with open(path.join(THIS_DIR, 'ctypesgen.1.input'), 'w') as f:\n    f.write(SECTIONS.format(**D))\n\n  try: mkdir(path.join(THIS_DIR,'tmp.cmdir'))\n  except FileExistsError: pass\n  system('ln -s {} {}'.format(\n    path.abspath(path.join(THIS_DIR,path.pardir, 'run.py')),\n    path.join(THIS_DIR, 'tmp.cmdir', 'ctypesgen')))\n  system('help2man -n \"{}\" -s 1 -N -i {} -o {} {}'.format(\n    D['longname'],\n    path.join(THIS_DIR, 'ctypesgen.1.input'),\n    path.join(THIS_DIR, 'ctypesgen.1'),\n    path.join(THIS_DIR, 'tmp.cmdir', 'ctypesgen')))\n  # cleanup\n  unlink(path.join(THIS_DIR, 'tmp.cmdir', 'ctypesgen'))\n  unlink(path.join(THIS_DIR, 'ctypesgen.1.input'))\n  rmdir(path.join(THIS_DIR, 'tmp.cmdir'))\n\nif __name__ == '__main__':\n  mkman()\n"
  },
  {
    "path": "debian/rules",
    "content": "#!/usr/bin/make -f\n# debian/rules\n# -*- makefile -*-\n\nexport DH_VERBOSE=1\nDH_VERBOSE = 1\n\n# see EXAMPLES in dpkg-buildflags(1) and read /usr/share/dpkg/*\nDPKG_EXPORT_BUILDFLAGS = 1\ninclude /usr/share/dpkg/default.mk\n\n# see FEATURE AREAS in dpkg-buildflags(1)\n#export DEB_BUILD_MAINT_OPTIONS = hardening=+all\n\n# see ENVIRONMENT in dpkg-buildflags(1)\n# package maintainers to append CFLAGS\n#export DEB_CFLAGS_MAINT_APPEND  = -Wall -pedantic\n# package maintainers to append LDFLAGS\n#export DEB_LDFLAGS_MAINT_APPEND = -Wl,--as-needed\n\n# main packaging script based on dh7 syntax\n%:\n\tdh $@ --with python3 --buildsystem=pybuild\n\nbuild binary :  $(CURDIR)/debian/changelog \\\n\t\t$(CURDIR)/debian/copyright \\\n\t\t$(CURDIR)/debian/ctypesgen.1\n\noverride_dh_compress:\n\tdh_compress -X.c -X.py\n\noverride_dh_installdocs:\n\tdh_installdocs\n\t$(RM) $(CURDIR)/debian/ctypesgen/usr/share/doc/ctypesgen/demo/.gitignore\n\n$(CURDIR)/debian/changelog : $(CURDIR)/debian/mk_changelog\n\t$(CURDIR)/debian/mk_changelog > $(CURDIR)/debian/changelog\n\n$(CURDIR)/debian/copyright : $(CURDIR)/LICENSE\n\tcp $(CURDIR)/LICENSE $(CURDIR)/debian/copyright\n\n$(CURDIR)/debian/ctypesgen.1 : $(CURDIR)/debian/mk_manpage \\\n\t\t\t       $(CURDIR)/debian/control \\\n\t\t\t       $(CURDIR)/LICENSE\n\t$(CURDIR)/debian/mk_manpage\n"
  },
  {
    "path": "demo/.gitignore",
    "content": "demoapp\ndemolib.o\ndemolib.so\n"
  },
  {
    "path": "demo/README.md",
    "content": "Small Demonstration of Ctypesgen\n================================\n\nThis little demonstration was originally written by developer clach04 (when this\nwas still residing on code.google.com).  This example shows how bindings for a\nvery simple c-library and associated header can be quickly generated using\nCtypesgen and accessed by a Python program.\n\nMost of the instructions are included in the top of the various files, but a\nsummary is given here.\n\n\nSteps:\n----------\n1. Compile the shared c-library\n\n    `gcc -fPIC -shared -o demolib.so demolib.c`\n\n2. (Re)Generate the bindings (or you can just try the bindings that were\n    already generated and saved in this directory)\n\n    `../run.py -o pydemolib.py -l demolib.so demolib.h`\n\n3. Run the app that uses these newly generated bindings\n\n    `./demoapp.py`\n\n    The results of this execution should give\n\n    ```\n    a 1\n    b 2\n    result 3\n    ```\n\n4. You can also try executing the same code completely from a c-program\n\n    - Compile test code:\n\n        `gcc -o demoapp demoapp.c  demolib.c demolib.h`\n\n    - Execute:\n\n        `./demoapp`\n\n    - Observe the same results as before:\n\n        ```\n        a 1\n        b 2\n        result 3\n        ```\n"
  },
  {
    "path": "demo/demoapp.c",
    "content": "/*\n** Trivial ctypesgen demo library consumer\n**  from http://code.google.com/p/ctypesgen\n**\n** This demoapp it self is not useful, it is a sanity check for the library.\n**\n**  Build static:   cc -o demoapp demoapp.c  demolib.c  demolib.h\n**\n*/\n\n\n#include <stdlib.h>\n#include <stdio.h>\n\n#include \"demolib.h\"\n\nint main(int argc, char **argv)\n{\n    int a = 1;\n    int b = 2;\n    int result = 0;\n\n    result = trivial_add(a, b);\n    printf(\"a %d\\n\", a);\n    printf(\"b %d\\n\", b);\n    printf(\"result %d\\n\", result);\n}\n"
  },
  {
    "path": "demo/demoapp.py",
    "content": "#!/usr/bin/env python3\n\"\"\"\nTrivial ctypesgen demo library consumer\nfrom http://code.google.com/p/ctypesgen\n\n NOTE demolib.py needs to be generated via:\n\n    ../run.py -o pydemolib.py -l demolib demolib.h\n    ../run.py -o pydemolib.py -l demolib.so demolib.h\n\n\n\"\"\"\n\nimport sys\n\nimport pydemolib  # generated from demolib.h by ctypesgen\n\n\ndef do_demo():\n    a = 1\n    b = 2\n    result = pydemolib.trivial_add(a, b)\n    print(\"a\", a)\n    print(\"b\", b)\n    print(\"result\", result)\n\n\ndef main(argv=None):\n    if argv is None:\n        argv = sys.argv\n\n    do_demo()\n\n    return 0\n\n\nif __name__ == \"__main__\":\n    sys.exit(main())\n"
  },
  {
    "path": "demo/demolib.c",
    "content": "/*\n** Trivial ctypesgen demo library\n**  from http://code.google.com/p/ctypesgen\n\nDumb manual build with:\n\n\n    gcc -fPIC -c demolib.c\n    gcc -shared -o demolib.so demolib.o\n\n    gcc -fPIC -shared -o demolib.so demolib.c\n\n*/\n\n#include \"demolib.h\"\n\nint trivial_add(int a, int b)\n{\n    return a + b;\n}\n"
  },
  {
    "path": "demo/demolib.h",
    "content": "/*\n** Trivial ctypesgen demo library\n**  from http://code.google.com/p/ctypesgen\n*/\n\nint trivial_add(int a, int b);\n"
  },
  {
    "path": "demo/pydemolib.py",
    "content": "r\"\"\"Wrapper for demolib.h\n\nGenerated with:\n../run.py -o pydemolib.py -l demolib.so demolib.h\n\nDo not modify this file.\n\"\"\"\n\n__docformat__ = \"restructuredtext\"\n\n# Begin preamble for Python\n\nimport ctypes\nimport sys\nfrom ctypes import *  # noqa: F401, F403\n\n_int_types = (ctypes.c_int16, ctypes.c_int32)\nif hasattr(ctypes, \"c_int64\"):\n    # Some builds of ctypes apparently do not have ctypes.c_int64\n    # defined; it's a pretty good bet that these builds do not\n    # have 64-bit pointers.\n    _int_types += (ctypes.c_int64,)\nfor t in _int_types:\n    if ctypes.sizeof(t) == ctypes.sizeof(ctypes.c_size_t):\n        c_ptrdiff_t = t\ndel t\ndel _int_types\n\n\n\nclass UserString:\n    def __init__(self, seq):\n        if isinstance(seq, bytes):\n            self.data = seq\n        elif isinstance(seq, UserString):\n            self.data = seq.data[:]\n        else:\n            self.data = str(seq).encode()\n\n    def __bytes__(self):\n        return self.data\n\n    def __str__(self):\n        return self.data.decode()\n\n    def __repr__(self):\n        return repr(self.data)\n\n    def __int__(self):\n        return int(self.data.decode())\n\n    def __long__(self):\n        return int(self.data.decode())\n\n    def __float__(self):\n        return float(self.data.decode())\n\n    def __complex__(self):\n        return complex(self.data.decode())\n\n    def __hash__(self):\n        return hash(self.data)\n\n    def __le__(self, string):\n        if isinstance(string, UserString):\n            return self.data <= string.data\n        else:\n            return self.data <= string\n\n    def __lt__(self, string):\n        if isinstance(string, UserString):\n            return self.data < string.data\n        else:\n            return self.data < string\n\n    def __ge__(self, string):\n        if isinstance(string, UserString):\n            return self.data >= string.data\n        else:\n            return self.data >= 
string\n\n    def __gt__(self, string):\n        if isinstance(string, UserString):\n            return self.data > string.data\n        else:\n            return self.data > string\n\n    def __eq__(self, string):\n        if isinstance(string, UserString):\n            return self.data == string.data\n        else:\n            return self.data == string\n\n    def __ne__(self, string):\n        if isinstance(string, UserString):\n            return self.data != string.data\n        else:\n            return self.data != string\n\n    def __contains__(self, char):\n        return char in self.data\n\n    def __len__(self):\n        return len(self.data)\n\n    def __getitem__(self, index):\n        return self.__class__(self.data[index])\n\n    def __getslice__(self, start, end):\n        start = max(start, 0)\n        end = max(end, 0)\n        return self.__class__(self.data[start:end])\n\n    def __add__(self, other):\n        if isinstance(other, UserString):\n            return self.__class__(self.data + other.data)\n        elif isinstance(other, bytes):\n            return self.__class__(self.data + other)\n        else:\n            return self.__class__(self.data + str(other).encode())\n\n    def __radd__(self, other):\n        if isinstance(other, bytes):\n            return self.__class__(other + self.data)\n        else:\n            return self.__class__(str(other).encode() + self.data)\n\n    def __mul__(self, n):\n        return self.__class__(self.data * n)\n\n    __rmul__ = __mul__\n\n    def __mod__(self, args):\n        return self.__class__(self.data % args)\n\n    # the following methods are defined in alphabetical order:\n    def capitalize(self):\n        return self.__class__(self.data.capitalize())\n\n    def center(self, width, *args):\n        return self.__class__(self.data.center(width, *args))\n\n    def count(self, sub, start=0, end=sys.maxsize):\n        return self.data.count(sub, start, end)\n\n    def decode(self, encoding=None, 
errors=None):  # XXX improve this?\n        if encoding:\n            if errors:\n                return self.__class__(self.data.decode(encoding, errors))\n            else:\n                return self.__class__(self.data.decode(encoding))\n        else:\n            return self.__class__(self.data.decode())\n\n    def encode(self, encoding=None, errors=None):  # XXX improve this?\n        if encoding:\n            if errors:\n                return self.__class__(self.data.encode(encoding, errors))\n            else:\n                return self.__class__(self.data.encode(encoding))\n        else:\n            return self.__class__(self.data.encode())\n\n    def endswith(self, suffix, start=0, end=sys.maxsize):\n        return self.data.endswith(suffix, start, end)\n\n    def expandtabs(self, tabsize=8):\n        return self.__class__(self.data.expandtabs(tabsize))\n\n    def find(self, sub, start=0, end=sys.maxsize):\n        return self.data.find(sub, start, end)\n\n    def index(self, sub, start=0, end=sys.maxsize):\n        return self.data.index(sub, start, end)\n\n    def isalpha(self):\n        return self.data.isalpha()\n\n    def isalnum(self):\n        return self.data.isalnum()\n\n    def isdecimal(self):\n        return self.data.isdecimal()\n\n    def isdigit(self):\n        return self.data.isdigit()\n\n    def islower(self):\n        return self.data.islower()\n\n    def isnumeric(self):\n        return self.data.isnumeric()\n\n    def isspace(self):\n        return self.data.isspace()\n\n    def istitle(self):\n        return self.data.istitle()\n\n    def isupper(self):\n        return self.data.isupper()\n\n    def join(self, seq):\n        return self.data.join(seq)\n\n    def ljust(self, width, *args):\n        return self.__class__(self.data.ljust(width, *args))\n\n    def lower(self):\n        return self.__class__(self.data.lower())\n\n    def lstrip(self, chars=None):\n        return self.__class__(self.data.lstrip(chars))\n\n    def 
partition(self, sep):\n        return self.data.partition(sep)\n\n    def replace(self, old, new, maxsplit=-1):\n        return self.__class__(self.data.replace(old, new, maxsplit))\n\n    def rfind(self, sub, start=0, end=sys.maxsize):\n        return self.data.rfind(sub, start, end)\n\n    def rindex(self, sub, start=0, end=sys.maxsize):\n        return self.data.rindex(sub, start, end)\n\n    def rjust(self, width, *args):\n        return self.__class__(self.data.rjust(width, *args))\n\n    def rpartition(self, sep):\n        return self.data.rpartition(sep)\n\n    def rstrip(self, chars=None):\n        return self.__class__(self.data.rstrip(chars))\n\n    def split(self, sep=None, maxsplit=-1):\n        return self.data.split(sep, maxsplit)\n\n    def rsplit(self, sep=None, maxsplit=-1):\n        return self.data.rsplit(sep, maxsplit)\n\n    def splitlines(self, keepends=0):\n        return self.data.splitlines(keepends)\n\n    def startswith(self, prefix, start=0, end=sys.maxsize):\n        return self.data.startswith(prefix, start, end)\n\n    def strip(self, chars=None):\n        return self.__class__(self.data.strip(chars))\n\n    def swapcase(self):\n        return self.__class__(self.data.swapcase())\n\n    def title(self):\n        return self.__class__(self.data.title())\n\n    def translate(self, *args):\n        return self.__class__(self.data.translate(*args))\n\n    def upper(self):\n        return self.__class__(self.data.upper())\n\n    def zfill(self, width):\n        return self.__class__(self.data.zfill(width))\n\n\nclass MutableString(UserString):\n    \"\"\"mutable string objects\n\n    Python strings are immutable objects.  This has the advantage, that\n    strings may be used as dictionary keys.  
If this property isn't needed\n    and you insist on changing string values in place instead, you may cheat\n    and use MutableString.\n\n    But the purpose of this class is an educational one: to prevent\n    people from inventing their own mutable string class derived\n    from UserString and than forget thereby to remove (override) the\n    __hash__ method inherited from UserString.  This would lead to\n    errors that would be very hard to track down.\n\n    A faster and better solution is to rewrite your program using lists.\"\"\"\n\n    def __init__(self, string=\"\"):\n        self.data = string\n\n    def __hash__(self):\n        raise TypeError(\"unhashable type (it is mutable)\")\n\n    def __setitem__(self, index, sub):\n        if index < 0:\n            index += len(self.data)\n        if index < 0 or index >= len(self.data):\n            raise IndexError\n        self.data = self.data[:index] + sub + self.data[index + 1 :]\n\n    def __delitem__(self, index):\n        if index < 0:\n            index += len(self.data)\n        if index < 0 or index >= len(self.data):\n            raise IndexError\n        self.data = self.data[:index] + self.data[index + 1 :]\n\n    def __setslice__(self, start, end, sub):\n        start = max(start, 0)\n        end = max(end, 0)\n        if isinstance(sub, UserString):\n            self.data = self.data[:start] + sub.data + self.data[end:]\n        elif isinstance(sub, bytes):\n            self.data = self.data[:start] + sub + self.data[end:]\n        else:\n            self.data = self.data[:start] + str(sub).encode() + self.data[end:]\n\n    def __delslice__(self, start, end):\n        start = max(start, 0)\n        end = max(end, 0)\n        self.data = self.data[:start] + self.data[end:]\n\n    def immutable(self):\n        return UserString(self.data)\n\n    def __iadd__(self, other):\n        if isinstance(other, UserString):\n            self.data += other.data\n        elif isinstance(other, bytes):\n       
     self.data += other\n        else:\n            self.data += str(other).encode()\n        return self\n\n    def __imul__(self, n):\n        self.data *= n\n        return self\n\n\nclass String(MutableString, ctypes.Union):\n\n    _fields_ = [(\"raw\", ctypes.POINTER(ctypes.c_char)), (\"data\", ctypes.c_char_p)]\n\n    def __init__(self, obj=b\"\"):\n        if isinstance(obj, (bytes, UserString)):\n            self.data = bytes(obj)\n        else:\n            self.raw = obj\n\n    def __len__(self):\n        return self.data and len(self.data) or 0\n\n    def from_param(cls, obj):\n        # Convert None or 0\n        if obj is None or obj == 0:\n            return cls(ctypes.POINTER(ctypes.c_char)())\n\n        # Convert from String\n        elif isinstance(obj, String):\n            return obj\n\n        # Convert from bytes\n        elif isinstance(obj, bytes):\n            return cls(obj)\n\n        # Convert from str\n        elif isinstance(obj, str):\n            return cls(obj.encode())\n\n        # Convert from c_char_p\n        elif isinstance(obj, ctypes.c_char_p):\n            return obj\n\n        # Convert from POINTER(ctypes.c_char)\n        elif isinstance(obj, ctypes.POINTER(ctypes.c_char)):\n            return obj\n\n        # Convert from raw pointer\n        elif isinstance(obj, int):\n            return cls(ctypes.cast(obj, ctypes.POINTER(ctypes.c_char)))\n\n        # Convert from ctypes.c_char array\n        elif isinstance(obj, ctypes.c_char * len(obj)):\n            return obj\n\n        # Convert from object\n        else:\n            return String.from_param(obj._as_parameter_)\n\n    from_param = classmethod(from_param)\n\n\ndef ReturnString(obj, func=None, arguments=None):\n    return String.from_param(obj)\n\n\n# As of ctypes 1.0, ctypes does not support custom error-checking\n# functions on callbacks, nor does it support custom datatypes on\n# callbacks, so we must ensure that all callbacks return\n# primitive datatypes.\n#\n# 
Non-primitive return values wrapped with UNCHECKED won't be\n# typechecked, and will be converted to ctypes.c_void_p.\ndef UNCHECKED(type):\n    if hasattr(type, \"_type_\") and isinstance(type._type_, str) and type._type_ != \"P\":\n        return type\n    else:\n        return ctypes.c_void_p\n\n\n# ctypes doesn't have direct support for variadic functions, so we have to write\n# our own wrapper class\nclass _variadic_function(object):\n    def __init__(self, func, restype, argtypes, errcheck):\n        self.func = func\n        self.func.restype = restype\n        self.argtypes = argtypes\n        if errcheck:\n            self.func.errcheck = errcheck\n\n    def _as_parameter_(self):\n        # So we can pass this variadic function as a function pointer\n        return self.func\n\n    def __call__(self, *args):\n        fixed_args = []\n        i = 0\n        for argtype in self.argtypes:\n            # Typecheck what we can\n            fixed_args.append(argtype.from_param(args[i]))\n            i += 1\n        return self.func(*fixed_args + list(args[i:]))\n\n\ndef ord_if_char(value):\n    \"\"\"\n    Simple helper used for casts to simple builtin types:  if the argument is a\n    string type, it will be converted to it's ordinal value.\n\n    This function will raise an exception if the argument is string with more\n    than one characters.\n    \"\"\"\n    return ord(value) if (isinstance(value, bytes) or isinstance(value, str)) else value\n\n# End preamble\n\n_libs = {}\n_libdirs = []\n\n# Begin loader\n\n\"\"\"\nLoad libraries - appropriately for all our supported platforms\n\"\"\"\n# ----------------------------------------------------------------------------\n# Copyright (c) 2008 David James\n# Copyright (c) 2006-2008 Alex Holkner\n# All rights reserved.\n#\n# Redistribution and use in source and binary forms, with or without\n# modification, are permitted provided that the following conditions\n# are met:\n#\n#  * Redistributions of source code must 
retain the above copyright\n#    notice, this list of conditions and the following disclaimer.\n#  * Redistributions in binary form must reproduce the above copyright\n#    notice, this list of conditions and the following disclaimer in\n#    the documentation and/or other materials provided with the\n#    distribution.\n#  * Neither the name of pyglet nor the names of its\n#    contributors may be used to endorse or promote products\n#    derived from this software without specific prior written\n#    permission.\n#\n# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n# \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS\n# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE\n# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,\n# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,\n# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;\n# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER\n# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT\n# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN\n# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE\n# POSSIBILITY OF SUCH DAMAGE.\n# ----------------------------------------------------------------------------\n\nimport ctypes\nimport ctypes.util\nimport glob\nimport os.path\nimport platform\nimport re\nimport sys\n\n\ndef _environ_path(name):\n    \"\"\"Split an environment variable into a path-like list elements\"\"\"\n    if name in os.environ:\n        return os.environ[name].split(\":\")\n    return []\n\n\nclass LibraryLoader:\n    \"\"\"\n    A base class For loading of libraries ;-)\n    Subclasses load libraries for specific platforms.\n    \"\"\"\n\n    # library names formatted specifically for platforms\n    name_formats = [\"%s\"]\n\n    class Lookup:\n        \"\"\"Looking up calling 
conventions for a platform\"\"\"\n\n        mode = ctypes.DEFAULT_MODE\n\n        def __init__(self, path):\n            super(LibraryLoader.Lookup, self).__init__()\n            self.access = dict(cdecl=ctypes.CDLL(path, self.mode))\n\n        def get(self, name, calling_convention=\"cdecl\"):\n            \"\"\"Return the given name according to the selected calling convention\"\"\"\n            if calling_convention not in self.access:\n                raise LookupError(\n                    \"Unknown calling convention '{}' for function '{}'\".format(\n                        calling_convention, name\n                    )\n                )\n            return getattr(self.access[calling_convention], name)\n\n        def has(self, name, calling_convention=\"cdecl\"):\n            \"\"\"Return True if this given calling convention finds the given 'name'\"\"\"\n            if calling_convention not in self.access:\n                return False\n            return hasattr(self.access[calling_convention], name)\n\n        def __getattr__(self, name):\n            return getattr(self.access[\"cdecl\"], name)\n\n    def __init__(self):\n        self.other_dirs = []\n\n    def __call__(self, libname):\n        \"\"\"Given the name of a library, load it.\"\"\"\n        paths = self.getpaths(libname)\n\n        for path in paths:\n            # noinspection PyBroadException\n            try:\n                return self.Lookup(path)\n            except Exception:  # pylint: disable=broad-except\n                pass\n\n        raise ImportError(\"Could not load %s.\" % libname)\n\n    def getpaths(self, libname):\n        \"\"\"Return a list of paths where the library might be found.\"\"\"\n        if os.path.isabs(libname):\n            yield libname\n        else:\n            # search through a prioritized series of locations for the library\n\n            # we first search any specific directories identified by user\n            for dir_i in self.other_dirs:\n      
          for fmt in self.name_formats:\n                    # dir_i should be absolute already\n                    yield os.path.join(dir_i, fmt % libname)\n\n            # check if this code is even stored in a physical file\n            try:\n                this_file = __file__\n            except NameError:\n                this_file = None\n\n            # then we search the directory where the generated python interface is stored\n            if this_file is not None:\n                for fmt in self.name_formats:\n                    yield os.path.abspath(os.path.join(os.path.dirname(__file__), fmt % libname))\n\n            # now, use the ctypes tools to try to find the library\n            for fmt in self.name_formats:\n                path = ctypes.util.find_library(fmt % libname)\n                if path:\n                    yield path\n\n            # then we search all paths identified as platform-specific lib paths\n            for path in self.getplatformpaths(libname):\n                yield path\n\n            # Finally, we'll try the users current working directory\n            for fmt in self.name_formats:\n                yield os.path.abspath(os.path.join(os.path.curdir, fmt % libname))\n\n    def getplatformpaths(self, _libname):  # pylint: disable=no-self-use\n        \"\"\"Return all the library paths available in this platform\"\"\"\n        return []\n\n\n# Darwin (Mac OS X)\n\n\nclass DarwinLibraryLoader(LibraryLoader):\n    \"\"\"Library loader for MacOS\"\"\"\n\n    name_formats = [\n        \"lib%s.dylib\",\n        \"lib%s.so\",\n        \"lib%s.bundle\",\n        \"%s.dylib\",\n        \"%s.so\",\n        \"%s.bundle\",\n        \"%s\",\n    ]\n\n    class Lookup(LibraryLoader.Lookup):\n        \"\"\"\n        Looking up library files for this platform (Darwin aka MacOS)\n        \"\"\"\n\n        # Darwin requires dlopen to be called with mode RTLD_GLOBAL instead\n        # of the default RTLD_LOCAL.  
Without this, you end up with\n        # libraries not being loadable, resulting in \"Symbol not found\"\n        # errors\n        mode = ctypes.RTLD_GLOBAL\n\n    def getplatformpaths(self, libname):\n        if os.path.pathsep in libname:\n            names = [libname]\n        else:\n            names = [fmt % libname for fmt in self.name_formats]\n\n        for directory in self.getdirs(libname):\n            for name in names:\n                yield os.path.join(directory, name)\n\n    @staticmethod\n    def getdirs(libname):\n        \"\"\"Implements the dylib search as specified in Apple documentation:\n\n        http://developer.apple.com/documentation/DeveloperTools/Conceptual/\n            DynamicLibraries/Articles/DynamicLibraryUsageGuidelines.html\n\n        Before commencing the standard search, the method first checks\n        the bundle's ``Frameworks`` directory if the application is running\n        within a bundle (OS X .app).\n        \"\"\"\n\n        dyld_fallback_library_path = _environ_path(\"DYLD_FALLBACK_LIBRARY_PATH\")\n        if not dyld_fallback_library_path:\n            dyld_fallback_library_path = [\n                os.path.expanduser(\"~/lib\"),\n                \"/usr/local/lib\",\n                \"/usr/lib\",\n            ]\n\n        dirs = []\n\n        if \"/\" in libname:\n            dirs.extend(_environ_path(\"DYLD_LIBRARY_PATH\"))\n        else:\n            dirs.extend(_environ_path(\"LD_LIBRARY_PATH\"))\n            dirs.extend(_environ_path(\"DYLD_LIBRARY_PATH\"))\n            dirs.extend(_environ_path(\"LD_RUN_PATH\"))\n\n        if hasattr(sys, \"frozen\") and getattr(sys, \"frozen\") == \"macosx_app\":\n            dirs.append(os.path.join(os.environ[\"RESOURCEPATH\"], \"..\", \"Frameworks\"))\n\n        dirs.extend(dyld_fallback_library_path)\n\n        return dirs\n\n\n# Posix\n\n\nclass PosixLibraryLoader(LibraryLoader):\n    \"\"\"Library loader for POSIX-like systems (including Linux)\"\"\"\n\n    _ld_so_cache 
= None\n\n    _include = re.compile(r\"^\\s*include\\s+(?P<pattern>.*)\")\n\n    name_formats = [\"lib%s.so\", \"%s.so\", \"%s\"]\n\n    class _Directories(dict):\n        \"\"\"Deal with directories\"\"\"\n\n        def __init__(self):\n            dict.__init__(self)\n            self.order = 0\n\n        def add(self, directory):\n            \"\"\"Add a directory to our current set of directories\"\"\"\n            if len(directory) > 1:\n                directory = directory.rstrip(os.path.sep)\n            # only adds and updates order if exists and not already in set\n            if not os.path.exists(directory):\n                return\n            order = self.setdefault(directory, self.order)\n            if order == self.order:\n                self.order += 1\n\n        def extend(self, directories):\n            \"\"\"Add a list of directories to our set\"\"\"\n            for a_dir in directories:\n                self.add(a_dir)\n\n        def ordered(self):\n            \"\"\"Sort the list of directories\"\"\"\n            return (i[0] for i in sorted(self.items(), key=lambda d: d[1]))\n\n    def _get_ld_so_conf_dirs(self, conf, dirs):\n        \"\"\"\n        Recursive function to help parse all ld.so.conf files, including proper\n        handling of the `include` directive.\n        \"\"\"\n\n        try:\n            with open(conf) as fileobj:\n                for dirname in fileobj:\n                    dirname = dirname.strip()\n                    if not dirname:\n                        continue\n\n                    match = self._include.match(dirname)\n                    if not match:\n                        dirs.add(dirname)\n                    else:\n                        for dir2 in glob.glob(match.group(\"pattern\")):\n                            self._get_ld_so_conf_dirs(dir2, dirs)\n        except IOError:\n            pass\n\n    def _create_ld_so_cache(self):\n        # Recreate search path followed by ld.so.  
This is going to be\n        # slow to build, and incorrect (ld.so uses ld.so.cache, which may\n        # not be up-to-date).  Used only as fallback for distros without\n        # /sbin/ldconfig.\n        #\n        # We assume the DT_RPATH and DT_RUNPATH binary sections are omitted.\n\n        directories = self._Directories()\n        for name in (\n            \"LD_LIBRARY_PATH\",\n            \"SHLIB_PATH\",  # HP-UX\n            \"LIBPATH\",  # OS/2, AIX\n            \"LIBRARY_PATH\",  # BE/OS\n        ):\n            if name in os.environ:\n                directories.extend(os.environ[name].split(os.pathsep))\n\n        self._get_ld_so_conf_dirs(\"/etc/ld.so.conf\", directories)\n\n        bitage = platform.architecture()[0]\n\n        unix_lib_dirs_list = []\n        if bitage.startswith(\"64\"):\n            # prefer 64 bit if that is our arch\n            unix_lib_dirs_list += [\"/lib64\", \"/usr/lib64\"]\n\n        # must include standard libs, since those paths are also used by 64 bit\n        # installs\n        unix_lib_dirs_list += [\"/lib\", \"/usr/lib\"]\n        if sys.platform.startswith(\"linux\"):\n            # Try and support multiarch work in Ubuntu\n            # https://wiki.ubuntu.com/MultiarchSpec\n            if bitage.startswith(\"32\"):\n                # Assume Intel/AMD x86 compat\n                unix_lib_dirs_list += [\"/lib/i386-linux-gnu\", \"/usr/lib/i386-linux-gnu\"]\n            elif bitage.startswith(\"64\"):\n                # Assume Intel/AMD x86 compatible\n                unix_lib_dirs_list += [\n                    \"/lib/x86_64-linux-gnu\",\n                    \"/usr/lib/x86_64-linux-gnu\",\n                ]\n            else:\n                # guess...\n                unix_lib_dirs_list += glob.glob(\"/lib/*linux-gnu\")\n        directories.extend(unix_lib_dirs_list)\n\n        cache = {}\n        lib_re = re.compile(r\"lib(.*)\\.s[ol]\")\n        # ext_re = re.compile(r\"\\.s[ol]$\")\n        for our_dir in 
directories.ordered():\n            try:\n                for path in glob.glob(\"%s/*.s[ol]*\" % our_dir):\n                    file = os.path.basename(path)\n\n                    # Index by filename\n                    cache_i = cache.setdefault(file, set())\n                    cache_i.add(path)\n\n                    # Index by library name\n                    match = lib_re.match(file)\n                    if match:\n                        library = match.group(1)\n                        cache_i = cache.setdefault(library, set())\n                        cache_i.add(path)\n            except OSError:\n                pass\n\n        self._ld_so_cache = cache\n\n    def getplatformpaths(self, libname):\n        if self._ld_so_cache is None:\n            self._create_ld_so_cache()\n\n        result = self._ld_so_cache.get(libname, set())\n        for i in result:\n            # we iterate through all found paths for library, since we may have\n            # actually found multiple architectures or other library types that\n            # may not load\n            yield i\n\n\n# Windows\n\n\nclass WindowsLibraryLoader(LibraryLoader):\n    \"\"\"Library loader for Microsoft Windows\"\"\"\n\n    name_formats = [\"%s.dll\", \"lib%s.dll\", \"%slib.dll\", \"%s\"]\n\n    class Lookup(LibraryLoader.Lookup):\n        \"\"\"Lookup class for Windows libraries...\"\"\"\n\n        def __init__(self, path):\n            super(WindowsLibraryLoader.Lookup, self).__init__(path)\n            self.access[\"stdcall\"] = ctypes.windll.LoadLibrary(path)\n\n\n# Platform switching\n\n# If your value of sys.platform does not appear in this dict, please contact\n# the Ctypesgen maintainers.\n\nloaderclass = {\n    \"darwin\": DarwinLibraryLoader,\n    \"cygwin\": WindowsLibraryLoader,\n    \"win32\": WindowsLibraryLoader,\n    \"msys\": WindowsLibraryLoader,\n}\n\nload_library = loaderclass.get(sys.platform, PosixLibraryLoader)()\n\n\ndef add_library_search_dirs(other_dirs):\n    
\"\"\"\n    Add libraries to search paths.\n    If library paths are relative, convert them to absolute with respect to this\n    file's directory\n    \"\"\"\n    for path in other_dirs:\n        if not os.path.isabs(path):\n            path = os.path.abspath(path)\n        load_library.other_dirs.append(path)\n\n\ndel loaderclass\n\n# End loader\n\nadd_library_search_dirs([])\n\n# Begin libraries\n_libs[\"demolib.so\"] = load_library(\"demolib.so\")\n\n# 1 libraries\n# End libraries\n\n# No modules\n\n# /home/olsonse/src/ctypesgen/demo/demolib.h: 6\nif _libs[\"demolib.so\"].has(\"trivial_add\", \"cdecl\"):\n    trivial_add = _libs[\"demolib.so\"].get(\"trivial_add\", \"cdecl\")\n    trivial_add.argtypes = [c_int, c_int]\n    trivial_add.restype = c_int\n\n# No inserted files\n\n# No prefix-stripping\n\n"
  },
  {
    "path": "docs/publishing.md",
    "content": "# How to Publish a New Release\n\n## Versioning\n\nVersioning within ctypesgen follows these general rules:\n\n* Versions are all defined with specific reference to a commit that is relative\n  to a Git tag.\n* Versions numbers include enough information to find the exact commit that\n  represents the version release.\n* All tags should follow the format of:  x.y.z\n    * x : Major revision with major differences of capabilities as compared to\n          other major revisions.  The definition of \"major capabilities\" is a\n          somewhat subjective concept, dependent on the developers.\n    * y : Minor revision with incompatible differences of interfaces as compared\n          to earlier revisions.  Interfaces that are considered to impact the\n          minor revision number are external interfaces such as the command line\n          or perhaps python version support.\n    * z : Micro revision indicating a general acceptance of multiple patches\n          since last tag. 
This number may be used to help mark minor development\n          milestones.\n\nBy using the Git command `git describe`, a unique identifier of the full version\nstring can be shown as:\n\n  * x.y.z[-n-g*sha1*]\n    where [-n-g*sha1*] shows up *automatically* if changes have been made since\n    the last tag\n  * n : Indicates the number of commits since the last tag\n  * g*sha1*: Indicates the abreviated SHA1 hash of the latest commit\n\nThus, the version *1.0.0-2* means that the last tag before that version was\n*1.0.0* and the version *1.0.0-2* is exactly 2 commits after the tag *1.0.0*.\n\nTo re-baseline the [-n-g*sha1*] portion showing up in `git describe` (i.e.\nremove it until another commit is added), we simply add another tag following\nthe *x.y.z* format.\n\n:exclamation: The version set for release should comply with\n[PEP 440](https://peps.python.org/pep-0440/).\n\n## Last changes on repo\n\n```bash\n# Update content and set version of latest changes in CHANGELOG.md\nvim CHANGELOG.md\n...\ngit commit -a -S\n\nversion=\"2.2.5\"\ncommit=$(git rev-parse HEAD)\ntag_message=\"ctypesgen v${version}\"\ngit tag -a -m \"$tag_message\" $version $commit\n\ngit push -u --tags <repo> <branch>\n```\n\n## Publish on GitHub\n\n- Go to <https://github.com/ctypesgen/ctypesgen/releases/new>.\n- Choose the newly created tag and fill in title (preferable in the format of\n  'ctypesgen v.X.Y.Z') and description (if appropriate, use the content for this\n  version listed in CHANGELOG.md).\n- Publish.\n\n## Post-release\n\nAfter the release, a new headline `### Unreleased` should be added at the top\nof the file `CHANGELOG.md`.\n"
  },
  {
    "path": "pyproject.toml",
    "content": "[build-system]\nrequires = [\"setuptools>=64\", \"setuptools_scm>=7.1\"]\nbuild-backend = \"setuptools.build_meta\"\n\n[project]\nname = \"ctypesgen\"\ndescription = \"Python wrapper generator for ctypes\"\nlicense = { text = \"BSD-2-Clause\" }\nclassifiers = [\n    \"License :: OSI Approved :: BSD License\",\n    \"Programming Language :: Python :: 3\",\n    \"Programming Language :: Python :: 3.7\",\n    \"Programming Language :: Python :: 3.8\",\n    \"Programming Language :: Python :: 3.9\",\n    \"Programming Language :: Python :: 3.10\",\n    \"Programming Language :: Python :: 3 :: Only\",\n    \"Development Status :: 4 - Beta\",\n    \"Operating System :: OS Independent\",\n    \"Intended Audience :: Developers\",\n    \"Topic :: Software Development :: Code Generators\",\n    \"Topic :: Software Development :: Pre-processors\",\n    \"Topic :: Software Development :: Build Tools\",\n    \"Environment :: Console\",\n]\ndynamic = [\"readme\", \"version\"]\nrequires-python = \">=3.7\"\n\n[project.urls]\nHomepage = \"https://github.com/ctypesgen/ctypesgen\"\nRepository = \"https://github.com/ctypesgen/ctypesgen.git\"\nIssues = \"https://github.com/ctypesgen/ctypesgen/issues\"\nChangelog = \"https://github.com/ctypesgen/ctypesgen/blob/master/CHANGELOG.md\"\n\n[project.scripts]\nctypesgen = \"ctypesgen.__main__:main\"\n\n[tool.setuptools]\npackage-dir = {}\ninclude-package-data = true\n\n[tool.setuptools.packages.find]\nexclude = [\"tests*\"]\nnamespaces = false\n\n[tool.setuptools.dynamic.readme]\nfile = [\"README.md\", \"LICENSE\", \"CHANGELOG.md\"]\ncontent-type = \"text/markdown\"\n\n[tool.setuptools_scm]\nwrite_to = \"ctypesgen/VERSION\"\nwrite_to_template = \"{version}\"\n\n[tool.black]\nline-length = 100\nexclude = '.*tab.py|ctypesgen/parser/cgrammar.py|ctypesgen/parser/lex.py|ctypesgen/parser/yacc.py'\ntarget-version = [\"py37\", \"py38\", \"py39\"]\n"
  },
  {
    "path": "run.py",
    "content": "#!/usr/bin/env python3\n\nimport sys\nimport os\n\n# ensure that we can load the ctypesgen library\nTHIS_DIR = os.path.dirname(__file__)\nsys.path.insert(0, THIS_DIR)\n\nfrom ctypesgen.__main__ import main  # noqa: E402\n\nif __name__ == \"__main__\":\n    main()\n"
  },
  {
    "path": "setup.py",
    "content": "#!/usr/bin/env python3\n\nfrom setuptools import setup\n\nif __name__ == \"__main__\":\n    setup()\n"
  },
  {
    "path": "tests/.gitignore",
    "content": "temp.h\ntemp.py\ncommon/\n"
  },
  {
    "path": "tests/__init__.py",
    "content": ""
  },
  {
    "path": "tests/ctypesgentest.py",
    "content": "\"\"\"ctypesgentest is a simple module for testing ctypesgen on various C constructs.\n\nIt consists of a single function, test(). test() takes a string that represents\na C header file, along with some keyword arguments representing options. It\nprocesses the header using ctypesgen and returns a tuple containing the\nresulting module object and the output that ctypesgen produced.\n\"\"\"\n\nimport os\nimport sys\nfrom io import StringIO\nimport glob\nimport json\nfrom contextlib import contextmanager\nimport types\nimport subprocess\nfrom shutil import rmtree\n\nfrom ctypesgen import options, messages, parser, processor\nfrom ctypesgen import printer_python, printer_json, VERSION\n\nmodule_factory = types.ModuleType\n\n# set redirect_stdout to False if using console based debugger like pdb\nredirect_stdout = True\n\n\n@contextmanager\ndef redirect(stdout=sys.stdout):\n    backup = sys.stdout\n    sys.stdout = stdout\n    try:\n        yield stdout\n    finally:\n        sys.stdout = backup\n\n\ndef generate(header, **more_options):\n\n    assert isinstance(header, str)\n    with open(\"temp.h\", \"wb\") as f:\n        f.write(header.encode('utf-8'))\n\n    test_options = options.get_default_options()\n    test_options.headers = [\"temp.h\"]\n    for opt, val in more_options.items():\n        setattr(test_options, opt, val)\n\n    if redirect_stdout:\n        # Redirect output\n        sys.stdout = StringIO()\n\n    # Step 1: Parse\n    descriptions = parser.parse(test_options.headers, test_options)\n\n    # Step 2: Process\n    processor.process(descriptions, test_options)\n\n    # Step 3: Print\n    if test_options.output_language.startswith(\"py\"):\n\n        def module_from_code(name, python_code):\n            module = module_factory(name)\n            exec(python_code, module.__dict__)\n            return module\n\n        # we have to redirect stdout, as WrapperPrinter is only able to write\n        # to files or stdout\n        with 
redirect(stdout=StringIO()) as printer_output:\n            # do not discard WrapperPrinter object, as the target file gets\n            # closed on printer deletion\n            _ = printer_python.WrapperPrinter(None, test_options, descriptions)\n            generated_python_code = printer_output.getvalue()\n            module = module_from_code(\"temp\", generated_python_code)\n            retval = module\n\n    elif test_options.output_language == \"json\":\n        with redirect(stdout=StringIO()) as printer_output:\n            # do not discard WrapperPrinter object, as the target file gets\n            # closed on printer deletion\n            _ = printer_json.WrapperPrinter(None, test_options, descriptions)\n            JSON = json.loads(printer_output.getvalue())\n            retval = JSON\n    else:\n        raise RuntimeError(\"No such output language `\" + test_options.output_language + \"'\")\n\n    if redirect_stdout:\n        # Un-redirect output\n        output = sys.stdout.getvalue()\n        sys.stdout.close()\n        sys.stdout = sys.__stdout__\n    else:\n        output = \"\"\n\n    return retval, output\n\n\ndef cleanup(filepattern=\"temp.*\"):\n    fnames = glob.glob(filepattern)\n    for fname in fnames:\n        os.unlink(fname)\n\n\ndef set_logging_level(log_level):\n    messages.log.setLevel(log_level)\n\n\ndef ctypesgen_version():\n    return VERSION\n\n\ndef sort_anon_fn(anon_tag):\n    return int(anon_tag.split(\"_\")[1])\n\n\nclass JsonHelper:\n    \"\"\"\n    Utility class preparing generated JSON result for testing.\n\n    JSON stores the path to some source items. These need to be genericized in\n    order for tests to succeed on all machines/user accounts. 
This is also the\n    case for \"anon_\" tags, which are \"reset\" for each test to start from\n    \"anon_1\".\n    \"\"\"\n\n    def __init__(self):\n        self.anons = list()\n\n    def prepare(self, json):\n        \"\"\"Prepares generated JSON result for testing\"\"\"\n        self._search_anon_tags(json)\n        unique_list = list(set(self.anons))\n        unique_sorted_list = sorted(unique_list, key=sort_anon_fn)\n\n        mapped_tags = dict()\n        counter = 1\n        for i in unique_sorted_list:\n            mapped_tags[i] = \"anon_{0}\".format(counter)\n            counter += 1\n\n        for (old_tag, new_tag) in mapped_tags.items():\n            self._replace_anon_tag(json, old_tag, new_tag)\n\n    def _replace_anon_tag(self, json, tag, new_tag):\n        \"\"\"Replaces source paths and resets anon_ tags to increment from 1\"\"\"\n        if isinstance(json, list):\n            for item in json:\n                self._replace_anon_tag(item, tag, new_tag)\n            return\n        if isinstance(json, dict):\n            for key, value in json.items():\n                if key == \"name\" and isinstance(value, str):\n                    if value == tag:\n                        json[key] = new_tag\n                elif key == \"tag\" and isinstance(value, str):\n                    if value == tag:\n                        json[key] = new_tag\n                elif key == \"src\" and isinstance(value, list):\n                    if value and \"temp.h\" in value[0]:\n                        value[0] = \"/some-path/temp.h\"\n                else:\n                    self._replace_anon_tag(value, tag, new_tag)\n\n    def _search_anon_tags(self, json):\n        \"\"\"Search for anon_ tags\"\"\"\n        if isinstance(json, list):\n            for item in json:\n                self._search_anon_tags(item)\n            return\n        if isinstance(json, dict):\n            for key, value in json.items():\n                if key == \"name\" and 
isinstance(value, str):\n                    if value.startswith(\"anon_\"):\n                        self.anons.append(value)\n                else:\n                    self._search_anon_tags(value)\n\n\n#\n# Functions facilitating tests of use of cross inclusion\n#\n\n\nCOMMON_DIR = os.path.join(os.path.dirname(__file__), \"common\")\n\n\ndef generate_common():\n    common_lib = \"libcommon.dll\" if sys.platform == \"win32\" else \"libcommon.so\"\n\n    _create_common_files()\n\n    _compile_common(common_lib)\n\n    for file_name in [\"a\", \"b\"]:\n        _generate_common(file_name, common_lib)\n\n    for file_name in [\"a\", \"b\"]:\n        _generate_common(file_name, common_lib, False)\n\n\ndef cleanup_common():\n    # Attention: currently not working on MS Windows.\n    # cleanup_common() tries to delete \"common.dll\" while it is still loaded\n    # by ctypes. See unittest for further details.\n    rmtree(COMMON_DIR)\n\n\ndef _compile_common(common_lib):\n    subprocess.run([\"gcc\", \"-c\", f\"{COMMON_DIR}/a.c\", \"-o\", f\"{COMMON_DIR}/a.o\"])\n    subprocess.run([\"gcc\", \"-c\", f\"{COMMON_DIR}/b.c\", \"-o\", f\"{COMMON_DIR}/b.o\"])\n    subprocess.run(\n        [\n            \"gcc\",\n            \"-shared\",\n            \"-o\",\n            f\"{COMMON_DIR}/{common_lib}\",\n            f\"{COMMON_DIR}/a.o\",\n            f\"{COMMON_DIR}/b.o\",\n        ]\n    )\n\n\ndef _generate_common(file_name, common_lib, embed_preamble=True):\n    test_options = options.get_default_options()\n    test_options.headers = [f\"{COMMON_DIR}/{file_name}.h\"]\n    test_options.include_search_paths = [COMMON_DIR]\n    test_options.libraries = [common_lib]\n    test_options.compile_libdirs = [COMMON_DIR]\n    test_options.embed_preamble = embed_preamble\n    if embed_preamble:\n        output = f\"{COMMON_DIR}/{file_name}.py\"\n    else:\n        output = f\"{COMMON_DIR}/{file_name}2.py\"\n\n    descriptions = parser.parse(test_options.headers, test_options)\n    
processor.process(descriptions, test_options)\n    printer_python.WrapperPrinter(output, test_options, descriptions)\n\n\ndef _create_common_files():\n    a_h = \"\"\"#include \"common.h\"\n\nvoid foo(struct mystruct *m);\n\n\"\"\"\n    a_c = \"\"\"#include \"a.h\"\n\nvoid foo(struct mystruct *m) {\n\n}\n\n\"\"\"\n    b_h = \"\"\"#include \"common.h\"\n\nvoid bar(struct mystruct *m);\n\n\"\"\"\n    b_c = \"\"\"#include \"b.h\"\n\nvoid bar(struct mystruct *m) {\n\n}\n\n\"\"\"\n    common_h = \"\"\"struct mystruct {\n    int a;\n};\n\n\"\"\"\n\n    try:\n        os.mkdir(COMMON_DIR)\n    except FileExistsError:\n        rmtree(COMMON_DIR)\n        os.mkdir(COMMON_DIR)\n\n    names = {\"a.h\": a_h, \"a.c\": a_c, \"b.h\": b_h, \"b.c\": b_c, \"common.h\": common_h}\n    for (name, source) in names.items():\n        with open(f\"{COMMON_DIR}/{name}\", \"w\") as f:\n            f.write(source)\n"
  },
  {
    "path": "tests/testsuite.py",
    "content": "#!/usr/bin/env python3\n\"\"\"Simple test suite using unittest.\nBy clach04 (Chris Clark).\n\nCalling:\n\n    python3 -m unittest tests.testsuite\n\n    Calling a specific test only:\n\n    python3 -m unittest tests.testsuite.[TestCase class].[test name]\n    e.g.:\n    python3 -m unittest tests.testsuite.StdBoolTest.test_stdbool_type\n\nor\n    pytest -v  --showlocals tests/testsuite.py\n\n    pytest -v  --showlocals tests/testsuite.py::StdBoolTest::test_stdbool_type\n\nCould use any unitest compatible test runner (nose, etc.)\n\nAims to test for regressions. Where possible use stdlib to\navoid the need to compile C code.\n\"\"\"\n\nimport sys\nimport os\nimport ctypes\nimport json as JSON\nimport math\nimport unittest\nimport logging\nfrom subprocess import Popen, PIPE\n\nfrom tests.ctypesgentest import (\n    cleanup,\n    cleanup_common,\n    ctypesgen_version,\n    generate,\n    generate_common,\n    JsonHelper,\n    set_logging_level,\n)\n\nTEST_DIR = os.path.abspath(os.path.dirname(__file__))\n\n\ndef compare_json(test_instance, json, json_ans, verbose=False):\n    json_helper = JsonHelper()\n    json_helper.prepare(json)\n\n    print_excess = False\n    try:\n        test_instance.assertEqual(len(json), len(json_ans))\n    except Exception:\n        if verbose:\n            print(\n                \"JSONs do not have same length: \",\n                len(json),\n                \"generated vs\",\n                len(json_ans),\n                \"stored\",\n            )\n            print_excess = True\n        else:\n            raise\n\n    # first fix paths that exist inside JSON to avoid user-specific paths:\n    for i, ith_json_ans in zip(json, json_ans):\n        try:\n            test_instance.assertEqual(i, ith_json_ans)\n        except Exception:\n            if verbose:\n                print(\"\\nFailed JSON for: \", i[\"name\"])\n                print(\"GENERATED:\\n\", i, \"\\nANS:\\n\", ith_json_ans)\n                
print(\"FAILED FOR================\", JSON.dumps(i, indent=4))\n                print(\"GENERATED =============\", JSON.dumps(ith_json_ans, indent=4))\n            raise\n\n    if print_excess:\n        if len(json) > len(json_ans):\n            j, jlen, jlabel = json, len(json_ans), \"generated\"\n        else:\n            j, jlen, jlabel = json_ans, len(json), \"stored\"\n        import pprint\n\n        print(\"Excess JSON content from\", jlabel, \"content:\")\n        pprint.pprint(j[jlen:])\n\n\ndef compute_packed(modulo, fields):\n    packs = [\n        (\n            modulo * int(ctypes.sizeof(f) / modulo)\n            + modulo * (1 if (ctypes.sizeof(f) % modulo) else 0)\n        )\n        for f in fields\n    ]\n    return sum(packs)\n\n\nclass StdlibTest(unittest.TestCase):\n    @classmethod\n    def setUpClass(cls):\n        header_str = \"#include <stdlib.h>\\n\"\n        if sys.platform == \"win32\":\n            # pick something from %windir%\\system32\\msvc*dll that include stdlib\n            libraries = [\"msvcrt.dll\"]\n            libraries = [\"msvcrt\"]\n        elif sys.platform.startswith(\"linux\"):\n            libraries = [\"libc.so.6\"]\n        else:\n            libraries = [\"libc\"]\n        cls.module, _ = generate(header_str, libraries=libraries, all_headers=True)\n\n    @classmethod\n    def tearDownClass(cls):\n        del cls.module\n        cleanup()\n\n    def test_getenv_returns_string(self):\n        \"\"\"Issue 8 - Regression for crash with 64 bit and bad strings on 32 bit.\n        See http://code.google.com/p/ctypesgen/issues/detail?id=8\n        Test that we get a valid (non-NULL, non-empty) string back\n        \"\"\"\n        module = StdlibTest.module\n\n        if sys.platform == \"win32\":\n            # Check a variable that is already set\n            env_var_name = (\n                \"USERNAME\"  # this is always set (as is windir, ProgramFiles, USERPROFILE, etc.)\n            )\n            expect_result = 
os.environ[env_var_name]\n            self.assertTrue(expect_result, \"this should not be None or empty\")\n            # reason for using an existing OS variable is that unless the\n            # MSVCRT dll imported is the exact same one that Python was\n            # built with you can't share structures, see\n            # http://msdn.microsoft.com/en-us/library/ms235460.aspx\n            # \"Potential Errors Passing CRT Objects Across DLL Boundaries\"\n        else:\n            env_var_name = \"HELLO\"\n            os.environ[env_var_name] = \"WORLD\"  # This doesn't work under win32\n            expect_result = \"WORLD\"\n\n        result = str(module.getenv(env_var_name))\n        self.assertEqual(expect_result, result)\n\n    def test_getenv_returns_null(self):\n        \"\"\"Related to issue 8. Test getenv of unset variable.\"\"\"\n        module = StdlibTest.module\n        env_var_name = \"NOT SET\"\n        expect_result = None\n        try:\n            # ensure variable is not set, ignoring not set errors\n            del os.environ[env_var_name]\n        except KeyError:\n            pass\n        result = module.getenv(env_var_name)\n        self.assertEqual(expect_result, result)\n\n\n# This test is currently not working on MS Windows. The reason is the call of\n# cleanup_common(), which tries to delete \"common.dll\" while it is still loaded\n# by ctypes.\n# \"common.dll\" gets loaded in multiple places: first, every call of\n# ctypesgen.processor.process() in generate_common() loads the dll to check\n# availability of the symbols mentioned in the header file.\n# In addition, each test imports the module generated by ctypesgen, which again\n# means the dll is loaded to memory.\n# On Linux/macOS this is no problem, as .so libaries can be overwritten/deleted\n# on file system while still loaded to memory.\n@unittest.skipIf(\n    sys.platform == \"win32\",\n    \"Currently not working on Windows. 
See code comment for details.\"\n)\nclass CommonHeaderTest(unittest.TestCase):\n    @classmethod\n    def setUpClass(cls):\n        generate_common()\n\n    @classmethod\n    def tearDownClass(cls):\n        cleanup_common()\n\n    @unittest.expectedFailure\n    def test_two_import_with_embedded_preamble(self):\n        from .common import a\n        from .common import b\n\n        m = b.struct_mystruct()\n        b.bar(ctypes.byref(m))\n        a.foo(ctypes.byref(m))\n\n    def test_one_import(self):\n        from .common import b\n\n        m = b.struct_mystruct()\n        b.bar(ctypes.byref(m))\n\n    def test_two_import(self):\n        from .common import a2\n        from .common import b2\n\n        m = b2.struct_mystruct()\n        b2.bar(ctypes.byref(m))\n        a2.foo(ctypes.byref(m))\n\n\nclass StdBoolTest(unittest.TestCase):\n    \"Test correct parsing and generation of bool type\"\n\n    @classmethod\n    def setUpClass(cls):\n        header_str = \"\"\"\n#include <stdbool.h>\n\nstruct foo\n{\n    bool is_bar;\n    int a;\n};\n\"\"\"\n        cls.module, _ = generate(header_str)  # , all_headers=True)\n\n    @classmethod\n    def tearDownClass(cls):\n        del cls.module\n        cleanup()\n\n    def test_stdbool_type(self):\n        \"\"\"Test is bool is correctly parsed\"\"\"\n        module = StdBoolTest.module\n        struct_foo = module.struct_foo\n        self.assertEqual(struct_foo._fields_, [(\"is_bar\", ctypes.c_bool), (\"a\", ctypes.c_int)])\n\n\nclass IntTypesTest(unittest.TestCase):\n    \"Test correct parsing and generation of different integer types\"\n\n    @classmethod\n    def setUpClass(cls):\n        header_str = \"\"\"\nstruct int_types {\n    short t_short;\n    short int t_short_int;\n    unsigned short t_ushort;\n    unsigned short int t_ushort_int;\n    int t_int;\n    long t_long;\n    long int t_long_int;\n    long long t_long_long;\n    long long int t_long_long_int;\n    unsigned long long int t_u_long_long_int;\n    long 
int unsigned long t_long_int_u_long;\n};\n\"\"\"\n        cls.module, _ = generate(header_str)\n\n    @classmethod\n    def tearDownClass(cls):\n        del cls.module\n        cleanup()\n\n    def test_int_types(self):\n        \"\"\"Test if different integer types are correctly parsed\"\"\"\n        module = IntTypesTest.module\n        struct_int_types = module.struct_int_types\n        self.assertEqual(\n            struct_int_types._fields_,\n            [\n                (\"t_short\", ctypes.c_short),\n                (\"t_short_int\", ctypes.c_short),\n                (\"t_ushort\", ctypes.c_ushort),\n                (\"t_ushort_int\", ctypes.c_ushort),\n                (\"t_int\", ctypes.c_int),\n                (\"t_long\", ctypes.c_long),\n                (\"t_long_int\", ctypes.c_long),\n                (\"t_long_long\", ctypes.c_longlong),\n                (\"t_long_long_int\", ctypes.c_longlong),\n                (\"t_u_long_long_int\", ctypes.c_ulonglong),\n                (\"t_long_int_u_long\", ctypes.c_ulonglong),\n            ],\n        )\n\n\nclass SimpleMacrosTest(unittest.TestCase):\n    \"\"\"Based on simple_macros.py\"\"\"\n\n    @classmethod\n    def setUpClass(cls):\n        header_str = \"\"\"\n#define A 1\n#define B(x,y) x+y\n#define C(a,b,c) a?b:c\n#define funny(x) \"funny\" #x\n#define multipler_macro(x,y) x*y\n#define minus_macro(x,y) x-y\n#define divide_macro(x,y) x/y\n#define mod_macro(x,y) x%y\n#define subcall_macro_simple(x) (A)\n#define subcall_macro_simple_plus(x) (A) + (x)\n#define subcall_macro_minus(x,y) minus_macro(x,y)\n#define subcall_macro_minus_plus(x,y,z) (minus_macro(x,y)) + (z)\n\"\"\"\n        cls.module, _ = generate(header_str)\n        cls.json, _ = generate(header_str, output_language=\"json\")\n\n    def _json(self, name):\n        for i in SimpleMacrosTest.json:\n            if i[\"name\"] == name:\n                return i\n        raise KeyError(\"Could not find JSON entry\")\n\n    @classmethod\n    def 
tearDownClass(cls):\n        del cls.module, cls.json\n        cleanup()\n\n    def test_macro_constant_int(self):\n        \"\"\"Tests from simple_macros.py\"\"\"\n        module, json = SimpleMacrosTest.module, self._json\n\n        self.assertEqual(module.A, 1)\n        self.assertEqual(json(\"A\"), {\"name\": \"A\", \"type\": \"macro\", \"value\": \"1\"})\n\n    def test_macro_addition_json(self):\n        json = self._json\n\n        self.assertEqual(\n            json(\"B\"),\n            {\"args\": [\"x\", \"y\"], \"body\": \"(x + y)\", \"name\": \"B\", \"type\": \"macro_function\"},\n        )\n\n    def test_macro_addition(self):\n        \"\"\"Tests from simple_macros.py\"\"\"\n        module = SimpleMacrosTest.module\n\n        self.assertEqual(module.B(2, 2), 4)\n\n    def test_macro_ternary_json(self):\n        \"\"\"Tests from simple_macros.py\"\"\"\n        json = self._json\n\n        self.assertEqual(\n            json(\"C\"),\n            {\n                \"args\": [\"a\", \"b\", \"c\"],\n                \"body\": \"a and b or c\",\n                \"name\": \"C\",\n                \"type\": \"macro_function\",\n            },\n        )\n\n    def test_macro_ternary_true(self):\n        \"\"\"Tests from simple_macros.py\"\"\"\n        module = SimpleMacrosTest.module\n\n        self.assertEqual(module.C(True, 1, 2), 1)\n\n    def test_macro_ternary_false(self):\n        \"\"\"Tests from simple_macros.py\"\"\"\n        module = SimpleMacrosTest.module\n\n        self.assertEqual(module.C(False, 1, 2), 2)\n\n    def test_macro_ternary_true_complex(self):\n        \"\"\"Test ?: with true, using values that can not be confused between True and 1\"\"\"\n        module = SimpleMacrosTest.module\n\n        self.assertEqual(module.C(True, 99, 100), 99)\n\n    def test_macro_ternary_false_complex(self):\n        \"\"\"Test ?: with false, using values that can not be confused between True and 1\"\"\"\n        module = SimpleMacrosTest.module\n\n        
self.assertEqual(module.C(False, 99, 100), 100)\n\n    def test_macro_string_compose(self):\n        \"\"\"Tests from simple_macros.py\"\"\"\n        module = SimpleMacrosTest.module\n\n        self.assertEqual(module.funny(\"bunny\"), \"funnybunny\")\n\n    def test_macro_string_compose_json(self):\n        \"\"\"Tests from simple_macros.py\"\"\"\n        json = self._json\n\n        self.assertEqual(\n            json(\"funny\"),\n            {\"args\": [\"x\"], \"body\": \"('funny' + x)\", \"name\": \"funny\", \"type\": \"macro_function\"},\n        )\n\n    def test_macro_math_multipler(self):\n        module = SimpleMacrosTest.module\n\n        x, y = 2, 5\n        self.assertEqual(module.multipler_macro(x, y), x * y)\n\n    def test_macro_math_multiplier_json(self):\n        json = self._json\n\n        self.assertEqual(\n            json(\"multipler_macro\"),\n            {\n                \"args\": [\"x\", \"y\"],\n                \"body\": \"(x * y)\",\n                \"name\": \"multipler_macro\",\n                \"type\": \"macro_function\",\n            },\n        )\n\n    def test_macro_math_minus(self):\n        module = SimpleMacrosTest.module\n\n        x, y = 2, 5\n        self.assertEqual(module.minus_macro(x, y), x - y)\n\n    def test_macro_math_minus_json(self):\n        json = self._json\n\n        self.assertEqual(\n            json(\"minus_macro\"),\n            {\n                \"args\": [\"x\", \"y\"],\n                \"body\": \"(x - y)\",\n                \"name\": \"minus_macro\",\n                \"type\": \"macro_function\",\n            },\n        )\n\n    def test_macro_math_divide(self):\n        module = SimpleMacrosTest.module\n\n        x, y = 2, 5\n        self.assertEqual(module.divide_macro(x, y), x / y)\n\n    def test_macro_math_divide_json(self):\n        json = self._json\n\n        self.assertEqual(\n            json(\"divide_macro\"),\n            {\n                \"args\": [\"x\", \"y\"],\n                
\"body\": \"(x / y)\",\n                \"name\": \"divide_macro\",\n                \"type\": \"macro_function\",\n            },\n        )\n\n    def test_macro_math_mod(self):\n        module = SimpleMacrosTest.module\n\n        x, y = 2, 5\n        self.assertEqual(module.mod_macro(x, y), x % y)\n\n    def test_macro_math_mod_json(self):\n        json = self._json\n\n        self.assertEqual(\n            json(\"mod_macro\"),\n            {\"args\": [\"x\", \"y\"], \"body\": \"(x % y)\", \"name\": \"mod_macro\", \"type\": \"macro_function\"},\n        )\n\n    def test_macro_subcall_simple(self):\n        \"\"\"Test use of a constant valued macro within a macro\"\"\"\n        module = SimpleMacrosTest.module\n\n        self.assertEqual(module.subcall_macro_simple(2), 1)\n\n    def test_macro_subcall_simple_json(self):\n        json = self._json\n\n        self.assertEqual(\n            json(\"subcall_macro_simple\"),\n            {\"args\": [\"x\"], \"body\": \"A\", \"name\": \"subcall_macro_simple\", \"type\": \"macro_function\"},\n        )\n\n    def test_macro_subcall_simple_plus(self):\n        \"\"\"Test math with constant valued macro within a macro\"\"\"\n        module = SimpleMacrosTest.module\n\n        self.assertEqual(module.subcall_macro_simple_plus(2), 1 + 2)\n\n    def test_macro_subcall_simple_plus_json(self):\n        json = self._json\n\n        self.assertEqual(\n            json(\"subcall_macro_simple_plus\"),\n            {\n                \"args\": [\"x\"],\n                \"body\": \"(A + x)\",\n                \"name\": \"subcall_macro_simple_plus\",\n                \"type\": \"macro_function\",\n            },\n        )\n\n    def test_macro_subcall_minus(self):\n        \"\"\"Test use of macro function within a macro\"\"\"\n        module = SimpleMacrosTest.module\n\n        x, y = 2, 5\n        self.assertEqual(module.subcall_macro_minus(x, y), x - y)\n\n    def test_macro_subcall_minus_json(self):\n        json = self._json\n\n 
       self.assertEqual(\n            json(\"subcall_macro_minus\"),\n            {\n                \"args\": [\"x\", \"y\"],\n                \"body\": \"(minus_macro (x, y))\",\n                \"name\": \"subcall_macro_minus\",\n                \"type\": \"macro_function\",\n            },\n        )\n\n    def test_macro_subcall_minus_plus(self):\n        \"\"\"Test math with a macro function within a macro\"\"\"\n        module = SimpleMacrosTest.module\n\n        x, y, z = 2, 5, 1\n        self.assertEqual(module.subcall_macro_minus_plus(x, y, z), (x - y) + z)\n\n    def test_macro_subcall_minus_plus_json(self):\n        json = self._json\n\n        self.assertEqual(\n            json(\"subcall_macro_minus_plus\"),\n            {\n                \"args\": [\"x\", \"y\", \"z\"],\n                \"body\": \"((minus_macro (x, y)) + z)\",\n                \"name\": \"subcall_macro_minus_plus\",\n                \"type\": \"macro_function\",\n            },\n        )\n\n\nclass StructuresTest(unittest.TestCase):\n    \"\"\"Based on structures.py\"\"\"\n\n    @classmethod\n    def setUpClass(cls):\n        \"\"\"\n        NOTE:  Very possibly, if you change this header string, you need to\n        change the line numbers in the JSON output test result below\n        (in test_struct_json).\n        \"\"\"\n        header_str = \"\"\"\n\nstruct foo\n{\n        int a;\n        char b;\n        int c;\n        int d : 15;\n        int   : 17;\n};\n\nstruct __attribute__((packed)) packed_foo\n{\n        int a;\n        char b;\n        int c;\n        int d : 15;\n        int   : 17;\n};\n\ntypedef struct\n{\n        int a;\n        char b;\n        int c;\n        int d : 15;\n        int   : 17;\n} foo_t;\n\ntypedef struct __attribute__((packed))\n{\n        int a;\n        char b;\n        int c;\n        int d : 15;\n        int   : 17;\n} packed_foo_t;\n\n#pragma pack(push, 4)\ntypedef struct\n{\n        int a;\n        char b;\n        int c;\n        int d : 
15;\n        int   : 17;\n} pragma_packed_foo_t;\n#pragma pack(pop)\n\n#pragma pack(push, thing1, 2)\n#pragma pack(push, thing2, 4)\n#pragma pack(pop)\n#pragma pack(push, thing3, 8)\n#pragma pack(push, thing4, 16)\n#pragma pack(pop, thing3)\nstruct  pragma_packed_foo2\n{\n        int a;\n        char b;\n        int c;\n        int d : 15;\n        int   : 17;\n};\n#pragma pack(pop, thing1)\n\nstruct  foo3\n{\n        int a;\n        char b;\n        int c;\n        int d : 15;\n        int   : 17;\n};\n\ntypedef int Int;\n\ntypedef struct {\n        int Int;\n} id_struct_t;\n\ntypedef struct {\n  int a;\n  char b;\n} BAR0, *PBAR0;\n\"\"\"\n        cls.module, _ = generate(header_str)\n        cls.json, _ = generate(header_str, output_language=\"json\")\n\n    @classmethod\n    def tearDownClass(cls):\n        del StructuresTest.module\n        cleanup()\n\n    def test_struct_json(self):\n        json_ans = [\n            {\n                \"attrib\": {},\n                \"fields\": [\n                    {\n                        \"ctype\": {\n                            \"Klass\": \"CtypesSimple\",\n                            \"errors\": [],\n                            \"longs\": 0,\n                            \"name\": \"int\",\n                            \"signed\": True,\n                        },\n                        \"name\": \"a\",\n                    },\n                    {\n                        \"ctype\": {\n                            \"Klass\": \"CtypesSimple\",\n                            \"errors\": [],\n                            \"longs\": 0,\n                            \"name\": \"char\",\n                            \"signed\": True,\n                        },\n                        \"name\": \"b\",\n                    },\n                    {\n                        \"ctype\": {\n                            \"Klass\": \"CtypesSimple\",\n                            \"errors\": [],\n                            \"longs\": 
0,\n                            \"name\": \"int\",\n                            \"signed\": True,\n                        },\n                        \"name\": \"c\",\n                    },\n                    {\n                        \"bitfield\": \"15\",\n                        \"ctype\": {\n                            \"Klass\": \"CtypesBitfield\",\n                            \"base\": {\n                                \"Klass\": \"CtypesSimple\",\n                                \"errors\": [],\n                                \"longs\": 0,\n                                \"name\": \"int\",\n                                \"signed\": True,\n                            },\n                            \"bitfield\": {\n                                \"Klass\": \"ConstantExpressionNode\",\n                                \"errors\": [],\n                                \"is_literal\": False,\n                                \"value\": 15,\n                            },\n                            \"errors\": [],\n                        },\n                        \"name\": \"d\",\n                    },\n                    {\n                        \"bitfield\": \"17\",\n                        \"ctype\": {\n                            \"Klass\": \"CtypesBitfield\",\n                            \"base\": {\n                                \"Klass\": \"CtypesSimple\",\n                                \"errors\": [],\n                                \"longs\": 0,\n                                \"name\": \"int\",\n                                \"signed\": True,\n                            },\n                            \"bitfield\": {\n                                \"Klass\": \"ConstantExpressionNode\",\n                                \"errors\": [],\n                                \"is_literal\": False,\n                                \"value\": 17,\n                            },\n                            \"errors\": [],\n               
         },\n                        \"name\": None,\n                    },\n                ],\n                \"name\": \"foo\",\n                \"type\": \"struct\",\n            },\n            {\n                \"attrib\": {\"packed\": True},\n                \"fields\": [\n                    {\n                        \"ctype\": {\n                            \"Klass\": \"CtypesSimple\",\n                            \"errors\": [],\n                            \"longs\": 0,\n                            \"name\": \"int\",\n                            \"signed\": True,\n                        },\n                        \"name\": \"a\",\n                    },\n                    {\n                        \"ctype\": {\n                            \"Klass\": \"CtypesSimple\",\n                            \"errors\": [],\n                            \"longs\": 0,\n                            \"name\": \"char\",\n                            \"signed\": True,\n                        },\n                        \"name\": \"b\",\n                    },\n                    {\n                        \"ctype\": {\n                            \"Klass\": \"CtypesSimple\",\n                            \"errors\": [],\n                            \"longs\": 0,\n                            \"name\": \"int\",\n                            \"signed\": True,\n                        },\n                        \"name\": \"c\",\n                    },\n                    {\n                        \"bitfield\": \"15\",\n                        \"ctype\": {\n                            \"Klass\": \"CtypesBitfield\",\n                            \"base\": {\n                                \"Klass\": \"CtypesSimple\",\n                                \"errors\": [],\n                                \"longs\": 0,\n                                \"name\": \"int\",\n                                \"signed\": True,\n                            },\n                         
   \"bitfield\": {\n                                \"Klass\": \"ConstantExpressionNode\",\n                                \"errors\": [],\n                                \"is_literal\": False,\n                                \"value\": 15,\n                            },\n                            \"errors\": [],\n                        },\n                        \"name\": \"d\",\n                    },\n                    {\n                        \"bitfield\": \"17\",\n                        \"ctype\": {\n                            \"Klass\": \"CtypesBitfield\",\n                            \"base\": {\n                                \"Klass\": \"CtypesSimple\",\n                                \"errors\": [],\n                                \"longs\": 0,\n                                \"name\": \"int\",\n                                \"signed\": True,\n                            },\n                            \"bitfield\": {\n                                \"Klass\": \"ConstantExpressionNode\",\n                                \"errors\": [],\n                                \"is_literal\": False,\n                                \"value\": 17,\n                            },\n                            \"errors\": [],\n                        },\n                        \"name\": None,\n                    },\n                ],\n                \"name\": \"packed_foo\",\n                \"type\": \"struct\",\n            },\n            {\n                \"attrib\": {},\n                \"fields\": [\n                    {\n                        \"ctype\": {\n                            \"Klass\": \"CtypesSimple\",\n                            \"errors\": [],\n                            \"longs\": 0,\n                            \"name\": \"int\",\n                            \"signed\": True,\n                        },\n                        \"name\": \"a\",\n                    },\n                    {\n                        
\"ctype\": {\n                            \"Klass\": \"CtypesSimple\",\n                            \"errors\": [],\n                            \"longs\": 0,\n                            \"name\": \"char\",\n                            \"signed\": True,\n                        },\n                        \"name\": \"b\",\n                    },\n                    {\n                        \"ctype\": {\n                            \"Klass\": \"CtypesSimple\",\n                            \"errors\": [],\n                            \"longs\": 0,\n                            \"name\": \"int\",\n                            \"signed\": True,\n                        },\n                        \"name\": \"c\",\n                    },\n                    {\n                        \"bitfield\": \"15\",\n                        \"ctype\": {\n                            \"Klass\": \"CtypesBitfield\",\n                            \"base\": {\n                                \"Klass\": \"CtypesSimple\",\n                                \"errors\": [],\n                                \"longs\": 0,\n                                \"name\": \"int\",\n                                \"signed\": True,\n                            },\n                            \"bitfield\": {\n                                \"Klass\": \"ConstantExpressionNode\",\n                                \"errors\": [],\n                                \"is_literal\": False,\n                                \"value\": 15,\n                            },\n                            \"errors\": [],\n                        },\n                        \"name\": \"d\",\n                    },\n                    {\n                        \"bitfield\": \"17\",\n                        \"ctype\": {\n                            \"Klass\": \"CtypesBitfield\",\n                            \"base\": {\n                                \"Klass\": \"CtypesSimple\",\n                                
\"errors\": [],\n                                \"longs\": 0,\n                                \"name\": \"int\",\n                                \"signed\": True,\n                            },\n                            \"bitfield\": {\n                                \"Klass\": \"ConstantExpressionNode\",\n                                \"errors\": [],\n                                \"is_literal\": False,\n                                \"value\": 17,\n                            },\n                            \"errors\": [],\n                        },\n                        \"name\": None,\n                    },\n                ],\n                \"name\": \"anon_1\",\n                \"type\": \"struct\",\n            },\n            {\n                \"ctype\": {\n                    \"Klass\": \"CtypesStruct\",\n                    \"anonymous\": True,\n                    \"errors\": [],\n                    \"members\": [\n                        [\n                            \"a\",\n                            {\n                                \"Klass\": \"CtypesSimple\",\n                                \"errors\": [],\n                                \"longs\": 0,\n                                \"name\": \"int\",\n                                \"signed\": True,\n                            },\n                        ],\n                        [\n                            \"b\",\n                            {\n                                \"Klass\": \"CtypesSimple\",\n                                \"errors\": [],\n                                \"longs\": 0,\n                                \"name\": \"char\",\n                                \"signed\": True,\n                            },\n                        ],\n                        [\n                            \"c\",\n                            {\n                                \"Klass\": \"CtypesSimple\",\n                                \"errors\": [],\n 
                               \"longs\": 0,\n                                \"name\": \"int\",\n                                \"signed\": True,\n                            },\n                        ],\n                        [\n                            \"d\",\n                            {\n                                \"Klass\": \"CtypesBitfield\",\n                                \"base\": {\n                                    \"Klass\": \"CtypesSimple\",\n                                    \"errors\": [],\n                                    \"longs\": 0,\n                                    \"name\": \"int\",\n                                    \"signed\": True,\n                                },\n                                \"bitfield\": {\n                                    \"Klass\": \"ConstantExpressionNode\",\n                                    \"errors\": [],\n                                    \"is_literal\": False,\n                                    \"value\": 15,\n                                },\n                                \"errors\": [],\n                            },\n                        ],\n                        [\n                            None,\n                            {\n                                \"Klass\": \"CtypesBitfield\",\n                                \"base\": {\n                                    \"Klass\": \"CtypesSimple\",\n                                    \"errors\": [],\n                                    \"longs\": 0,\n                                    \"name\": \"int\",\n                                    \"signed\": True,\n                                },\n                                \"bitfield\": {\n                                    \"Klass\": \"ConstantExpressionNode\",\n                                    \"errors\": [],\n                                    \"is_literal\": False,\n                                    \"value\": 17,\n                           
     },\n                                \"errors\": [],\n                            },\n                        ],\n                    ],\n                    \"opaque\": False,\n                    \"attrib\": {},\n                    \"src\": [\"/some-path/temp.h\", 21],\n                    \"tag\": \"anon_1\",\n                    \"variety\": \"struct\",\n                },\n                \"name\": \"foo_t\",\n                \"type\": \"typedef\",\n            },\n            {\n                \"attrib\": {\"packed\": True},\n                \"fields\": [\n                    {\n                        \"ctype\": {\n                            \"Klass\": \"CtypesSimple\",\n                            \"errors\": [],\n                            \"longs\": 0,\n                            \"name\": \"int\",\n                            \"signed\": True,\n                        },\n                        \"name\": \"a\",\n                    },\n                    {\n                        \"ctype\": {\n                            \"Klass\": \"CtypesSimple\",\n                            \"errors\": [],\n                            \"longs\": 0,\n                            \"name\": \"char\",\n                            \"signed\": True,\n                        },\n                        \"name\": \"b\",\n                    },\n                    {\n                        \"ctype\": {\n                            \"Klass\": \"CtypesSimple\",\n                            \"errors\": [],\n                            \"longs\": 0,\n                            \"name\": \"int\",\n                            \"signed\": True,\n                        },\n                        \"name\": \"c\",\n                    },\n                    {\n                        \"bitfield\": \"15\",\n                        \"ctype\": {\n                            \"Klass\": \"CtypesBitfield\",\n                            \"base\": {\n                           
     \"Klass\": \"CtypesSimple\",\n                                \"errors\": [],\n                                \"longs\": 0,\n                                \"name\": \"int\",\n                                \"signed\": True,\n                            },\n                            \"bitfield\": {\n                                \"Klass\": \"ConstantExpressionNode\",\n                                \"errors\": [],\n                                \"is_literal\": False,\n                                \"value\": 15,\n                            },\n                            \"errors\": [],\n                        },\n                        \"name\": \"d\",\n                    },\n                    {\n                        \"bitfield\": \"17\",\n                        \"ctype\": {\n                            \"Klass\": \"CtypesBitfield\",\n                            \"base\": {\n                                \"Klass\": \"CtypesSimple\",\n                                \"errors\": [],\n                                \"longs\": 0,\n                                \"name\": \"int\",\n                                \"signed\": True,\n                            },\n                            \"bitfield\": {\n                                \"Klass\": \"ConstantExpressionNode\",\n                                \"errors\": [],\n                                \"is_literal\": False,\n                                \"value\": 17,\n                            },\n                            \"errors\": [],\n                        },\n                        \"name\": None,\n                    },\n                ],\n                \"name\": \"anon_2\",\n                \"type\": \"struct\",\n            },\n            {\n                \"ctype\": {\n                    \"Klass\": \"CtypesStruct\",\n                    \"anonymous\": True,\n                    \"errors\": [],\n                    \"members\": [\n                        
[\n                            \"a\",\n                            {\n                                \"Klass\": \"CtypesSimple\",\n                                \"errors\": [],\n                                \"longs\": 0,\n                                \"name\": \"int\",\n                                \"signed\": True,\n                            },\n                        ],\n                        [\n                            \"b\",\n                            {\n                                \"Klass\": \"CtypesSimple\",\n                                \"errors\": [],\n                                \"longs\": 0,\n                                \"name\": \"char\",\n                                \"signed\": True,\n                            },\n                        ],\n                        [\n                            \"c\",\n                            {\n                                \"Klass\": \"CtypesSimple\",\n                                \"errors\": [],\n                                \"longs\": 0,\n                                \"name\": \"int\",\n                                \"signed\": True,\n                            },\n                        ],\n                        [\n                            \"d\",\n                            {\n                                \"Klass\": \"CtypesBitfield\",\n                                \"base\": {\n                                    \"Klass\": \"CtypesSimple\",\n                                    \"errors\": [],\n                                    \"longs\": 0,\n                                    \"name\": \"int\",\n                                    \"signed\": True,\n                                },\n                                \"bitfield\": {\n                                    \"Klass\": \"ConstantExpressionNode\",\n                                    \"errors\": [],\n                                    \"is_literal\": False,\n                    
                \"value\": 15,\n                                },\n                                \"errors\": [],\n                            },\n                        ],\n                        [\n                            None,\n                            {\n                                \"Klass\": \"CtypesBitfield\",\n                                \"base\": {\n                                    \"Klass\": \"CtypesSimple\",\n                                    \"errors\": [],\n                                    \"longs\": 0,\n                                    \"name\": \"int\",\n                                    \"signed\": True,\n                                },\n                                \"bitfield\": {\n                                    \"Klass\": \"ConstantExpressionNode\",\n                                    \"errors\": [],\n                                    \"is_literal\": False,\n                                    \"value\": 17,\n                                },\n                                \"errors\": [],\n                            },\n                        ],\n                    ],\n                    \"opaque\": False,\n                    \"attrib\": {\"packed\": True},\n                    \"src\": [\"/some-path/temp.h\", 30],\n                    \"tag\": \"anon_2\",\n                    \"variety\": \"struct\",\n                },\n                \"name\": \"packed_foo_t\",\n                \"type\": \"typedef\",\n            },\n            {\n                \"attrib\": {\"packed\": True, \"aligned\": [4]},\n                \"fields\": [\n                    {\n                        \"ctype\": {\n                            \"Klass\": \"CtypesSimple\",\n                            \"errors\": [],\n                            \"longs\": 0,\n                            \"name\": \"int\",\n                            \"signed\": True,\n                        },\n                        \"name\": 
\"a\",\n                    },\n                    {\n                        \"ctype\": {\n                            \"Klass\": \"CtypesSimple\",\n                            \"errors\": [],\n                            \"longs\": 0,\n                            \"name\": \"char\",\n                            \"signed\": True,\n                        },\n                        \"name\": \"b\",\n                    },\n                    {\n                        \"ctype\": {\n                            \"Klass\": \"CtypesSimple\",\n                            \"errors\": [],\n                            \"longs\": 0,\n                            \"name\": \"int\",\n                            \"signed\": True,\n                        },\n                        \"name\": \"c\",\n                    },\n                    {\n                        \"bitfield\": \"15\",\n                        \"ctype\": {\n                            \"Klass\": \"CtypesBitfield\",\n                            \"base\": {\n                                \"Klass\": \"CtypesSimple\",\n                                \"errors\": [],\n                                \"longs\": 0,\n                                \"name\": \"int\",\n                                \"signed\": True,\n                            },\n                            \"bitfield\": {\n                                \"Klass\": \"ConstantExpressionNode\",\n                                \"errors\": [],\n                                \"is_literal\": False,\n                                \"value\": 15,\n                            },\n                            \"errors\": [],\n                        },\n                        \"name\": \"d\",\n                    },\n                    {\n                        \"bitfield\": \"17\",\n                        \"ctype\": {\n                            \"Klass\": \"CtypesBitfield\",\n                            \"base\": {\n                       
         \"Klass\": \"CtypesSimple\",\n                                \"errors\": [],\n                                \"longs\": 0,\n                                \"name\": \"int\",\n                                \"signed\": True,\n                            },\n                            \"bitfield\": {\n                                \"Klass\": \"ConstantExpressionNode\",\n                                \"errors\": [],\n                                \"is_literal\": False,\n                                \"value\": 17,\n                            },\n                            \"errors\": [],\n                        },\n                        \"name\": None,\n                    },\n                ],\n                \"name\": \"anon_3\",\n                \"type\": \"struct\",\n            },\n            {\n                \"ctype\": {\n                    \"Klass\": \"CtypesStruct\",\n                    \"anonymous\": True,\n                    \"errors\": [],\n                    \"members\": [\n                        [\n                            \"a\",\n                            {\n                                \"Klass\": \"CtypesSimple\",\n                                \"errors\": [],\n                                \"longs\": 0,\n                                \"name\": \"int\",\n                                \"signed\": True,\n                            },\n                        ],\n                        [\n                            \"b\",\n                            {\n                                \"Klass\": \"CtypesSimple\",\n                                \"errors\": [],\n                                \"longs\": 0,\n                                \"name\": \"char\",\n                                \"signed\": True,\n                            },\n                        ],\n                        [\n                            \"c\",\n                            {\n                                
\"Klass\": \"CtypesSimple\",\n                                \"errors\": [],\n                                \"longs\": 0,\n                                \"name\": \"int\",\n                                \"signed\": True,\n                            },\n                        ],\n                        [\n                            \"d\",\n                            {\n                                \"Klass\": \"CtypesBitfield\",\n                                \"base\": {\n                                    \"Klass\": \"CtypesSimple\",\n                                    \"errors\": [],\n                                    \"longs\": 0,\n                                    \"name\": \"int\",\n                                    \"signed\": True,\n                                },\n                                \"bitfield\": {\n                                    \"Klass\": \"ConstantExpressionNode\",\n                                    \"errors\": [],\n                                    \"is_literal\": False,\n                                    \"value\": 15,\n                                },\n                                \"errors\": [],\n                            },\n                        ],\n                        [\n                            None,\n                            {\n                                \"Klass\": \"CtypesBitfield\",\n                                \"base\": {\n                                    \"Klass\": \"CtypesSimple\",\n                                    \"errors\": [],\n                                    \"longs\": 0,\n                                    \"name\": \"int\",\n                                    \"signed\": True,\n                                },\n                                \"bitfield\": {\n                                    \"Klass\": \"ConstantExpressionNode\",\n                                    \"errors\": [],\n                                    \"is_literal\": 
False,\n                                    \"value\": 17,\n                                },\n                                \"errors\": [],\n                            },\n                        ],\n                    ],\n                    \"opaque\": False,\n                    \"attrib\": {\"packed\": True, \"aligned\": [4]},\n                    \"src\": [\"/some-path/temp.h\", 40],\n                    \"tag\": \"anon_3\",\n                    \"variety\": \"struct\",\n                },\n                \"name\": \"pragma_packed_foo_t\",\n                \"type\": \"typedef\",\n            },\n            {\n                \"attrib\": {\"packed\": True, \"aligned\": [2]},\n                \"fields\": [\n                    {\n                        \"ctype\": {\n                            \"Klass\": \"CtypesSimple\",\n                            \"errors\": [],\n                            \"longs\": 0,\n                            \"name\": \"int\",\n                            \"signed\": True,\n                        },\n                        \"name\": \"a\",\n                    },\n                    {\n                        \"ctype\": {\n                            \"Klass\": \"CtypesSimple\",\n                            \"errors\": [],\n                            \"longs\": 0,\n                            \"name\": \"char\",\n                            \"signed\": True,\n                        },\n                        \"name\": \"b\",\n                    },\n                    {\n                        \"ctype\": {\n                            \"Klass\": \"CtypesSimple\",\n                            \"errors\": [],\n                            \"longs\": 0,\n                            \"name\": \"int\",\n                            \"signed\": True,\n                        },\n                        \"name\": \"c\",\n                    },\n                    {\n                        \"bitfield\": \"15\",\n             
           \"ctype\": {\n                            \"Klass\": \"CtypesBitfield\",\n                            \"base\": {\n                                \"Klass\": \"CtypesSimple\",\n                                \"errors\": [],\n                                \"longs\": 0,\n                                \"name\": \"int\",\n                                \"signed\": True,\n                            },\n                            \"bitfield\": {\n                                \"Klass\": \"ConstantExpressionNode\",\n                                \"errors\": [],\n                                \"is_literal\": False,\n                                \"value\": 15,\n                            },\n                            \"errors\": [],\n                        },\n                        \"name\": \"d\",\n                    },\n                    {\n                        \"bitfield\": \"17\",\n                        \"ctype\": {\n                            \"Klass\": \"CtypesBitfield\",\n                            \"base\": {\n                                \"Klass\": \"CtypesSimple\",\n                                \"errors\": [],\n                                \"longs\": 0,\n                                \"name\": \"int\",\n                                \"signed\": True,\n                            },\n                            \"bitfield\": {\n                                \"Klass\": \"ConstantExpressionNode\",\n                                \"errors\": [],\n                                \"is_literal\": False,\n                                \"value\": 17,\n                            },\n                            \"errors\": [],\n                        },\n                        \"name\": None,\n                    },\n                ],\n                \"name\": \"pragma_packed_foo2\",\n                \"type\": \"struct\",\n            },\n            {\n                \"attrib\": {},\n                
\"fields\": [\n                    {\n                        \"ctype\": {\n                            \"Klass\": \"CtypesSimple\",\n                            \"errors\": [],\n                            \"longs\": 0,\n                            \"name\": \"int\",\n                            \"signed\": True,\n                        },\n                        \"name\": \"a\",\n                    },\n                    {\n                        \"ctype\": {\n                            \"Klass\": \"CtypesSimple\",\n                            \"errors\": [],\n                            \"longs\": 0,\n                            \"name\": \"char\",\n                            \"signed\": True,\n                        },\n                        \"name\": \"b\",\n                    },\n                    {\n                        \"ctype\": {\n                            \"Klass\": \"CtypesSimple\",\n                            \"errors\": [],\n                            \"longs\": 0,\n                            \"name\": \"int\",\n                            \"signed\": True,\n                        },\n                        \"name\": \"c\",\n                    },\n                    {\n                        \"bitfield\": \"15\",\n                        \"ctype\": {\n                            \"Klass\": \"CtypesBitfield\",\n                            \"base\": {\n                                \"Klass\": \"CtypesSimple\",\n                                \"errors\": [],\n                                \"longs\": 0,\n                                \"name\": \"int\",\n                                \"signed\": True,\n                            },\n                            \"bitfield\": {\n                                \"Klass\": \"ConstantExpressionNode\",\n                                \"errors\": [],\n                                \"is_literal\": False,\n                                \"value\": 15,\n                        
    },\n                            \"errors\": [],\n                        },\n                        \"name\": \"d\",\n                    },\n                    {\n                        \"bitfield\": \"17\",\n                        \"ctype\": {\n                            \"Klass\": \"CtypesBitfield\",\n                            \"base\": {\n                                \"Klass\": \"CtypesSimple\",\n                                \"errors\": [],\n                                \"longs\": 0,\n                                \"name\": \"int\",\n                                \"signed\": True,\n                            },\n                            \"bitfield\": {\n                                \"Klass\": \"ConstantExpressionNode\",\n                                \"errors\": [],\n                                \"is_literal\": False,\n                                \"value\": 17,\n                            },\n                            \"errors\": [],\n                        },\n                        \"name\": None,\n                    },\n                ],\n                \"name\": \"foo3\",\n                \"type\": \"struct\",\n            },\n            {\n                \"ctype\": {\n                    \"Klass\": \"CtypesSimple\",\n                    \"errors\": [],\n                    \"longs\": 0,\n                    \"name\": \"int\",\n                    \"signed\": True,\n                },\n                \"name\": \"Int\",\n                \"type\": \"typedef\",\n            },\n            {\n                \"attrib\": {},\n                \"fields\": [\n                    {\n                        \"ctype\": {\n                            \"Klass\": \"CtypesSimple\",\n                            \"errors\": [],\n                            \"longs\": 0,\n                            \"name\": \"int\",\n                            \"signed\": True,\n                        },\n                        
\"name\": \"Int\",\n                    }\n                ],\n                \"name\": \"anon_4\",\n                \"type\": \"struct\",\n            },\n            {\n                \"ctype\": {\n                    \"Klass\": \"CtypesStruct\",\n                    \"anonymous\": True,\n                    \"errors\": [],\n                    \"members\": [\n                        [\n                            \"Int\",\n                            {\n                                \"Klass\": \"CtypesSimple\",\n                                \"errors\": [],\n                                \"longs\": 0,\n                                \"name\": \"int\",\n                                \"signed\": True,\n                            },\n                        ]\n                    ],\n                    \"opaque\": False,\n                    \"attrib\": {},\n                    \"src\": [\"/some-path/temp.h\", 77],\n                    \"tag\": \"anon_4\",\n                    \"variety\": \"struct\",\n                },\n                \"name\": \"id_struct_t\",\n                \"type\": \"typedef\",\n            },\n            {\n                \"attrib\": {},\n                \"fields\": [\n                    {\n                        \"ctype\": {\n                            \"Klass\": \"CtypesSimple\",\n                            \"errors\": [],\n                            \"longs\": 0,\n                            \"name\": \"int\",\n                            \"signed\": True,\n                        },\n                        \"name\": \"a\",\n                    },\n                    {\n                        \"ctype\": {\n                            \"Klass\": \"CtypesSimple\",\n                            \"errors\": [],\n                            \"longs\": 0,\n                            \"name\": \"char\",\n                            \"signed\": True,\n                        },\n                        \"name\": \"b\",\n 
                   },\n                ],\n                \"name\": \"anon_5\",\n                \"type\": \"struct\",\n            },\n            {\n                \"ctype\": {\n                    \"Klass\": \"CtypesStruct\",\n                    \"anonymous\": True,\n                    \"attrib\": {},\n                    \"errors\": [],\n                    \"members\": [\n                        [\n                            \"a\",\n                            {\n                                \"Klass\": \"CtypesSimple\",\n                                \"errors\": [],\n                                \"longs\": 0,\n                                \"name\": \"int\",\n                                \"signed\": True,\n                            },\n                        ],\n                        [\n                            \"b\",\n                            {\n                                \"Klass\": \"CtypesSimple\",\n                                \"errors\": [],\n                                \"longs\": 0,\n                                \"name\": \"char\",\n                                \"signed\": True,\n                            },\n                        ],\n                    ],\n                    \"opaque\": False,\n                    \"src\": [\"/some-path/temp.h\", 81],\n                    \"tag\": \"anon_5\",\n                    \"variety\": \"struct\",\n                },\n                \"name\": \"BAR0\",\n                \"type\": \"typedef\",\n            },\n            {\n                \"ctype\": {\n                    \"Klass\": \"CtypesPointer\",\n                    \"destination\": {\n                        \"Klass\": \"CtypesStruct\",\n                        \"anonymous\": True,\n                        \"attrib\": {},\n                        \"errors\": [],\n                        \"members\": [\n                            [\n                                \"a\",\n                                
{\n                                    \"Klass\": \"CtypesSimple\",\n                                    \"errors\": [],\n                                    \"longs\": 0,\n                                    \"name\": \"int\",\n                                    \"signed\": True,\n                                },\n                            ],\n                            [\n                                \"b\",\n                                {\n                                    \"Klass\": \"CtypesSimple\",\n                                    \"errors\": [],\n                                    \"longs\": 0,\n                                    \"name\": \"char\",\n                                    \"signed\": True,\n                                },\n                            ],\n                        ],\n                        \"opaque\": False,\n                        \"src\": [\"/some-path/temp.h\", 81],\n                        \"tag\": \"anon_5\",\n                        \"variety\": \"struct\",\n                    },\n                    \"errors\": [],\n                    \"qualifiers\": [],\n                },\n                \"name\": \"PBAR0\",\n                \"type\": \"typedef\",\n            },\n            {\n                \"ctype\": {\n                    \"Klass\": \"CtypesStruct\",\n                    \"anonymous\": False,\n                    \"errors\": [],\n                    \"members\": [\n                        [\n                            \"a\",\n                            {\n                                \"Klass\": \"CtypesSimple\",\n                                \"errors\": [],\n                                \"longs\": 0,\n                                \"name\": \"int\",\n                                \"signed\": True,\n                            },\n                        ],\n                        [\n                            \"b\",\n                            {\n                         
       \"Klass\": \"CtypesSimple\",\n                                \"errors\": [],\n                                \"longs\": 0,\n                                \"name\": \"char\",\n                                \"signed\": True,\n                            },\n                        ],\n                        [\n                            \"c\",\n                            {\n                                \"Klass\": \"CtypesSimple\",\n                                \"errors\": [],\n                                \"longs\": 0,\n                                \"name\": \"int\",\n                                \"signed\": True,\n                            },\n                        ],\n                        [\n                            \"d\",\n                            {\n                                \"Klass\": \"CtypesBitfield\",\n                                \"base\": {\n                                    \"Klass\": \"CtypesSimple\",\n                                    \"errors\": [],\n                                    \"longs\": 0,\n                                    \"name\": \"int\",\n                                    \"signed\": True,\n                                },\n                                \"bitfield\": {\n                                    \"Klass\": \"ConstantExpressionNode\",\n                                    \"errors\": [],\n                                    \"is_literal\": False,\n                                    \"value\": 15,\n                                },\n                                \"errors\": [],\n                            },\n                        ],\n                        [\n                            None,\n                            {\n                                \"Klass\": \"CtypesBitfield\",\n                                \"base\": {\n                                    \"Klass\": \"CtypesSimple\",\n                                    \"errors\": [],\n            
                        \"longs\": 0,\n                                    \"name\": \"int\",\n                                    \"signed\": True,\n                                },\n                                \"bitfield\": {\n                                    \"Klass\": \"ConstantExpressionNode\",\n                                    \"errors\": [],\n                                    \"is_literal\": False,\n                                    \"value\": 17,\n                                },\n                                \"errors\": [],\n                            },\n                        ],\n                    ],\n                    \"opaque\": False,\n                    \"attrib\": {},\n                    \"src\": [\"/some-path/temp.h\", 3],\n                    \"tag\": \"foo\",\n                    \"variety\": \"struct\",\n                },\n                \"name\": \"foo\",\n                \"type\": \"typedef\",\n            },\n            {\n                \"ctype\": {\n                    \"Klass\": \"CtypesStruct\",\n                    \"anonymous\": False,\n                    \"errors\": [],\n                    \"members\": [\n                        [\n                            \"a\",\n                            {\n                                \"Klass\": \"CtypesSimple\",\n                                \"errors\": [],\n                                \"longs\": 0,\n                                \"name\": \"int\",\n                                \"signed\": True,\n                            },\n                        ],\n                        [\n                            \"b\",\n                            {\n                                \"Klass\": \"CtypesSimple\",\n                                \"errors\": [],\n                                \"longs\": 0,\n                                \"name\": \"char\",\n                                \"signed\": True,\n                            },\n         
               ],\n                        [\n                            \"c\",\n                            {\n                                \"Klass\": \"CtypesSimple\",\n                                \"errors\": [],\n                                \"longs\": 0,\n                                \"name\": \"int\",\n                                \"signed\": True,\n                            },\n                        ],\n                        [\n                            \"d\",\n                            {\n                                \"Klass\": \"CtypesBitfield\",\n                                \"base\": {\n                                    \"Klass\": \"CtypesSimple\",\n                                    \"errors\": [],\n                                    \"longs\": 0,\n                                    \"name\": \"int\",\n                                    \"signed\": True,\n                                },\n                                \"bitfield\": {\n                                    \"Klass\": \"ConstantExpressionNode\",\n                                    \"errors\": [],\n                                    \"is_literal\": False,\n                                    \"value\": 15,\n                                },\n                                \"errors\": [],\n                            },\n                        ],\n                        [\n                            None,\n                            {\n                                \"Klass\": \"CtypesBitfield\",\n                                \"base\": {\n                                    \"Klass\": \"CtypesSimple\",\n                                    \"errors\": [],\n                                    \"longs\": 0,\n                                    \"name\": \"int\",\n                                    \"signed\": True,\n                                },\n                                \"bitfield\": {\n                                    
\"Klass\": \"ConstantExpressionNode\",\n                                    \"errors\": [],\n                                    \"is_literal\": False,\n                                    \"value\": 17,\n                                },\n                                \"errors\": [],\n                            },\n                        ],\n                    ],\n                    \"opaque\": False,\n                    \"attrib\": {\"packed\": True},\n                    \"src\": [\"/some-path/temp.h\", 12],\n                    \"tag\": \"packed_foo\",\n                    \"variety\": \"struct\",\n                },\n                \"name\": \"packed_foo\",\n                \"type\": \"typedef\",\n            },\n            {\n                \"ctype\": {\n                    \"Klass\": \"CtypesStruct\",\n                    \"anonymous\": False,\n                    \"attrib\": {\"aligned\": [2], \"packed\": True},\n                    \"errors\": [],\n                    \"members\": [\n                        [\n                            \"a\",\n                            {\n                                \"Klass\": \"CtypesSimple\",\n                                \"errors\": [],\n                                \"longs\": 0,\n                                \"name\": \"int\",\n                                \"signed\": True,\n                            },\n                        ],\n                        [\n                            \"b\",\n                            {\n                                \"Klass\": \"CtypesSimple\",\n                                \"errors\": [],\n                                \"longs\": 0,\n                                \"name\": \"char\",\n                                \"signed\": True,\n                            },\n                        ],\n                        [\n                            \"c\",\n                            {\n                                \"Klass\": 
\"CtypesSimple\",\n                                \"errors\": [],\n                                \"longs\": 0,\n                                \"name\": \"int\",\n                                \"signed\": True,\n                            },\n                        ],\n                        [\n                            \"d\",\n                            {\n                                \"Klass\": \"CtypesBitfield\",\n                                \"base\": {\n                                    \"Klass\": \"CtypesSimple\",\n                                    \"errors\": [],\n                                    \"longs\": 0,\n                                    \"name\": \"int\",\n                                    \"signed\": True,\n                                },\n                                \"bitfield\": {\n                                    \"Klass\": \"ConstantExpressionNode\",\n                                    \"errors\": [],\n                                    \"is_literal\": False,\n                                    \"value\": 15,\n                                },\n                                \"errors\": [],\n                            },\n                        ],\n                        [\n                            None,\n                            {\n                                \"Klass\": \"CtypesBitfield\",\n                                \"base\": {\n                                    \"Klass\": \"CtypesSimple\",\n                                    \"errors\": [],\n                                    \"longs\": 0,\n                                    \"name\": \"int\",\n                                    \"signed\": True,\n                                },\n                                \"bitfield\": {\n                                    \"Klass\": \"ConstantExpressionNode\",\n                                    \"errors\": [],\n                                    \"is_literal\": False,\n          
                          \"value\": 17,\n                                },\n                                \"errors\": [],\n                            },\n                        ],\n                    ],\n                    \"opaque\": False,\n                    \"src\": [\"/some-path/temp.h\", 56],\n                    \"tag\": \"pragma_packed_foo2\",\n                    \"variety\": \"struct\",\n                },\n                \"name\": \"pragma_packed_foo2\",\n                \"type\": \"typedef\",\n            },\n            {\n                \"ctype\": {\n                    \"Klass\": \"CtypesStruct\",\n                    \"anonymous\": False,\n                    \"attrib\": {},\n                    \"errors\": [],\n                    \"members\": [\n                        [\n                            \"a\",\n                            {\n                                \"Klass\": \"CtypesSimple\",\n                                \"errors\": [],\n                                \"longs\": 0,\n                                \"name\": \"int\",\n                                \"signed\": True,\n                            },\n                        ],\n                        [\n                            \"b\",\n                            {\n                                \"Klass\": \"CtypesSimple\",\n                                \"errors\": [],\n                                \"longs\": 0,\n                                \"name\": \"char\",\n                                \"signed\": True,\n                            },\n                        ],\n                        [\n                            \"c\",\n                            {\n                                \"Klass\": \"CtypesSimple\",\n                                \"errors\": [],\n                                \"longs\": 0,\n                                \"name\": \"int\",\n                                \"signed\": True,\n                            
},\n                        ],\n                        [\n                            \"d\",\n                            {\n                                \"Klass\": \"CtypesBitfield\",\n                                \"base\": {\n                                    \"Klass\": \"CtypesSimple\",\n                                    \"errors\": [],\n                                    \"longs\": 0,\n                                    \"name\": \"int\",\n                                    \"signed\": True,\n                                },\n                                \"bitfield\": {\n                                    \"Klass\": \"ConstantExpressionNode\",\n                                    \"errors\": [],\n                                    \"is_literal\": False,\n                                    \"value\": 15,\n                                },\n                                \"errors\": [],\n                            },\n                        ],\n                        [\n                            None,\n                            {\n                                \"Klass\": \"CtypesBitfield\",\n                                \"base\": {\n                                    \"Klass\": \"CtypesSimple\",\n                                    \"errors\": [],\n                                    \"longs\": 0,\n                                    \"name\": \"int\",\n                                    \"signed\": True,\n                                },\n                                \"bitfield\": {\n                                    \"Klass\": \"ConstantExpressionNode\",\n                                    \"errors\": [],\n                                    \"is_literal\": False,\n                                    \"value\": 17,\n                                },\n                                \"errors\": [],\n                            },\n                        ],\n                    ],\n                    \"opaque\": 
False,\n                    \"src\": [\"/some-path/temp.h\", 66],\n                    \"tag\": \"foo3\",\n                    \"variety\": \"struct\",\n                },\n                \"name\": \"foo3\",\n                \"type\": \"typedef\",\n            },\n        ]\n\n        compare_json(self, StructuresTest.json, json_ans, True)\n\n    def test_fields(self):\n        \"\"\"Test whether fields are built correctly.\"\"\"\n        struct_foo = StructuresTest.module.struct_foo\n        self.assertEqual(\n            struct_foo._fields_,\n            [\n                (\"a\", ctypes.c_int),\n                (\"b\", ctypes.c_char),\n                (\"c\", ctypes.c_int),\n                (\"d\", ctypes.c_int, 15),\n                (\"unnamed_1\", ctypes.c_int, 17),\n            ],\n        )\n\n    def test_pack(self):\n        \"\"\"Test whether gcc __attribute__((packed)) is interpreted correctly.\"\"\"\n        module = StructuresTest.module\n        unpacked_size = compute_packed(4, [ctypes.c_int] * 3 + [ctypes.c_char])\n        packed_size = compute_packed(1, [ctypes.c_int] * 3 + [ctypes.c_char])\n\n        struct_foo = module.struct_foo\n        struct_packed_foo = module.struct_packed_foo\n        foo_t = module.foo_t\n        packed_foo_t = module.packed_foo_t\n        self.assertEqual(getattr(struct_foo, \"_pack_\", 0), 0)\n        self.assertEqual(getattr(struct_packed_foo, \"_pack_\", 0), 1)\n        self.assertEqual(getattr(foo_t, \"_pack_\", 0), 0)\n        self.assertEqual(getattr(packed_foo_t, \"_pack_\", -1), 1)\n        self.assertEqual(ctypes.sizeof(struct_foo), unpacked_size)\n        self.assertEqual(ctypes.sizeof(foo_t), unpacked_size)\n        self.assertEqual(ctypes.sizeof(struct_packed_foo), packed_size)\n        self.assertEqual(ctypes.sizeof(packed_foo_t), packed_size)\n\n    def test_pragma_pack(self):\n        \"\"\"Test whether #pragma pack(...) 
is interpreted correctly.\"\"\"\n        module = StructuresTest.module\n        packed4_size = compute_packed(4, [ctypes.c_int] * 3 + [ctypes.c_char])\n        packed2_size = compute_packed(2, [ctypes.c_int] * 3 + [ctypes.c_char])\n        unpacked_size = compute_packed(4, [ctypes.c_int] * 3 + [ctypes.c_char])\n\n        pragma_packed_foo_t = module.pragma_packed_foo_t\n        struct_pragma_packed_foo2 = module.struct_pragma_packed_foo2\n        struct_foo3 = module.struct_foo3\n\n        self.assertEqual(getattr(pragma_packed_foo_t, \"_pack_\", 0), 4)\n        self.assertEqual(getattr(struct_pragma_packed_foo2, \"_pack_\", 0), 2)\n        self.assertEqual(getattr(struct_foo3, \"_pack_\", 0), 0)\n\n        self.assertEqual(ctypes.sizeof(pragma_packed_foo_t), packed4_size)\n        self.assertEqual(ctypes.sizeof(struct_pragma_packed_foo2), packed2_size)\n        self.assertEqual(ctypes.sizeof(struct_foo3), unpacked_size)\n\n    def test_typedef_vs_field_id(self):\n        \"\"\"Test whether local field identifier names can override external\n        typedef names.\n        \"\"\"\n        module = StructuresTest.module\n        Int = module.Int\n        id_struct_t = module.id_struct_t\n        self.assertEqual(Int, ctypes.c_int)\n        self.assertEqual(id_struct_t._fields_, [(\"Int\", ctypes.c_int)])\n\n    def test_anonymous_tag_uniformity(self):\n        \"\"\"Test whether anonymous structs with multiple declarations all resolve\n        to the same type.\n        \"\"\"\n        module = StructuresTest.module\n        BAR0 = module.BAR0\n        PBAR0 = module.PBAR0\n        self.assertEqual(PBAR0._type_, BAR0)\n\n\nclass MathTest(unittest.TestCase):\n    \"\"\"Based on math_functions.py\"\"\"\n\n    @classmethod\n    def setUpClass(cls):\n        header_str = \"\"\"\n#include <math.h>\n#define sin_plus_y(x,y) (sin(x) + (y))\n\"\"\"\n        if sys.platform == \"win32\":\n            # pick something from %windir%\\system32\\msvc*dll that include stdlib\n    
        libraries = [\"msvcrt\"]\n        elif sys.platform.startswith(\"linux\"):\n            libraries = [\"libm.so.6\"]\n        else:\n            libraries = [\"libc\"]\n        cls.module, _ = generate(header_str, libraries=libraries, all_headers=True)\n\n    @classmethod\n    def tearDownClass(cls):\n        del cls.module\n        cleanup()\n\n    def test_sin(self):\n        \"\"\"Based on math_functions.py\"\"\"\n        module = MathTest.module\n\n        self.assertEqual(module.sin(2), math.sin(2))\n\n    def test_sqrt(self):\n        \"\"\"Based on math_functions.py\"\"\"\n        module = MathTest.module\n\n        self.assertEqual(module.sqrt(4), 2)\n\n        def local_test():\n            module.sin(\"foobar\")\n\n        self.assertRaises(ctypes.ArgumentError, local_test)\n\n    def test_bad_args_string_not_number(self):\n        \"\"\"Based on math_functions.py\"\"\"\n        module = MathTest.module\n\n        def local_test():\n            module.sin(\"foobar\")\n\n        self.assertRaises(ctypes.ArgumentError, local_test)\n\n    def test_subcall_sin(self):\n        \"\"\"Test math with sin(x) in a macro\"\"\"\n        module = MathTest.module\n\n        self.assertEqual(module.sin_plus_y(2, 1), math.sin(2) + 1)\n\n\nclass EnumTest(unittest.TestCase):\n    @classmethod\n    def setUpClass(cls):\n        header_str = \"\"\"\n        typedef enum {\n            TEST_1 = 0,\n            TEST_2\n        } test_status_t;\n        \"\"\"\n        cls.module, _ = generate(header_str)\n        cls.json, _ = generate(header_str, output_language=\"json\")\n\n    @classmethod\n    def tearDownClass(cls):\n        del cls.module, cls.json\n        cleanup()\n\n    def test_enum(self):\n        self.assertEqual(EnumTest.module.TEST_1, 0)\n        self.assertEqual(EnumTest.module.TEST_2, 1)\n\n    def test_enum_json(self):\n        json_ans = [\n            {\n                \"fields\": [\n                    {\n  
                      \"ctype\": {\n                            \"Klass\": \"ConstantExpressionNode\",\n                            \"errors\": [],\n                            \"is_literal\": False,\n                            \"value\": 0,\n                        },\n                        \"name\": \"TEST_1\",\n                    },\n                    {\n                        \"ctype\": {\n                            \"Klass\": \"BinaryExpressionNode\",\n                            \"can_be_ctype\": [False, False],\n                            \"errors\": [],\n                            \"format\": \"(%s + %s)\",\n                            \"left\": {\n                                \"Klass\": \"IdentifierExpressionNode\",\n                                \"errors\": [],\n                                \"name\": \"TEST_1\",\n                            },\n                            \"name\": \"addition\",\n                            \"right\": {\n                                \"Klass\": \"ConstantExpressionNode\",\n                                \"errors\": [],\n                                \"is_literal\": False,\n                                \"value\": 1,\n                            },\n                        },\n                        \"name\": \"TEST_2\",\n                    },\n                ],\n                \"name\": \"anon_1\",\n                \"type\": \"enum\",\n            },\n            {\"name\": \"TEST_1\", \"type\": \"constant\", \"value\": \"0\"},\n            {\"name\": \"TEST_2\", \"type\": \"constant\", \"value\": \"(TEST_1 + 1)\"},\n            {\n                \"ctype\": {\n                    \"Klass\": \"CtypesEnum\",\n                    \"anonymous\": True,\n                    \"enumerators\": [\n                        [\n                            \"TEST_1\",\n                            {\n                                \"Klass\": \"ConstantExpressionNode\",\n                                
\"errors\": [],\n                                \"is_literal\": False,\n                                \"value\": 0,\n                            },\n                        ],\n                        [\n                            \"TEST_2\",\n                            {\n                                \"Klass\": \"BinaryExpressionNode\",\n                                \"can_be_ctype\": [False, False],\n                                \"errors\": [],\n                                \"format\": \"(%s + %s)\",\n                                \"left\": {\n                                    \"Klass\": \"IdentifierExpressionNode\",\n                                    \"errors\": [],\n                                    \"name\": \"TEST_1\",\n                                },\n                                \"name\": \"addition\",\n                                \"right\": {\n                                    \"Klass\": \"ConstantExpressionNode\",\n                                    \"errors\": [],\n                                    \"is_literal\": False,\n                                    \"value\": 1,\n                                },\n                            },\n                        ],\n                    ],\n                    \"errors\": [],\n                    \"opaque\": False,\n                    \"src\": [\"/some-path/temp.h\", 2],\n                    \"tag\": \"anon_1\",\n                },\n                \"name\": \"test_status_t\",\n                \"type\": \"typedef\",\n            },\n        ]\n\n        compare_json(self, EnumTest.json, json_ans, True)\n\n\nclass PrototypeTest(unittest.TestCase):\n    @classmethod\n    def setUpClass(cls):\n        header_str = \"\"\"\n        int bar2(int a);\n        int bar(int);\n        void foo(void);\n        void foo2(void) __attribute__((stdcall));\n        void * __attribute__((stdcall)) foo3(void);\n        void * __attribute__((stdcall)) * foo4(void);\n        void 
foo5(void) __attribute__((__stdcall__));\n        \"\"\"\n        cls.json, output = generate(header_str, output_language=\"json\")\n\n    @classmethod\n    def tearDownClass(cls):\n        del cls.json\n        cleanup()\n\n    def test_function_prototypes_json(self):\n        json_ans = [\n            {\n                \"args\": [\n                    {\n                        \"Klass\": \"CtypesSimple\",\n                        \"errors\": [],\n                        \"identifier\": \"a\",\n                        \"longs\": 0,\n                        \"name\": \"int\",\n                        \"signed\": True,\n                    }\n                ],\n                \"attrib\": {},\n                \"name\": \"bar2\",\n                \"return\": {\n                    \"Klass\": \"CtypesSimple\",\n                    \"errors\": [],\n                    \"longs\": 0,\n                    \"name\": \"int\",\n                    \"signed\": True,\n                },\n                \"type\": \"function\",\n                \"variadic\": False,\n            },\n            {\n                \"args\": [\n                    {\n                        \"Klass\": \"CtypesSimple\",\n                        \"errors\": [],\n                        \"identifier\": \"\",\n                        \"longs\": 0,\n                        \"name\": \"int\",\n                        \"signed\": True,\n                    }\n                ],\n                \"attrib\": {},\n                \"name\": \"bar\",\n                \"return\": {\n                    \"Klass\": \"CtypesSimple\",\n                    \"errors\": [],\n                    \"longs\": 0,\n                    \"name\": \"int\",\n                    \"signed\": True,\n                },\n                \"type\": \"function\",\n                \"variadic\": False,\n            },\n            {\n                \"args\": [],\n                \"attrib\": {},\n                \"name\": \"foo\",\n  
              \"return\": {\n                    \"Klass\": \"CtypesSimple\",\n                    \"errors\": [],\n                    \"longs\": 0,\n                    \"name\": \"void\",\n                    \"signed\": True,\n                },\n                \"type\": \"function\",\n                \"variadic\": False,\n            },\n            {\n                \"args\": [],\n                \"attrib\": {\"stdcall\": True},\n                \"name\": \"foo2\",\n                \"return\": {\n                    \"Klass\": \"CtypesSimple\",\n                    \"errors\": [],\n                    \"longs\": 0,\n                    \"name\": \"void\",\n                    \"signed\": True,\n                },\n                \"type\": \"function\",\n                \"variadic\": False,\n            },\n            {\n                \"args\": [],\n                \"attrib\": {\"stdcall\": True},\n                \"name\": \"foo3\",\n                \"return\": {\n                    \"Klass\": \"CtypesPointer\",\n                    \"destination\": {\"Klass\": \"CtypesSpecial\", \"errors\": [], \"name\": \"c_ubyte\"},\n                    \"errors\": [],\n                    \"qualifiers\": [],\n                },\n                \"type\": \"function\",\n                \"variadic\": False,\n            },\n            {\n                \"args\": [],\n                \"attrib\": {\"stdcall\": True},\n                \"name\": \"foo4\",\n                \"return\": {\n                    \"Klass\": \"CtypesPointer\",\n                    \"destination\": {\n                        \"Klass\": \"CtypesPointer\",\n                        \"destination\": {\n                            # this return type seems like it really ought to be\n                            # the same as for foo3\n                            \"Klass\": \"CtypesSimple\",\n                            \"errors\": [],\n                            \"longs\": 0,\n                       
     \"name\": \"void\",\n                            \"signed\": True,\n                        },\n                        \"errors\": [],\n                        \"qualifiers\": [],\n                    },\n                    \"errors\": [],\n                    \"qualifiers\": [],\n                },\n                \"type\": \"function\",\n                \"variadic\": False,\n            },\n            {\n                \"args\": [],\n                \"attrib\": {\"stdcall\": True},\n                \"name\": \"foo5\",\n                \"return\": {\n                    \"Klass\": \"CtypesSimple\",\n                    \"errors\": [],\n                    \"longs\": 0,\n                    \"name\": \"void\",\n                    \"signed\": True,\n                },\n                \"type\": \"function\",\n                \"variadic\": False,\n            },\n        ]\n\n        compare_json(self, PrototypeTest.json, json_ans, True)\n\n\nclass LongDoubleTest(unittest.TestCase):\n    \"Test correct parsing and generation of 'long double' type\"\n\n    @classmethod\n    def setUpClass(cls):\n        header_str = \"\"\"\n        struct foo\n        {\n            long double is_bar;\n            int a;\n        };\n        \"\"\"\n        cls.module, _ = generate(header_str)  # , all_headers=True)\n\n    @classmethod\n    def tearDownClass(cls):\n        del cls.module\n        cleanup()\n\n    def test_longdouble_type(self):\n        \"\"\"Test is long double is correctly parsed\"\"\"\n        module = LongDoubleTest.module\n        struct_foo = module.struct_foo\n        self.assertEqual(\n            struct_foo._fields_, [(\"is_bar\", ctypes.c_longdouble), (\"a\", ctypes.c_int)]\n        )\n\n\nclass MainTest(unittest.TestCase):\n    script = os.path.join(TEST_DIR, os.pardir, \"run.py\")\n\n    \"\"\"Test primary entry point used for ctypesgen when called as executable:\n    ctypesgen.main.main()\n\n    This test does not directly execute the script 
that is autogenerated by\n    setup.py, but does instead test the entry point as used by that script by\n    executing `run.py`.  `run.py` is a local work-alike (as compared to the\n    setuptools-autogenerated script) that is only meant to be run *in*\n    the root of the source code tree.\n    \"\"\"\n\n    @staticmethod\n    def _exec(args):\n        p = Popen([sys.executable, MainTest.script] + args, stdout=PIPE, stderr=PIPE)\n        o, e = p.communicate()\n        print(o, e, p.returncode)\n        return o, e, p.returncode\n\n    def test_version(self):\n        \"\"\"Test version string returned by script interface\"\"\"\n        o, e, c = self._exec([\"--version\"])\n        self.assertEqual(c, 0)\n        self.assertEqual(o.decode().strip(), ctypesgen_version())\n        self.assertEqual(e.decode(), \"\")\n\n    def test_help(self):\n        \"\"\"Test that script at least generates a help\"\"\"\n        o, e, c = self._exec([\"--help\"])\n        self.assertEqual(c, 0)\n        self.assertEqual(\n            o.decode().splitlines()[0].startswith(\"usage: run.py\"), True\n        )\n        self.assertGreater(len(o), 3000)  # it's long, so it must be the generated help\n        self.assertEqual(e.decode(), \"\")\n\n    def test_invalid_option(self):\n        \"\"\"Test that script at least generates a help\"\"\"\n        o, e, c = self._exec([\"random_header.h\", \"--oh-what-a-goose-i-am\"])\n        self.assertEqual(c, 2)\n        self.assertEqual(o.decode(), \"\")\n        self.assertEqual(\n            e.decode().splitlines()[0].startswith(\"usage: run.py\"), True\n        )\n        self.assertIn(\"error: unrecognized arguments: --oh-what-a-goose-i-am\", e.decode())\n\n\nclass UncheckedTest(unittest.TestCase):\n    \"\"\"Fixing a bug in 1.0.0 - basic type returns of function pointers get treated as pointers\"\"\"\n\n    @classmethod\n    def setUpClass(cls):\n        header_str = \"\"\"\n        typedef int (*some_type_of_answer)(void*);\n        
\"\"\"\n        cls.module, cls.output = generate(header_str, all_headers=False)\n\n    def test_unchecked_prototype(self):\n        \"\"\"Test is function type marked UNCHECKED (function pointer returning\n        int) is handled correctly\"\"\"\n        module = UncheckedTest.module\n        A = module.some_type_of_answer()\n        self.assertEqual(A.restype, ctypes.c_int)\n        self.assertEqual(A.argtypes, (ctypes.c_void_p,))\n\n    @classmethod\n    def tearDownClass(cls):\n        del cls.module\n        cleanup()\n\n\nclass ConstantsTest(unittest.TestCase):\n    \"Test correct parsing and generation of NULL\"\n\n    @classmethod\n    def setUpClass(cls):\n        header_str = \"\"\"\n#define I_CONST_HEX 0xAFAFAFu\n#define I_CONST_DEC 15455u\n#define I_CONST_OCT 0433u\n#define I_CONST_BIN 0b0101L\n#define I_ZERO 0\n#define I_ONE 1\n#define I_ZERO_SUF 0L\n\n#define F_CONST_1 155e+0L\n#define F_CONST_2 35.2e+0f\n#define F_CONST_3 35.e+0f\n#define F_CONST_4 0xAFp012l\n#define F_CONST_5 0x1.FFFFFEp+127f\n#define F_CONST_6 0xAFAF.p35f\n\nstruct foo\n{\n        int a;\n        char b;\n        int c: 0b10;\n        int d : 0xf;\n        int : 17;\n};\n\n#define CHAR_CONST u'🍌'\n\"\"\"\n        cls.module, _ = generate(header_str)\n\n    @classmethod\n    def tearDownClass(cls):\n        del ConstantsTest.module\n        cleanup()\n\n    def test_integer_constants(self):\n        \"\"\"Test if integer constants is correctly parsed\"\"\"\n        self.assertEqual(ConstantsTest.module.I_CONST_HEX, 0xAFAFAF)\n        self.assertEqual(ConstantsTest.module.I_CONST_DEC, int(15455))\n        self.assertEqual(ConstantsTest.module.I_CONST_OCT, 0o433)\n        self.assertEqual(ConstantsTest.module.I_CONST_BIN, 0b0101)\n        self.assertEqual(ConstantsTest.module.I_CONST_BIN, 5)\n        self.assertEqual(ConstantsTest.module.I_ZERO, int(0))\n        self.assertEqual(ConstantsTest.module.I_ONE, int(1))\n        self.assertEqual(ConstantsTest.module.I_ZERO_SUF, int(0))\n\n  
  def test_floating_constants(self):\n        \"\"\"Test if floating constants is correctly parsed\"\"\"\n        self.assertEqual(ConstantsTest.module.F_CONST_1, 155e0)\n        self.assertEqual(ConstantsTest.module.F_CONST_2, 35.2e0)\n        self.assertEqual(ConstantsTest.module.F_CONST_3, 35.0e0)\n        self.assertEqual(ConstantsTest.module.F_CONST_4, float.fromhex(\"0xAFp012\"))\n        self.assertEqual(ConstantsTest.module.F_CONST_5, float.fromhex(\"0x1.fffffep+127\"))\n        self.assertEqual(ConstantsTest.module.F_CONST_6, float.fromhex(\"0xAFAF.p35\"))\n\n    def test_struct_fields(self):\n        \"\"\"Test whether fields are built correctly.\"\"\"\n        struct_foo = ConstantsTest.module.struct_foo\n        self.assertEqual(\n            struct_foo._fields_,\n            [\n                (\"a\", ctypes.c_int),\n                (\"b\", ctypes.c_char),\n                (\"c\", ctypes.c_int, 2),\n                (\"d\", ctypes.c_int, 15),\n                (\"unnamed_1\", ctypes.c_int, 17),\n            ],\n        )\n\n    def test_character_constants(self):\n        \"\"\"Test if integer constants is correctly parsed\"\"\"\n        self.assertEqual(ConstantsTest.module.CHAR_CONST, \"🍌\")\n\n\nclass NULLTest(unittest.TestCase):\n    \"Test correct parsing and generation of NULL\"\n\n    @classmethod\n    def setUpClass(cls):\n        header_str = \"#define A_NULL_MACRO NULL\"\n        cls.module, _ = generate(header_str)  # , all_headers=True)\n\n    @classmethod\n    def tearDownClass(cls):\n        del NULLTest.module\n        cleanup()\n\n    def test_null_type(self):\n        \"\"\"Test if NULL is correctly parsed\"\"\"\n        self.assertEqual(NULLTest.module.A_NULL_MACRO, None)\n\n\n@unittest.skipUnless(sys.platform == \"darwin\", \"requires Mac\")\nclass MacromanEncodeTest(unittest.TestCase):\n    \"\"\"Test if source file with mac_roman encoding is correctly parsed.\n\n    This test is skipped on non-mac platforms.\n    \"\"\"\n\n    
@classmethod\n    def setUpClass(cls):\n        cls.mac_roman_file = \"temp_mac.h\"\n        mac_header_str = b\"\"\"\n        #define kICHelper                       \"\\xa9\\\\pHelper\\xa5\"\n\n        \"\"\"\n\n        with open(cls.mac_roman_file, \"wb\") as mac_file:\n            mac_file.write(mac_header_str)\n\n        header_str = \"\"\"\n        #include \"temp_mac.h\"\n\n        #define MYSTRING kICHelper\n\n        \"\"\"\n\n        cls.module, _ = generate(header_str)\n\n    @classmethod\n    def tearDownClass(cls):\n        del cls.module\n        os.remove(cls.mac_roman_file)\n        cleanup()\n\n    def test_macroman_encoding_source(self):\n        module = MacromanEncodeTest.module\n        expected = b\"\\xef\\xbf\\xbd\\\\pHelper\\xef\\xbf\\xbd\".decode(\"utf-8\")\n        self.assertEqual(module.MYSTRING, expected)\n\n\ndef main(argv=None):\n    if argv is None:\n        argv = sys.argv\n\n    set_logging_level(logging.CRITICAL)  # do not log anything\n    unittest.main()\n\n    return 0\n\n\nif __name__ == \"__main__\":\n    sys.exit(main())\n"
  },
  {
    "path": "todo.txt",
    "content": "1. Convert defines from \"errno.h\" into imports from the Python errno module.\n2. Search through code for \"XXX\" and see what can be done.\n"
  },
  {
    "path": "tox.ini",
    "content": "[tox]\nenvlist = py37, py38, py39\nskip_missing_interpreters = true\n\n[testenv]\ndeps =\n    pytest\ncommands =\n    pytest -v --showlocals tests/testsuite.py\n    # pytest -v -v -k struct_json --showlocals ctypesgen/test/testsuite.py\n\n[testenv:black]\ndeps =\n    six\n    black==23.3.0\nbasepython = python3.7\ncommands =\n    black --check --diff setup.py run.py ctypesgen/ --exclude='.*tab.py|ctypesgen/parser/cgrammar.py|ctypesgen/parser/lex.py|ctypesgen/parser/yacc.py'\n\n[testenv:package]\ndeps =\n    build\n    twine\nbasepython = python3.7\ncommands =\n    python -m build\n    twine check dist/*.whl\n\n[testenv:upload]\ndeps =\n    build\n    twine\nbasepython = python3.7\ncommands =\n    python -m build\n    twine upload dist/*.whl\n"
  }
]