[
  {
    "path": ".github/ISSUE_TEMPLATE/bug_report.md",
    "content": "---\nname: Bug report\nabout: Create a report to help resolve an issue.\ntitle: ''\nlabels: bug\nassignees: ''\n\n---\n\n**Describe the issue**\nA clear and concise description of what the bug is.\n\n**Run Log**\nPlease attach your `run.log` detailing the issue.  \n\n**Other comments (optional)**\n"
  },
  {
    "path": ".github/ISSUE_TEMPLATE/feature_request.md",
    "content": "---\nname: Feature request\nabout: Suggest an idea for this project\ntitle: ''\nlabels: enhancement\nassignees: ''\n\n---\n\n**Is your feature request related to a problem? Please describe.**\nA clear and concise description of what the problem is.\n\n**Describe the solution you'd like**\nA clear and concise description of what you want to happen.\n\n**Additional context**\nAdd any other context about the feature request here.\n"
  },
  {
    "path": ".github/ISSUE_TEMPLATE/question---support-.md",
    "content": "---\nname: 'Question / Support '\nabout: Ask a question or reqeust support\ntitle: ''\nlabels: question\nassignees: ''\n\n---\n\n**\n"
  },
  {
    "path": ".github/ISSUE_TEMPLATE/question.md",
    "content": "---\nname: Question\nabout: Ask a question\ntitle: ''\nlabels: question\nassignees: ''\n\n---\n\n**Describe you query**\nWhat would you like to know / what are you trying to achieve?\n"
  },
  {
    "path": ".github/workflows/main.yml",
    "content": "name: WA Test Suite\n\non:\n  push:\n    branches: [ master ]\n  pull_request:\n    branches: [ master ]\n    types: [opened, synchronize, reopened, ready_for_review]\n  schedule:\n    - cron: 0 2 * * *\n  # Allows runing this workflow manually from the Actions tab\n  workflow_dispatch:\n\njobs:\n  Run-Linters-and-Tests:\n    runs-on: ubuntu-22.04\n    steps:\n    - uses: actions/checkout@v2\n    - name: Set up Python 3.8.18\n      uses: actions/setup-python@v2\n      with:\n        python-version: 3.8.18\n    - name: git-bash\n      uses: pkg-src/github-action-git-bash@v1.1\n    - name: Install dependencies\n      run: |\n        python -m pip install --upgrade pip\n        cd /tmp && git clone https://github.com/ARM-software/devlib.git && cd devlib && pip install .\n        cd $GITHUB_WORKSPACE && pip install .[test]\n        python -m pip install pylint==2.6.2 pep8 flake8 mock nose\n    - name: Run pylint\n      run: |\n        cd $GITHUB_WORKSPACE && ./dev_scripts/pylint wa/\n    - name: Run PEP8\n      run: |\n        cd $GITHUB_WORKSPACE && ./dev_scripts/pep8 wa\n    - name: Run nose tests\n      run: |\n        nosetests\n\n  Execute-Test-Workload-and-Process:\n    runs-on: ubuntu-22.04\n    strategy:\n      matrix:\n        python-version: [3.7.17, 3.8.18, 3.9.21, 3.10.16, 3.13.2]\n    steps:\n    - uses: actions/checkout@v2\n    - name: Set up Python ${{ matrix.python-version }}\n      uses: actions/setup-python@v2\n      with:\n        python-version: ${{ matrix.python-version }}\n    - name: git-bash\n      uses: pkg-src/github-action-git-bash@v1.1\n    - name: Install dependencies\n      run: |\n        python -m pip install --upgrade pip\n        cd /tmp && git clone https://github.com/ARM-software/devlib.git && cd devlib && pip install .\n        cd $GITHUB_WORKSPACE && pip install .\n    - name: Run test workload\n      run: |\n        cd /tmp && wa run $GITHUB_WORKSPACE/tests/ci/idle_agenda.yaml -v -d idle_workload\n    - name: Test 
Process Command\n      run: |\n        cd /tmp && wa process -f -p csv idle_workload\n\n  Test-WA-Commands:\n    runs-on: ubuntu-22.04\n    strategy:\n      matrix:\n        python-version: [3.7.17, 3.8.18, 3.9.21, 3.10.16, 3.13.2]\n    steps:\n    - uses: actions/checkout@v2\n    - name: Set up Python ${{ matrix.python-version }}\n      uses: actions/setup-python@v2\n      with:\n        python-version: ${{ matrix.python-version }}\n    - name: git-bash\n      uses: pkg-src/github-action-git-bash@v1.1\n    - name: Install dependencies\n      run: |\n        python -m pip install --upgrade pip\n        cd /tmp && git clone https://github.com/ARM-software/devlib.git && cd devlib && pip install .\n        cd $GITHUB_WORKSPACE && pip install .\n    - name: Test Show Command\n      run: |\n        wa show dhrystone && wa show generic_android && wa show trace-cmd && wa show csv\n    - name: Test List Command\n      run: |\n        wa list all\n    - name: Test Create Command\n      run: |\n        wa create agenda dhrystone generic_android csv trace_cmd && wa create package test && wa create workload test\n"
  },
  {
    "path": ".gitignore",
    "content": "*.egg-info\n*.pyc\n*.bak\n*.o\n*.cmd\n*.iml\nModule.symvers\nmodules.order\n*~\ntags\nbuild/\ndist/\n.ropeproject/\nwa_output/\ndoc/source/plugins/\nMANIFEST\n*.orig\nlocal.properties\npmu_logger.mod.c\n.tmp_versions\nobj/\nlibs/armeabi\n**/uiauto/**/build/\n**/uiauto/**/.gradle\n**/uiauto/**/.idea\n**/uiauto/**/proguard-rules.pro\n**/uiauto/app/libs/\n**/uiauto/*.properties\n**/uiauto/**/.project\n**/uiauto/**/.settings\n**/uiauto/**/.classpath\ndoc/source/developer_information/developer_guide/instrument_method_map.rst\ndoc/source/run_config/\n.eggs\n"
  },
  {
    "path": ".readthedocs.yml",
    "content": "# .readthedocs.yml\n# Read the Docs configuration file\n# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details\n\n# Required\nversion: 2\n\n# Build documentation in the docs/ directory with Sphinx\nsphinx:\n  builder: html\n  configuration: doc/source/conf.py\n\n# Build the docs in additional formats such as PDF and ePub\nformats: all\n\n\n# Configure the build environment\nbuild:\n  os: ubuntu-22.04\n  tools:\n    python: \"3.11\"\n\n# Ensure doc dependencies are installed before building\npython:\n   install:\n      - requirements: doc/requirements.txt\n      - method: pip\n        path: .\n"
  },
  {
    "path": "LICENSE",
    "content": "\n                                 Apache License\n                           Version 2.0, January 2004\n                        http://www.apache.org/licenses/\n\n   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION\n\n   1. Definitions.\n\n      \"License\" shall mean the terms and conditions for use, reproduction,\n      and distribution as defined by Sections 1 through 9 of this document.\n\n      \"Licensor\" shall mean the copyright owner or entity authorized by\n      the copyright owner that is granting the License.\n\n      \"Legal Entity\" shall mean the union of the acting entity and all\n      other entities that control, are controlled by, or are under common\n      control with that entity. For the purposes of this definition,\n      \"control\" means (i) the power, direct or indirect, to cause the\n      direction or management of such entity, whether by contract or\n      otherwise, or (ii) ownership of fifty percent (50%) or more of the\n      outstanding shares, or (iii) beneficial ownership of such entity.\n\n      \"You\" (or \"Your\") shall mean an individual or Legal Entity\n      exercising permissions granted by this License.\n\n      \"Source\" form shall mean the preferred form for making modifications,\n      including but not limited to software source code, documentation\n      source, and configuration files.\n\n      \"Object\" form shall mean any form resulting from mechanical\n      transformation or translation of a Source form, including but\n      not limited to compiled object code, generated documentation,\n      and conversions to other media types.\n\n      \"Work\" shall mean the work of authorship, whether in Source or\n      Object form, made available under the License, as indicated by a\n      copyright notice that is included in or attached to the work\n      (an example is provided in the Appendix below).\n\n      \"Derivative Works\" shall mean any work, whether in Source or Object\n      
form, that is based on (or derived from) the Work and for which the\n      editorial revisions, annotations, elaborations, or other modifications\n      represent, as a whole, an original work of authorship. For the purposes\n      of this License, Derivative Works shall not include works that remain\n      separable from, or merely link (or bind by name) to the interfaces of,\n      the Work and Derivative Works thereof.\n\n      \"Contribution\" shall mean any work of authorship, including\n      the original version of the Work and any modifications or additions\n      to that Work or Derivative Works thereof, that is intentionally\n      submitted to Licensor for inclusion in the Work by the copyright owner\n      or by an individual or Legal Entity authorized to submit on behalf of\n      the copyright owner. For the purposes of this definition, \"submitted\"\n      means any form of electronic, verbal, or written communication sent\n      to the Licensor or its representatives, including but not limited to\n      communication on electronic mailing lists, source code control systems,\n      and issue tracking systems that are managed by, or on behalf of, the\n      Licensor for the purpose of discussing and improving the Work, but\n      excluding communication that is conspicuously marked or otherwise\n      designated in writing by the copyright owner as \"Not a Contribution.\"\n\n      \"Contributor\" shall mean Licensor and any individual or Legal Entity\n      on behalf of whom a Contribution has been received by Licensor and\n      subsequently incorporated within the Work.\n\n   2. Grant of Copyright License. 
Subject to the terms and conditions of\n      this License, each Contributor hereby grants to You a perpetual,\n      worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n      copyright license to reproduce, prepare Derivative Works of,\n      publicly display, publicly perform, sublicense, and distribute the\n      Work and such Derivative Works in Source or Object form.\n\n   3. Grant of Patent License. Subject to the terms and conditions of\n      this License, each Contributor hereby grants to You a perpetual,\n      worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n      (except as stated in this section) patent license to make, have made,\n      use, offer to sell, sell, import, and otherwise transfer the Work,\n      where such license applies only to those patent claims licensable\n      by such Contributor that are necessarily infringed by their\n      Contribution(s) alone or by combination of their Contribution(s)\n      with the Work to which such Contribution(s) was submitted. If You\n      institute patent litigation against any entity (including a\n      cross-claim or counterclaim in a lawsuit) alleging that the Work\n      or a Contribution incorporated within the Work constitutes direct\n      or contributory patent infringement, then any patent licenses\n      granted to You under this License for that Work shall terminate\n      as of the date such litigation is filed.\n\n   4. Redistribution. 
You may reproduce and distribute copies of the\n      Work or Derivative Works thereof in any medium, with or without\n      modifications, and in Source or Object form, provided that You\n      meet the following conditions:\n\n      (a) You must give any other recipients of the Work or\n          Derivative Works a copy of this License; and\n\n      (b) You must cause any modified files to carry prominent notices\n          stating that You changed the files; and\n\n      (c) You must retain, in the Source form of any Derivative Works\n          that You distribute, all copyright, patent, trademark, and\n          attribution notices from the Source form of the Work,\n          excluding those notices that do not pertain to any part of\n          the Derivative Works; and\n\n      (d) If the Work includes a \"NOTICE\" text file as part of its\n          distribution, then any Derivative Works that You distribute must\n          include a readable copy of the attribution notices contained\n          within such NOTICE file, excluding those notices that do not\n          pertain to any part of the Derivative Works, in at least one\n          of the following places: within a NOTICE text file distributed\n          as part of the Derivative Works; within the Source form or\n          documentation, if provided along with the Derivative Works; or,\n          within a display generated by the Derivative Works, if and\n          wherever such third-party notices normally appear. The contents\n          of the NOTICE file are for informational purposes only and\n          do not modify the License. 
You may add Your own attribution\n          notices within Derivative Works that You distribute, alongside\n          or as an addendum to the NOTICE text from the Work, provided\n          that such additional attribution notices cannot be construed\n          as modifying the License.\n\n      You may add Your own copyright statement to Your modifications and\n      may provide additional or different license terms and conditions\n      for use, reproduction, or distribution of Your modifications, or\n      for any such Derivative Works as a whole, provided Your use,\n      reproduction, and distribution of the Work otherwise complies with\n      the conditions stated in this License.\n\n   5. Submission of Contributions. Unless You explicitly state otherwise,\n      any Contribution intentionally submitted for inclusion in the Work\n      by You to the Licensor shall be under the terms and conditions of\n      this License, without any additional terms or conditions.\n      Notwithstanding the above, nothing herein shall supersede or modify\n      the terms of any separate license agreement you may have executed\n      with Licensor regarding such Contributions.\n\n   6. Trademarks. This License does not grant permission to use the trade\n      names, trademarks, service marks, or product names of the Licensor,\n      except as required for reasonable and customary use in describing the\n      origin of the Work and reproducing the content of the NOTICE file.\n\n   7. Disclaimer of Warranty. Unless required by applicable law or\n      agreed to in writing, Licensor provides the Work (and each\n      Contributor provides its Contributions) on an \"AS IS\" BASIS,\n      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\n      implied, including, without limitation, any warranties or conditions\n      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A\n      PARTICULAR PURPOSE. 
You are solely responsible for determining the\n      appropriateness of using or redistributing the Work and assume any\n      risks associated with Your exercise of permissions under this License.\n\n   8. Limitation of Liability. In no event and under no legal theory,\n      whether in tort (including negligence), contract, or otherwise,\n      unless required by applicable law (such as deliberate and grossly\n      negligent acts) or agreed to in writing, shall any Contributor be\n      liable to You for damages, including any direct, indirect, special,\n      incidental, or consequential damages of any character arising as a\n      result of this License or out of the use or inability to use the\n      Work (including but not limited to damages for loss of goodwill,\n      work stoppage, computer failure or malfunction, or any and all\n      other commercial damages or losses), even if such Contributor\n      has been advised of the possibility of such damages.\n\n   9. Accepting Warranty or Additional Liability. While redistributing\n      the Work or Derivative Works thereof, You may choose to offer,\n      and charge a fee for, acceptance of support, warranty, indemnity,\n      or other liability obligations and/or rights consistent with this\n      License. However, in accepting such obligations, You may act only\n      on Your own behalf and on Your sole responsibility, not on behalf\n      of any other Contributor, and only if You agree to indemnify,\n      defend, and hold each Contributor harmless for any liability\n      incurred by, or claims asserted against, such Contributor by reason\n      of your accepting any such warranty or additional liability.\n\n   END OF TERMS AND CONDITIONS\n\n   APPENDIX: How to apply the Apache License to your work.\n\n      To apply the Apache License to your work, attach the following\n      boilerplate notice, with the fields enclosed by brackets \"[]\"\n      replaced with your own identifying information. 
(Don't include\n      the brackets!)  The text should be enclosed in the appropriate\n      comment syntax for the file format. We also recommend that a\n      file or class name and description of purpose be included on the\n      same \"printed page\" as the copyright notice for easier\n      identification within third-party archives.\n\n   Copyright [yyyy] [name of copyright owner]\n\n   Licensed under the Apache License, Version 2.0 (the \"License\");\n   you may not use this file except in compliance with the License.\n   You may obtain a copy of the License at\n\n       http://www.apache.org/licenses/LICENSE-2.0\n\n   Unless required by applicable law or agreed to in writing, software\n   distributed under the License is distributed on an \"AS IS\" BASIS,\n   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n   See the License for the specific language governing permissions and\n   limitations under the License.\n"
  },
  {
    "path": "MANIFEST.in",
    "content": "recursive-include scripts *\nrecursive-include doc *\nrecursive-include wa *\n"
  },
  {
    "path": "README.rst",
    "content": "[DEPRECATED] Workload Automation\n++++++++++++++++++++++++++++++++++++++++++++\n\n⚠️ **Development of this project has moved to a new repository.**\n\nPlease visit the new location for the latest code, issues, and contributions:\n\nhttps://gitlab.arm.com/tooling/workload-automation/workload-automation\n\nThis repository is no longer actively maintained.\n\n-------------\n\nWorkload Automation (WA) is a framework for executing workloads and collecting\nmeasurements on Android and Linux devices. WA includes automation for nearly 40\nworkloads and supports some common instrumentation (ftrace, hwmon) along with a\nnumber of output formats.\n\nWA is designed primarily as a developer tool/framework to facilitate data driven\ndevelopment by providing a method of collecting measurements from a device in a\nrepeatable way.\n\nWA is highly extensible. Most of the concrete functionality is implemented via\nplug-ins, and it is easy to write new plug-ins to support new device types,\nworkloads, instruments or output processing.\n\n\nRequirements\n============\n\n- Python 3.5+\n- Linux (should work on other Unixes, but untested)\n- Latest Android SDK (ANDROID_HOME must be set) for Android devices, or\n- SSH for Linux devices\n\n\nInstallation\n============\n\nTo install::\n\n        git clone git@github.com:ARM-software/workload-automation.git workload-automation\n        sudo -H python setup [install|develop]\n\nNote: A `requirements.txt` is included however this is designed to be used as a\nreference for known working versions rather than as part of a standard\ninstallation.\n\nPlease refer to the `installation section <http://workload-automation.readthedocs.io/en/latest/user_information.html#install>`_\nin the documentation for more details.\n\n\nBasic Usage\n===========\n\nPlease see the `Quickstart <http://workload-automation.readthedocs.io/en/latest/user_information.html#user-guide>`_\nsection of the documentation.\n\n\nDocumentation\n=============\n\nYou 
can view pre-built HTML documentation `here <http://workload-automation.readthedocs.io/en/latest/>`_.\n\nDocumentation in reStructuredText format may be found under ``doc/source``. To\ncompile it into cross-linked HTML, make sure you have `Sphinx\n<http://sphinx-doc.org/install.html>`_ installed, and then ::\n\n        cd doc\n        make html\n\n\nLicense\n=======\n\nWorkload Automation is distributed under `Apache v2.0 License\n<http://www.apache.org/licenses/LICENSE-2.0>`_. Workload automation includes\nbinaries distributed under different licenses (see LICENSE files in specific\ndirectories).\n\n\nFeedback, Contributions and Support\n===================================\n\n- Please use the GitHub Issue Tracker associated with this repository for\n  feedback.\n- ARM licensees may contact ARM directly via their partner managers.\n- We welcome code contributions via GitHub Pull requests. Please see\n  \"Contributing Code\" section of the documentation for details.\n"
  },
  {
    "path": "dev_scripts/README",
    "content": "This directory contains scripts that aid the development of Workload Automation.\nThey were written to work as part of WA development environment and are not\nguarnteed to work if moved outside their current location. They should not be\ndistributed as part of WA releases.\n\nScripts\n-------\n\n:clean_install: Performs a clean install of WA from source. This will remove any\n                existing WA install (regardless of whether it was made from\n                source or through a tarball with pip).\n\n:clear_env: Clears ~/.workload_automation.\n\n:get_apk_versions: Prints out a table of APKs and their versons found under the\n                   path specified as the argument.\n\n:pep8: Runs flake8 (formerly called \"pep8\") code checker (must be\n       installed) over wa/ with the correct settings for WA.\n\n:pylint: Runs pylint (must be installed) over wlauto with the correct settings\n         for WA.\n\n:rebuild_all_uiauto: Rebuild UIAutomator APKs for workloads that have them. This\n                     is useful to make sure they're all using the latest\n                     uiauto.arr after the latter has been updated.\n\n:update_copyrights: Checks and updates the year of the copyright in source files,\n                    adding a copyright header if it's not already there.\n"
  },
  {
    "path": "dev_scripts/clean_install",
    "content": "#!/usr/bin/env python\nimport os\nimport sys\nimport shutil\nimport logging\n\n\nlogging.basicConfig(level=logging.INFO)\n\n\ndef get_installed_path():\n    paths = [p for p in sys.path if len(p) > 2]\n    for path in paths:\n        candidate = os.path.join(path, 'wlauto')\n        if os.path.isdir(candidate):\n            return candidate\n\n\nif __name__ == '__main__':\n    installed_path = get_installed_path()\n    if installed_path:\n        logging.info('Removing installed package from {}.'.format(installed_path))\n        shutil.rmtree(installed_path)\n    if os.path.isdir('build'):\n        logging.info('Removing local build directory.')\n        shutil.rmtree('build')\n    logging.info('Removing *.pyc files.')\n    for root, dirs, files in os.walk('wlauto'):\n        for file in files:\n            if file.lower().endswith('.pyc'):\n                os.remove(os.path.join(root, file))\n\n    os.system('python setup.py install')\n\n"
  },
  {
    "path": "dev_scripts/clear_env",
    "content": "#!/bin/bash\n# Clear workload automation user environment.\nrm -rf ~/.workload_automation/\n"
  },
  {
    "path": "dev_scripts/get_apk_versions",
    "content": "#!/usr/bin/env python\nimport os\nimport sys\nimport re\nimport logging\nimport subprocess\nimport argparse\n\nsys.path.append(os.path.join(os.path.dirname(__file__), '..'))\n\nfrom wlauto.exceptions import WAError, ToolError\nfrom wlauto.utils.doc import format_simple_table\n\n\ndef get_aapt_path():\n    \"\"\"Return the full path to aapt tool.\"\"\"\n    sdk_path = os.getenv('ANDROID_HOME')\n    if not sdk_path:\n        raise ToolError('Please make sure you have Android SDK installed and have ANDROID_HOME set.')\n    build_tools_directory = os.path.join(sdk_path, 'build-tools')\n    versions = os.listdir(build_tools_directory)\n    for version in reversed(sorted(versions)):\n        aapt_path = os.path.join(build_tools_directory, version, 'aapt')\n        if os.path.isfile(aapt_path):\n            logging.debug('Found aapt for version {}'.format(version))\n            return aapt_path\n    else:\n        raise ToolError('aapt not found. Please make sure at least one Android platform is installed.')\n\n\ndef get_apks(path):\n    \"\"\"Return a list of paths to all APK files found under the specified directory.\"\"\"\n    apks = []\n    for root, dirs, files in os.walk(path):\n        for file in files:\n            _, ext = os.path.splitext(file)\n            if ext.lower() == '.apk':\n                apks.append(os.path.join(root, file))\n    return apks\n\n\nclass ApkVersionInfo(object):\n\n    def __init__(self, workload=None, package=None, label=None, version_name=None, version_code=None):\n        self.workload = workload\n        self.package = package\n        self.label = label\n        self.version_name = version_name\n        self.version_code = version_code\n\n    def to_tuple(self):\n        return (self.workload, self.package, self.label, self.version_name, self.version_code)\n\n\nversion_regex = re.compile(r\"name='(?P<name>[^']+)' versionCode='(?P<vcode>[^']+)' versionName='(?P<vname>[^']+)'\")\n\n\ndef 
extract_version_info(apk_path, aapt):\n    command = [aapt, 'dump', 'badging', apk_path]\n    output = subprocess.check_output(command)\n    version_info = ApkVersionInfo(workload=apk_path.split(os.sep)[-2])\n    for line in output.split('\\n'):\n        if line.startswith('application-label:'):\n            version_info.label = line.split(':')[1].strip().replace('\\'', '')\n        elif line.startswith('package:'):\n            match = version_regex.search(line)\n            if match:\n                version_info.package = match.group('name')\n                version_info.version_code = match.group('vcode')\n                version_info.version_name = match.group('vname')\n        else:\n            pass  # not interested\n    return version_info\n\n\ndef get_apk_versions(path, aapt):\n    apks = get_apks(path)\n    versions = [extract_version_info(apk, aapt) for apk in apks]\n    return versions\n\n\nif __name__ == '__main__':\n    try:\n        aapt = get_aapt_path()\n        parser = argparse.ArgumentParser()\n        parser.add_argument('path', metavar='PATH', help='Location to look for APKs.')\n        args = parser.parse_args()\n\n        versions = get_apk_versions(args.path, aapt)\n        table = format_simple_table([v.to_tuple() for v in versions],\n                                    align='<<<>>',\n                                    headers=['workload', 'package', 'name', 'version code', 'version name'])\n        print table\n    except WAError, e:\n        logging.error(e)\n        sys.exit(1)\n"
  },
  {
    "path": "dev_scripts/pep8",
    "content": "#!/bin/bash\n\nDEFAULT_DIRS=(\n        wa\n)\n\nEXCLUDE=wa/tests,wa/framework/target/descriptor.py\nEXCLUDE_COMMA=\nIGNORE=E501,E265,E266,W391,E401,E402,E731,W503,W605,F401\n\nif ! hash flake8 2>/dev/null; then\n\techo \"flake8 not found in PATH\"\n\techo \"you can install it with \\\"sudo pip install flake8\\\"\"\n\texit 1\nfi\n\nif [[ \"$1\" == \"\" ]]; then\n\tTHIS_DIR=\"`dirname \\\"$0\\\"`\"\n\tpushd $THIS_DIR/.. > /dev/null\n\tfor dir in \"${DEFAULT_DIRS[@]}\"; do\n\t\tflake8 --exclude=$EXCLUDE,$EXCLUDE_COMMA --ignore=$IGNORE $dir\n\tdone\n\tflake8 --exclude=$EXCLUDE --ignore=$IGNORE,E241 $(echo \"$EXCLUDE_COMMA\" | sed 's/,/ /g')\n\tpopd > /dev/null\nelse\n\tflake8 --exclude=$EXCLUDE,$EXCLUDE_COMMA --ignore=$IGNORE $1\nfi\n\n"
  },
  {
    "path": "dev_scripts/pylint",
    "content": "#!/bin/bash\nDEFAULT_DIRS=(\n\twa\n)\n\ntarget=$1\n\ncompare_versions() {\n    if [[ $1 == $2 ]]; then\n        return 0\n    fi\n\n    local IFS=.\n    local i ver1=($1) ver2=($2)\n\n    for ((i=${#ver1[@]}; i<${#ver2[@]}; i++)); do\n        ver1[i]=0\n    done\n\n    for ((i=0; i<${#ver1[@]}; i++)); do\n        if [[ -z ${ver2[i]} ]]; then\n            ver2[i]=0\n        fi\n        if ((10#${ver1[i]} > 10#${ver2[i]})); then\n            return 1\n        fi\n        if ((10#${ver1[i]} < 10#${ver2[i]})); then\n            return 2\n        fi\n    done\n\n    return 0\n}\n\npylint_version=$(python -c 'from pylint.__pkginfo__ import version; print(version)' 2>/dev/null)\nif [ \"x$pylint_version\" == \"x\" ]; then\n\tpylint_version=$(python3 -c 'from pylint.__pkginfo__ import version; print(version)' 2>/dev/null)\nfi\nif [ \"x$pylint_version\" == \"x\" ]; then\n\tpylint_version=$(python3 -c 'from pylint import version; print(version)' 2>/dev/null)\nfi\nif [ \"x$pylint_version\" == \"x\" ]; then\n\techo \"ERROR: no pylint verison found; is it installed?\"\n\texit 1\nfi\n\ncompare_versions $pylint_version \"1.9.2\"\nresult=$?\nif [ \"$result\" == \"2\" ]; then\n\techo \"ERROR: pylint version must be at least 1.9.2; found $pylint_version\"\n\texit 1\nfi\n\nset -e\nTHIS_DIR=\"`dirname \\\"$0\\\"`\"\nCWD=$PWD\npushd $THIS_DIR > /dev/null\nif [[ \"$target\" == \"\" ]]; then\n\tfor dir in \"${DEFAULT_DIRS[@]}\"; do\n\t\tPYTHONPATH=. pylint --rcfile ../extras/pylintrc --load-plugins pylint_plugins ../$dir\n\tdone\nelse\n\tPYTHONPATH=. pylint --rcfile ../extras/pylintrc --load-plugins pylint_plugins $CWD/$target\nfi\npopd > /dev/null\n"
  },
  {
    "path": "dev_scripts/pylint_plugins.py",
    "content": "import sys\n\nfrom astroid import MANAGER\nfrom astroid import scoped_nodes\n\n\nIGNORE_ERRORS = {\n        ('attribute-defined-outside-init', ): [\n            'wa.workloads',\n            'wa.instruments',\n            'wa.output_procesors',\n        ]\n}\n\n\ndef register(linter):\n    pass\n\n\ndef transform(mod):\n    for errors, paths in IGNORE_ERRORS.items():\n        for path in paths:\n            if path in mod.name:\n                text = mod.stream().read()\n                if not text.strip():\n                    return\n\n                text = text.split(b'\\n')\n                # NOTE: doing it this way because the \"correct\" approach below does not\n                #       work. We can get away with this, because in well-formated WA files,\n                #       the initial line is the copyright header's blank line.\n                if b'pylint:' in text[0]:\n                    msg = 'pylint directive found on the first line of {}; please move to below copyright header'\n                    raise RuntimeError(msg.format(mod.name))\n                char = chr(text[0][0])\n                if text[0].strip() and char != '#':\n                    msg = 'first line of {} is not a comment; is the copyright header missing?'\n                    raise RuntimeError(msg.format(mod.name))\n                text[0] = '# pylint: disable={}'.format(','.join(errors)).encode('utf-8')\n                mod.file_bytes = b'\\n'.join(text)\n\n                # This is what *should* happen, but doesn't work.\n                # text.insert(0, '# pylint: disable=attribute-defined-outside-init')\n                # mod.file_bytes = '\\n'.join(text)\n                # mod.tolineno += 1\n\n\nMANAGER.register_transform(scoped_nodes.Module, transform)\n"
  },
  {
    "path": "dev_scripts/rebuild_all_uiauto",
    "content": "#!/bin/bash\n#\n# This script rebuilds all uiauto APKs as well as the base uiauto.arr. This is\n# useful when changes have been made to the base uiautomation classes and so\n# all automation needs to be rebuilt to link against the updated uiauto.arr.\nset -e\n\nSCRIPT_DIR=\"$(cd \"$(dirname \"${BASH_SOURCE[0]}\")\" && pwd)\"\nBASE_DIR=\"$SCRIPT_DIR/../wa/framework/uiauto\"\nWORKLOADS_DIR=\"$SCRIPT_DIR/../wa/workloads\"\n\npushd $BASE_DIR > /dev/null\necho \"building $(pwd)\"\n./build.sh\npopd > /dev/null\n\nfor uiauto_dir in $(find $WORKLOADS_DIR -type d -name uiauto); do\n    pushd $uiauto_dir > /dev/null\n    if [ -f build.sh ]; then\n        echo \"building $(pwd)\"\n        ./build.sh\n    fi\n    popd > /dev/null\ndone\n"
  },
  {
    "path": "dev_scripts/update_copyrights",
    "content": "#!/usr/bin/env python\n#\n# Script to put copyright headers into source files.\n#\nimport argparse\nimport logging\nimport os\nimport re\nimport string\nimport subprocess\nfrom datetime import datetime\n\nSOURCE_EXTENSIONS = {\n    '.py': ('#', '#', '#'),\n    '.sh': ('#', '#', '#'),\n    '.java': ('/*', '*/', ' *'),\n    '.c': ('/*', '*/', ' *'),\n    '.h': ('/*', '*/', ' *'),\n    '.cpp': ('/*', '*/', ' *'),\n}\n\nOLD_HEADER_TEMPLATE = string.Template(\n\"\"\"${begin_symbol} $$Copyright:\n${symbol} ----------------------------------------------------------------\n${symbol} This confidential and proprietary software may be used only as\n${symbol} authorised by a licensing agreement from ARM Limited\n${symbol}  (C) COPYRIGHT ${year} ARM Limited\n${symbol}       ALL RIGHTS RESERVED\n${symbol} The entire notice above must be reproduced on all authorised\n${symbol} copies and copies may only be made to the extent permitted\n${symbol} by a licensing agreement from ARM Limited.\n${symbol} ----------------------------------------------------------------\n${symbol} File:        ${file}\n${symbol} ----------------------------------------------------------------\n${symbol} $$\n${end_symbol}\n\"\"\"\n)\n\nHEADER_TEMPLATE = string.Template(\n\"\"\"${begin_symbol}    Copyright ${year} ARM Limited\n${symbol}\n${symbol} Licensed under the Apache License, Version 2.0 (the \"License\");\n${symbol} you may not use this file except in compliance with the License.\n${symbol} You may obtain a copy of the License at\n${symbol}\n${symbol}     http://www.apache.org/licenses/LICENSE-2.0\n${symbol}\n${symbol} Unless required by applicable law or agreed to in writing, software\n${symbol} distributed under the License is distributed on an \"AS IS\" BASIS,\n${symbol} WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n${symbol} See the License for the specific language governing permissions and\n${symbol} limitations under the 
License.\n${end_symbol}\n\"\"\"\n)\n\n# Minimum length, in characters, of a copy right header.\nMIN_HEADER_LENGTH = 500\n\nOLD_COPYRIGHT_REGEX = re.compile(r'\\(C\\) COPYRIGHT\\s+(?:(\\d+)-)?(\\d+)')\nCOPYRIGHT_REGEX = re.compile(r'Copyright\\s+(?:(\\d+)\\s*[-,]\\s*)?(\\d+) ARM Limited')\n\nDEFAULT_EXCLUDE_PATHS = [\n    os.path.join('wa', 'commands', 'templates'),\n]\n\n\nlogging.basicConfig(level=logging.INFO, format='%(levelname)-8s %(message)s')\n\n\ndef remove_old_copyright(filepath):\n    begin_symbol, end_symbol, symbol = SOURCE_EXTENSIONS[ext.lower()]\n    header = HEADER_TEMPLATE.substitute(begin_symbol=begin_symbol,\n                                        end_symbol=end_symbol,\n                                        symbol=symbol,\n                                        year='0',\n                                        file=os.path.basename(filepath))\n    header_line_count = len(header.splitlines())\n    with open(filepath) as fh:\n        lines = fh.readlines()\n    for i, line in enumerate(lines):\n        if OLD_COPYRIGHT_REGEX.search(line):\n            start_line = i -4\n            break\n    lines = lines[0:start_line] + lines[start_line + header_line_count:]\n    return ''.join(lines)\n\n\ndef add_copyright_header(filepath, year):\n    _, ext = os.path.splitext(filepath)\n    begin_symbol, end_symbol, symbol = SOURCE_EXTENSIONS[ext.lower()]\n    with open(filepath) as fh:\n        text = fh.read()\n    match = OLD_COPYRIGHT_REGEX.search(text)\n    if match:\n        _, year = update_year(text, year, copyright_regex=OLD_COPYRIGHT_REGEX)\n        text = remove_old_copyright(filepath)\n    header = HEADER_TEMPLATE.substitute(begin_symbol=begin_symbol,\n                                        end_symbol=end_symbol,\n                                        symbol=symbol,\n                                        year=year)\n    if text.strip().startswith('#!') or text.strip().startswith('# -*-'):\n        first_line, rest = text.split('\\n', 1)\n  
      updated_text = '\\n'.join([first_line, header, rest])\n    else:\n        updated_text = '\\n'.join([header, text])\n    with open(filepath, 'w') as wfh:\n        wfh.write(updated_text)\n\n\ndef update_year(text, year, copyright_regex=COPYRIGHT_REGEX, match=None):\n    if match is None:\n        match = copyright_regex.search(text)\n    old_year = match.group(1) or match.group(2)\n    updated_year_text = 'Copyright {}-{} ARM Limited'.format(old_year, year)\n    if old_year == year:\n        ret_year = '{}'.format(year)\n    else:\n        ret_year = '{}-{}'.format(old_year, year)\n    return (text.replace(match.group(0), updated_year_text), ret_year)\n\n\ndef get_git_year(path):\n    info = subprocess.check_output('git log -n 1 {}'.format(os.path.basename(path)),\n                                   shell=True, cwd=os.path.dirname(path))\n    if not info.strip():\n        return None\n\n    i = 1\n    while 'copyright' in info.lower():\n        info = subprocess.check_output('git log -n 1 --skip {} {}'.format(i, os.path.basename(path)),\n                                    shell=True, cwd=os.path.dirname(path))\n        if not info.strip():\n            return None\n\n    info_split_lines = info.split('\\n')\n    info_split_words = info_split_lines[2].split()\n    return int(info_split_words[5])\n\n\nif __name__ == '__main__':\n    parser = argparse.ArgumentParser()\n    parser.add_argument('path', help='Location to add copyrights to source files in.')\n    parser.add_argument('-n', '--update-no-ext', action='store_true',\n                        help='Will update files with no extension using # as the comment symbol.')\n    parser.add_argument('-x', '--exclude', action='append',\n                        help='Exclude this directory from the scan. 
May be used multiple times.')\n    args = parser.parse_args()\n\n    if args.update_no_ext:\n        SOURCE_EXTENSIONS[''] = ('#', '#', '#')\n\n    exclude_paths = DEFAULT_EXCLUDE_PATHS + (args.exclude or [])\n\n    current_year = datetime.now().year\n    for root, dirs, files in os.walk(args.path):\n        should_skip = False\n        for exclude_path in exclude_paths:\n            if exclude_path in os.path.realpath(root):\n                should_skip = True\n                break\n        if should_skip:\n            logging.info('Skipping {}'.format(root))\n            continue\n\n        logging.info('Checking {}'.format(root))\n        for entry in files:\n            _, ext = os.path.splitext(entry)\n            if ext.lower() in SOURCE_EXTENSIONS:\n                filepath = os.path.join(root, entry)\n                should_skip = False\n                for exclude_path in exclude_paths:\n                    if exclude_path in os.path.realpath(filepath):\n                        should_skip = True\n                        break\n                if should_skip:\n                    logging.info('\\tSkipping {}'.format(entry))\n                    continue\n                with open(filepath) as fh:\n                    text = fh.read()\n                if not text.strip():\n                    logging.info('\\tSkipping empty  {}'.format(entry))\n                    continue\n\n                year_modified = get_git_year(filepath) or current_year\n                if len(text) < MIN_HEADER_LENGTH:\n                    logging.info('\\tAdding header to {}'.format(entry))\n                    add_copyright_header(filepath, year_modified)\n                else:\n                    first_chunk = text[:MIN_HEADER_LENGTH]\n                    match = COPYRIGHT_REGEX.search(first_chunk)\n                    if not match:\n                        if OLD_COPYRIGHT_REGEX.search(first_chunk):\n                            logging.warn('\\tOld copyright message detected 
and replaced in {}'.format(entry))\n                            add_copyright_header(filepath, year_modified)\n                        elif '(c)' in first_chunk or '(C)' in first_chunk:\n                            logging.warn('\\tAnother copyright header appears to be in {}'.format(entry))\n                        else:\n                            logging.info('\\tAdding header to {}'.format(entry))\n                            add_copyright_header(filepath, current_year)\n                    else:\n                        # Found an existing copyright header. Update the\n                        # year if needed, otherwise, leave it alone.\n                        last_year = int(match.group(2))\n                        if year_modified > last_year:\n                            logging.info('\\tUpdating year in {}'.format(entry))\n                            text, _ = update_year(text, year_modified, COPYRIGHT_REGEX, match)\n                            with open(filepath, 'w') as wfh:\n                                wfh.write(text)\n                        else:\n                            logging.info('\\t{}: OK'.format(entry))\n"
  },
  {
    "path": "doc/Makefile",
    "content": "# Makefile for Sphinx documentation\n#\n\n# You can set these variables from the command line.\nSPHINXOPTS    =\nSPHINXBUILD   = sphinx-build\nPAPER         =\nBUILDDIR      = build\n\nSPHINXAPI     = sphinx-apidoc\nSPHINXAPIOPTS =\n\nWAEXT         = ./build_plugin_docs.py\nWAEXTOPTS     = source/plugins ../wa ../wa/tests ../wa/framework\n\n\n# Internal variables.\nPAPEROPT_a4     = -D latex_paper_size=a4\nPAPEROPT_letter = -D latex_paper_size=letter\nALLSPHINXOPTS   = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source\n# the i18n builder cannot share the environment and doctrees with the others\nI18NSPHINXOPTS  = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source\n\n.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext\n\nhelp:\n\t@echo \"Please use \\`make <target>' where <target> is one of\"\n\t@echo \"  html       to make standalone HTML files\"\n\t@echo \"  dirhtml    to make HTML files named index.html in directories\"\n\t@echo \"  singlehtml to make a single large HTML file\"\n\t@echo \"  pickle     to make pickle files\"\n\t@echo \"  json       to make JSON files\"\n\t@echo \"  htmlhelp   to make HTML files and a HTML help project\"\n\t@echo \"  qthelp     to make HTML files and a qthelp project\"\n\t@echo \"  devhelp    to make HTML files and a Devhelp project\"\n\t@echo \"  epub       to make an epub\"\n\t@echo \"  latex      to make LaTeX files, you can set PAPER=a4 or PAPER=letter\"\n\t@echo \"  latexpdf   to make LaTeX files and run them through pdflatex\"\n\t@echo \"  text       to make text files\"\n\t@echo \"  man        to make manual pages\"\n\t@echo \"  texinfo    to make Texinfo files\"\n\t@echo \"  info       to make Texinfo files and run them through makeinfo\"\n\t@echo \"  gettext    to make PO message catalogs\"\n\t@echo \"  changes    to make an overview of all changed/added/deprecated items\"\n\t@echo \"  linkcheck  to check all 
external links for integrity\"\n\t@echo \"  doctest    to run all doctests embedded in the documentation (if enabled)\"\n\t@echo \"  coverage   to run documentation coverage checks\"\n\nclean:\n\trm -rf $(BUILDDIR)/*\n\trm -rf source/plugins/*\n\trm -rf source/developer_guide/instrument_method_map.rst\n\trm -rf source/run_config/*\n\ncoverage:\n\t$(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage\n\t@echo\n\t@echo \"Build finished. The coverage reports are in $(BUILDDIR)/coverage.\"\n\nhtml:\n\t$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html\n\t@echo\n\t@echo \"Build finished. The HTML pages are in $(BUILDDIR)/html.\"\n\ndirhtml:\n\t$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml\n\t@echo\n\t@echo \"Build finished. The HTML pages are in $(BUILDDIR)/dirhtml.\"\n\nsinglehtml:\n\t$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml\n\t@echo\n\t@echo \"Build finished. The HTML page is in $(BUILDDIR)/singlehtml.\"\n\npickle:\n\t$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle\n\t@echo\n\t@echo \"Build finished; now you can process the pickle files.\"\n\njson:\n\t$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json\n\t@echo\n\t@echo \"Build finished; now you can process the JSON files.\"\n\nhtmlhelp:\n\t$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp\n\t@echo\n\t@echo \"Build finished; now you can run HTML Help Workshop with the\" \\\n\t      \".hhp project file in $(BUILDDIR)/htmlhelp.\"\n\nqthelp:\n\t$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp\n\t@echo\n\t@echo \"Build finished; now you can run \"qcollectiongenerator\" with the\" \\\n\t      \".qhcp project file in $(BUILDDIR)/qthelp, like this:\"\n\t@echo \"# qcollectiongenerator $(BUILDDIR)/qthelp/WorkloadAutomation2.qhcp\"\n\t@echo \"To view the help file:\"\n\t@echo \"# assistant -collectionFile $(BUILDDIR)/qthelp/WorkloadAutomation2.qhc\"\n\ndevhelp:\n\t$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) 
$(BUILDDIR)/devhelp\n\t@echo\n\t@echo \"Build finished.\"\n\t@echo \"To view the help file:\"\n\t@echo \"# mkdir -p $$HOME/.local/share/devhelp/WorkloadAutomation2\"\n\t@echo \"# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/WorkloadAutomation2\"\n\t@echo \"# devhelp\"\n\nepub:\n\t$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub\n\t@echo\n\t@echo \"Build finished. The epub file is in $(BUILDDIR)/epub.\"\n\nlatex:\n\t$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex\n\t@echo\n\t@echo \"Build finished; the LaTeX files are in $(BUILDDIR)/latex.\"\n\t@echo \"Run \\`make' in that directory to run these through (pdf)latex\" \\\n\t      \"(use \\`make latexpdf' here to do that automatically).\"\n\nlatexpdf:\n\t$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex\n\t@echo \"Running LaTeX files through pdflatex...\"\n\t$(MAKE) -C $(BUILDDIR)/latex all-pdf\n\t@echo \"pdflatex finished; the PDF files are in $(BUILDDIR)/latex.\"\n\ntext:\n\t$(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text\n\t@echo\n\t@echo \"Build finished. The text files are in $(BUILDDIR)/text.\"\n\nman:\n\t$(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man\n\t@echo\n\t@echo \"Build finished. The manual pages are in $(BUILDDIR)/man.\"\n\ntexinfo:\n\t$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo\n\t@echo\n\t@echo \"Build finished. The Texinfo files are in $(BUILDDIR)/texinfo.\"\n\t@echo \"Run \\`make' in that directory to run these through makeinfo\" \\\n\t      \"(use \\`make info' here to do that automatically).\"\n\ninfo:\n\t$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo\n\t@echo \"Running Texinfo files through makeinfo...\"\n\tmake -C $(BUILDDIR)/texinfo info\n\t@echo \"makeinfo finished; the Info files are in $(BUILDDIR)/texinfo.\"\n\ngettext:\n\t$(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale\n\t@echo\n\t@echo \"Build finished. 
The message catalogs are in $(BUILDDIR)/locale.\"\n\nchanges:\n\t$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes\n\t@echo\n\t@echo \"The overview file is in $(BUILDDIR)/changes.\"\n\nlinkcheck:\n\t$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck\n\t@echo\n\t@echo \"Link check complete; look for any errors in the above output \" \\\n\t      \"or in $(BUILDDIR)/linkcheck/output.txt.\"\n\ndoctest:\n\t$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest\n\t@echo \"Testing of doctests in the sources finished, look at the \" \\\n\t      \"results in $(BUILDDIR)/doctest/output.txt.\"\n"
  },
  {
    "path": "doc/build_instrument_method_map.py",
    "content": "#!/usr/bin/env python\n#    Copyright 2015-2019 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\nimport os\nimport sys\nimport string\nfrom copy import copy\n\nfrom wa.framework.instrument import SIGNAL_MAP\nfrom wa.framework.signal import CallbackPriority\nfrom wa.utils.doc import format_simple_table\n\nOUTPUT_TEMPLATE_FILE =  os.path.join(os.path.dirname(__file__), 'source', 'instrument_method_map.template')\n\n\ndef generate_instrument_method_map(outfile):\n    signal_table = format_simple_table([(k, v) for k, v in SIGNAL_MAP.items()],\n                                       headers=['method name', 'signal'], align='<<')\n    decorator_names = map(lambda x: x.replace('high', 'fast').replace('low', 'slow'), CallbackPriority.names)\n    priority_table = format_simple_table(zip(decorator_names, CallbackPriority.names, CallbackPriority.values),\n            headers=['decorator', 'CallbackPriority name', 'CallbackPriority value'],  align='<>')\n    with open(OUTPUT_TEMPLATE_FILE) as fh:\n        template = string.Template(fh.read())\n    with open(outfile, 'w') as wfh:\n        wfh.write(template.substitute(signal_names=signal_table, priority_prefixes=priority_table))\n\n\nif __name__ == '__main__':\n    generate_instrument_method_map(sys.argv[1])\n"
  },
  {
    "path": "doc/build_plugin_docs.py",
    "content": "#!/usr/bin/env python\n#    Copyright 2014-2019 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\n\nimport os\nimport sys\n\nfrom wa import pluginloader\nfrom wa.framework.configuration.core import RunConfiguration, MetaConfiguration\nfrom wa.framework.target.descriptor import list_target_descriptions\nfrom wa.utils.doc import (strip_inlined_text, get_rst_from_plugin,\n                          get_params_rst, underline, line_break)\nfrom wa.utils.misc import capitalize\n\nGENERATE_FOR_PACKAGES = [\n    'wa.workloads',\n    'wa.instruments',\n    'wa.output_processors',\n]\n\n\ndef insert_contents_table(title='', depth=1):\n    \"\"\"\n    Insert a sphinx directive to insert a contents page with\n    a configurable title and depth.\n    \"\"\"\n    text = '''\\n\n.. contents:: {}\n   :depth: {}\n   :local:\\n\n'''.format(title, depth)\n    return text\n\n\ndef generate_plugin_documentation(source_dir, outdir, ignore_paths):\n    # pylint: disable=unused-argument\n    pluginloader.clear()\n    pluginloader.update(packages=GENERATE_FOR_PACKAGES)\n    if not os.path.exists(outdir):\n        os.mkdir(outdir)\n\n    for ext_type in pluginloader.kinds:\n        outfile = os.path.join(outdir, '{}s.rst'.format(ext_type))\n        with open(outfile, 'w') as wfh:\n            wfh.write('.. 
_{}s:\\n\\n'.format(ext_type.replace('_', '-')))\n            title = ' '.join([capitalize(w) for w in ext_type.split('_')])\n            wfh.write(underline('{}s'.format(title)))\n            wfh.write(insert_contents_table())\n            wfh.write(line_break())\n            exts = pluginloader.list_plugins(ext_type)\n            sorted_exts = iter(sorted(exts, key=lambda x: x.name))\n            try:\n                wfh.write(get_rst_from_plugin(next(sorted_exts)))\n            except StopIteration:\n                return\n            for ext in sorted_exts:\n                wfh.write(line_break())\n                wfh.write(get_rst_from_plugin(ext))\n\n\ndef generate_target_documentation(outdir):\n    targets_to_generate = ['generic_android',\n                           'generic_linux',\n                           'generic_chromeos',\n                           'generic_local',\n                           'juno_linux',\n                           'juno_android']\n\n    intro = (\n        '\\nThis is a list of commonly used targets and their device '\n        'parameters, to see a complete reference please use the'\n        ' WA :ref:`list command <list-command>`.\\n\\n\\n'\n    )\n\n    pluginloader.clear()\n    pluginloader.update(packages=['wa.framework.target.descriptor'])\n\n    target_descriptors = list_target_descriptions(pluginloader)\n    outfile = os.path.join(outdir, 'targets.rst')\n    with open(outfile, 'w') as wfh:\n        wfh.write(underline('Common Targets'))\n        wfh.write(intro)\n        for td in sorted(target_descriptors, key=lambda t: t.name):\n            if td.name not in targets_to_generate:\n                continue\n            text = underline(td.name, '~')\n            if hasattr(td, 'description'):\n                desc = strip_inlined_text(td.description or '')\n                text += desc\n            text += underline('Device Parameters:', '-')\n            text += get_params_rst(td.conn_params)\n            
text += get_params_rst(td.platform_params)\n            text += get_params_rst(td.target_params)\n            text += get_params_rst(td.assistant_params)\n            wfh.write(text)\n\n\ndef generate_run_config_documentation(outdir):\n    generate_config_documentation(RunConfiguration, outdir)\n\n\ndef generate_meta_config_documentation(outdir):\n    generate_config_documentation(MetaConfiguration, outdir)\n\n\ndef generate_config_documentation(config, outdir):\n    if not os.path.exists(outdir):\n        os.mkdir(outdir)\n\n    config_name = '_'.join(config.name.split())\n    outfile = os.path.join(outdir, '{}.rst'.format(config_name))\n    with open(outfile, 'w') as wfh:\n        wfh.write(get_params_rst(config.config_points))\n\n\nif __name__ == '__main__':\n    generate_plugin_documentation(sys.argv[2], sys.argv[1], sys.argv[3:])\n"
  },
  {
    "path": "doc/make.bat",
    "content": "@ECHO OFF\r\n\r\nREM Command file for Sphinx documentation\r\n\r\nif \"%SPHINXBUILD%\" == \"\" (\r\n\tset SPHINXBUILD=sphinx-build\r\n)\r\nset BUILDDIR=_build\r\nset ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% .\r\nset I18NSPHINXOPTS=%SPHINXOPTS% .\r\nif NOT \"%PAPER%\" == \"\" (\r\n\tset ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS%\r\n\tset I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS%\r\n)\r\n\r\nif \"%1\" == \"\" goto help\r\n\r\nif \"%1\" == \"help\" (\r\n\t:help\r\n\techo.Please use `make ^<target^>` where ^<target^> is one of\r\n\techo.  html       to make standalone HTML files\r\n\techo.  dirhtml    to make HTML files named index.html in directories\r\n\techo.  singlehtml to make a single large HTML file\r\n\techo.  pickle     to make pickle files\r\n\techo.  json       to make JSON files\r\n\techo.  htmlhelp   to make HTML files and a HTML help project\r\n\techo.  qthelp     to make HTML files and a qthelp project\r\n\techo.  devhelp    to make HTML files and a Devhelp project\r\n\techo.  epub       to make an epub\r\n\techo.  latex      to make LaTeX files, you can set PAPER=a4 or PAPER=letter\r\n\techo.  text       to make text files\r\n\techo.  man        to make manual pages\r\n\techo.  texinfo    to make Texinfo files\r\n\techo.  gettext    to make PO message catalogs\r\n\techo.  changes    to make an overview over all changed/added/deprecated items\r\n\techo.  xml        to make Docutils-native XML files\r\n\techo.  pseudoxml  to make pseudoxml-XML files for display purposes\r\n\techo.  linkcheck  to check all external links for integrity\r\n\techo.  doctest    to run all doctests embedded in the documentation if enabled\r\n\techo.  
coverage   to run coverage check of the documentation if enabled\r\n\tgoto end\r\n)\r\n\r\nif \"%1\" == \"clean\" (\r\n\tfor /d %%i in (%BUILDDIR%\\*) do rmdir /q /s %%i\r\n\tdel /q /s %BUILDDIR%\\*\r\n\tgoto end\r\n)\r\n\r\n\r\nREM Check if sphinx-build is available and fallback to Python version if any\r\n%SPHINXBUILD% 2> nul\r\nif errorlevel 9009 goto sphinx_python\r\ngoto sphinx_ok\r\n\r\n:sphinx_python\r\n\r\nset SPHINXBUILD=python -m sphinx.__init__\r\n%SPHINXBUILD% 2> nul\r\nif errorlevel 9009 (\r\n\techo.\r\n\techo.The 'sphinx-build' command was not found. Make sure you have Sphinx\r\n\techo.installed, then set the SPHINXBUILD environment variable to point\r\n\techo.to the full path of the 'sphinx-build' executable. Alternatively you\r\n\techo.may add the Sphinx directory to PATH.\r\n\techo.\r\n\techo.If you don't have Sphinx installed, grab it from\r\n\techo.http://sphinx-doc.org/\r\n\texit /b 1\r\n)\r\n\r\n:sphinx_ok\r\n\r\n\r\nif \"%1\" == \"html\" (\r\n\t%SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html\r\n\tif errorlevel 1 exit /b 1\r\n\techo.\r\n\techo.Build finished. The HTML pages are in %BUILDDIR%/html.\r\n\tgoto end\r\n)\r\n\r\nif \"%1\" == \"dirhtml\" (\r\n\t%SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml\r\n\tif errorlevel 1 exit /b 1\r\n\techo.\r\n\techo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml.\r\n\tgoto end\r\n)\r\n\r\nif \"%1\" == \"singlehtml\" (\r\n\t%SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml\r\n\tif errorlevel 1 exit /b 1\r\n\techo.\r\n\techo.Build finished. 
The HTML pages are in %BUILDDIR%/singlehtml.\r\n\tgoto end\r\n)\r\n\r\nif \"%1\" == \"pickle\" (\r\n\t%SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle\r\n\tif errorlevel 1 exit /b 1\r\n\techo.\r\n\techo.Build finished; now you can process the pickle files.\r\n\tgoto end\r\n)\r\n\r\nif \"%1\" == \"json\" (\r\n\t%SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json\r\n\tif errorlevel 1 exit /b 1\r\n\techo.\r\n\techo.Build finished; now you can process the JSON files.\r\n\tgoto end\r\n)\r\n\r\nif \"%1\" == \"htmlhelp\" (\r\n\t%SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp\r\n\tif errorlevel 1 exit /b 1\r\n\techo.\r\n\techo.Build finished; now you can run HTML Help Workshop with the ^\r\n.hhp project file in %BUILDDIR%/htmlhelp.\r\n\tgoto end\r\n)\r\n\r\nif \"%1\" == \"qthelp\" (\r\n\t%SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp\r\n\tif errorlevel 1 exit /b 1\r\n\techo.\r\n\techo.Build finished; now you can run \"qcollectiongenerator\" with the ^\r\n.qhcp project file in %BUILDDIR%/qthelp, like this:\r\n\techo.^> qcollectiongenerator %BUILDDIR%\\qthelp\\devlib.qhcp\r\n\techo.To view the help file:\r\n\techo.^> assistant -collectionFile %BUILDDIR%\\qthelp\\devlib.ghc\r\n\tgoto end\r\n)\r\n\r\nif \"%1\" == \"devhelp\" (\r\n\t%SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp\r\n\tif errorlevel 1 exit /b 1\r\n\techo.\r\n\techo.Build finished.\r\n\tgoto end\r\n)\r\n\r\nif \"%1\" == \"epub\" (\r\n\t%SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub\r\n\tif errorlevel 1 exit /b 1\r\n\techo.\r\n\techo.Build finished. 
The epub file is in %BUILDDIR%/epub.\r\n\tgoto end\r\n)\r\n\r\nif \"%1\" == \"latex\" (\r\n\t%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex\r\n\tif errorlevel 1 exit /b 1\r\n\techo.\r\n\techo.Build finished; the LaTeX files are in %BUILDDIR%/latex.\r\n\tgoto end\r\n)\r\n\r\nif \"%1\" == \"latexpdf\" (\r\n\t%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex\r\n\tcd %BUILDDIR%/latex\r\n\tmake all-pdf\r\n\tcd %~dp0\r\n\techo.\r\n\techo.Build finished; the PDF files are in %BUILDDIR%/latex.\r\n\tgoto end\r\n)\r\n\r\nif \"%1\" == \"latexpdfja\" (\r\n\t%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex\r\n\tcd %BUILDDIR%/latex\r\n\tmake all-pdf-ja\r\n\tcd %~dp0\r\n\techo.\r\n\techo.Build finished; the PDF files are in %BUILDDIR%/latex.\r\n\tgoto end\r\n)\r\n\r\nif \"%1\" == \"text\" (\r\n\t%SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text\r\n\tif errorlevel 1 exit /b 1\r\n\techo.\r\n\techo.Build finished. The text files are in %BUILDDIR%/text.\r\n\tgoto end\r\n)\r\n\r\nif \"%1\" == \"man\" (\r\n\t%SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man\r\n\tif errorlevel 1 exit /b 1\r\n\techo.\r\n\techo.Build finished. The manual pages are in %BUILDDIR%/man.\r\n\tgoto end\r\n)\r\n\r\nif \"%1\" == \"texinfo\" (\r\n\t%SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo\r\n\tif errorlevel 1 exit /b 1\r\n\techo.\r\n\techo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo.\r\n\tgoto end\r\n)\r\n\r\nif \"%1\" == \"gettext\" (\r\n\t%SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale\r\n\tif errorlevel 1 exit /b 1\r\n\techo.\r\n\techo.Build finished. 
The message catalogs are in %BUILDDIR%/locale.\r\n\tgoto end\r\n)\r\n\r\nif \"%1\" == \"changes\" (\r\n\t%SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes\r\n\tif errorlevel 1 exit /b 1\r\n\techo.\r\n\techo.The overview file is in %BUILDDIR%/changes.\r\n\tgoto end\r\n)\r\n\r\nif \"%1\" == \"linkcheck\" (\r\n\t%SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck\r\n\tif errorlevel 1 exit /b 1\r\n\techo.\r\n\techo.Link check complete; look for any errors in the above output ^\r\nor in %BUILDDIR%/linkcheck/output.txt.\r\n\tgoto end\r\n)\r\n\r\nif \"%1\" == \"doctest\" (\r\n\t%SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest\r\n\tif errorlevel 1 exit /b 1\r\n\techo.\r\n\techo.Testing of doctests in the sources finished, look at the ^\r\nresults in %BUILDDIR%/doctest/output.txt.\r\n\tgoto end\r\n)\r\n\r\nif \"%1\" == \"coverage\" (\r\n\t%SPHINXBUILD% -b coverage %ALLSPHINXOPTS% %BUILDDIR%/coverage\r\n\tif errorlevel 1 exit /b 1\r\n\techo.\r\n\techo.Testing of coverage in the sources finished, look at the ^\r\nresults in %BUILDDIR%/coverage/python.txt.\r\n\tgoto end\r\n)\r\n\r\nif \"%1\" == \"xml\" (\r\n\t%SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml\r\n\tif errorlevel 1 exit /b 1\r\n\techo.\r\n\techo.Build finished. The XML files are in %BUILDDIR%/xml.\r\n\tgoto end\r\n)\r\n\r\nif \"%1\" == \"pseudoxml\" (\r\n\t%SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml\r\n\tif errorlevel 1 exit /b 1\r\n\techo.\r\n\techo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml.\r\n\tgoto end\r\n)\r\n\r\n:end\r\n"
  },
  {
    "path": "doc/requirements.txt",
    "content": "nose\nnumpy\npandas\nsphinx_rtd_theme==1.0.0\nsphinx==4.2\ndocutils<0.18\ndevlib @ git+https://github.com/ARM-software/devlib@master\n"
  },
  {
    "path": "doc/source/api/output.rst",
    "content": ".. _output_processing_api:\n\nOutput\n======\n\nA WA output directory can be accessed via a :class:`RunOutput` object. There are\ntwo ways of getting one -- either instantiate it with a path to a WA output\ndirectory, or use :func:`discover_wa_outputs` to traverse a directory tree\niterating over all WA output directories found.\n\n.. function:: discover_wa_outputs(path)\n\n    Recursively traverse ``path`` looking for WA output directories. Return\n    an iterator over :class:`RunOutput` objects for each discovered output.\n\n    :param path: The directory to scan for WA output\n\n\n.. class:: RunOutput(path)\n\n    The main interface into a WA output directory.\n\n    :param path: must be the path to the top-level output directory (the one\n                 containing ``__meta`` subdirectory and ``run.log``).\n\nWA output stored in a Postgres database by the ``Postgres`` output processor\ncan be accessed via a :class:`RunDatabaseOutput` which can be initialized as follows:\n\n.. class:: RunDatabaseOutput(password, host='localhost', user='postgres', port='5432', dbname='wa', run_uuid=None, list_runs=False)\n\n    The main interface into Postgres database containing WA results.\n\n    :param password: The password used to authenticate with\n    :param host: The database host address. Defaults to ``'localhost'``\n    :param user: The user name used to authenticate with. Defaults to ``'postgres'``\n    :param port: The database connection port number. Defaults to ``'5432'``\n    :param dbname: The database name. Defaults to ``'wa'``\n    :param run_uuid: The ``run_uuid`` to identify the selected run\n    :param list_runs: Will connect to the database and will print out the available runs\n            with their corresponding run_uuids. Defaults to ``False``\n\n\nExample\n-------\n\n.. 
seealso:: :ref:`processing_output`\n\nTo demonstrate how we can use the output API if we have an existing WA output\ncalled ``wa_output`` in the current working directory we can initialize a\n``RunOutput`` as follows:\n\n.. code-block:: python\n\n    In [1]: from wa import RunOutput\n       ...:\n       ...: output_directory = 'wa_output'\n       ...: run_output = RunOutput(output_directory)\n\nAlternatively if the results have been stored in a Postgres database we can\ninitialize a ``RunDatabaseOutput`` as follows:\n\n.. code-block:: python\n\n    In [1]: from wa import RunDatabaseOutput\n       ...:\n       ...: db_settings = {\n       ...:                'host': 'localhost',\n       ...:                'port': '5432',\n       ...:                'dbname': 'wa',\n       ...:                'user': 'postgres',\n       ...:                'password': 'wa'\n       ...:                }\n       ...:\n       ...: RunDatabaseOutput(list_runs=True, **db_settings)\n    Available runs are:\n    ========= ============ ============= =================== =================== ====================================\n     Run Name      Project Project Stage          Start Time            End Time                             run_uuid\n    ========= ============ ============= =================== =================== ====================================\n    Test Run    my_project          None 2018-11-29 14:53:08 2018-11-29 14:53:24 aa3077eb-241a-41d3-9610-245fd4e552a9\n    run_1       my_project          None 2018-11-29 14:53:34 2018-11-29 14:53:37 4c2885c9-2f4a-49a1-bbc5-b010f8d6b12a\n    ========= ============ ============= =================== =================== ====================================\n\n    In [2]: run_uuid = '4c2885c9-2f4a-49a1-bbc5-b010f8d6b12a'\n       ...: run_output = RunDatabaseOutput(run_uuid=run_uuid, **db_settings)\n\n\nFrom here we can retrieve various information about the run. 
For example if we\nwant to see what the overall status of the run was, along with the runtime\nparameters and the metrics recorded from the first job was we can do the following:\n\n.. code-block:: python\n\n    In [2]: run_output.status\n    Out[2]: OK(7)\n\n    # List all of the jobs for the run\n    In [3]: run_output.jobs\n    Out[3]:\n    [<wa.framework.output.JobOutput at 0x7f70358a1f10>,\n     <wa.framework.output.JobOutput at 0x7f70358a1150>,\n     <wa.framework.output.JobOutput at 0x7f7035862810>,\n     <wa.framework.output.JobOutput at 0x7f7035875090>]\n\n    # Examine the first job that was ran\n    In [4]: job_1 = run_output.jobs[0]\n\n    In [5]: job_1.label\n    Out[5]: u'dhrystone'\n\n    # Print out all the runtime parameters and their values for this job\n    In [6]: for k, v in job_1.spec.runtime_parameters.items():\n       ...:     print (k, v)\n    (u'airplane_mode': False)\n    (u'brightness': 100)\n    (u'governor': 'userspace')\n    (u'big_frequency': 1700000)\n    (u'little_frequency': 1400000)\n\n    # Print out all the metrics available for this job\n    In [7]: job_1.metrics\n    Out[7]:\n    [<thread 0 score: 14423105 (+)>,\n     <thread 0 DMIPS: 8209 (+)>,\n     <thread 1 score: 14423105 (+)>,\n     <thread 1 DMIPS: 8209 (+)>,\n     <thread 2 score: 14423105 (+)>,\n     <thread 2 DMIPS: 8209 (+)>,\n     <thread 3 score: 18292638 (+)>,\n     <thread 3 DMIPS: 10411 (+)>,\n     <thread 4 score: 17045532 (+)>,\n     <thread 4 DMIPS: 9701 (+)>,\n     <thread 5 score: 14150917 (+)>,\n     <thread 5 DMIPS: 8054 (+)>,\n     <time: 0.184497 seconds (-)>,\n     <total DMIPS: 52793 (+)>,\n     <total score: 92758402 (+)>]\n\n    # Load the run results csv file into pandas\n    In [7]: pd.read_csv(run_output.get_artifact_path('run_result_csv'))\n    Out[7]:\n                id   workload  iteration          metric          value    units\n    0   450000-wk1  dhrystone          1  thread 0 score   1.442310e+07      NaN\n    1   450000-wk1  dhrystone 
         1  thread 0 DMIPS   8.209700e+04      NaN\n    2   450000-wk1  dhrystone          1  thread 1 score   1.442310e+07      NaN\n    3   450000-wk1  dhrystone          1  thread 1 DMIPS   8.720900e+04      NaN\n    ...\n\n\nWe can also retrieve information about the target that the run was performed on\nfor example:\n\n.. code-block:: python\n\n    # Print out the target's abi:\n    In [9]: run_output.target_info.abi\n    Out[9]: u'arm64'\n\n    # The os the target was running\n    In [9]: run_output.target_info.os\n    Out[9]: u'android'\n\n    # And other information about the os version\n    In [10]: run_output.target_info.os_version\n    Out[10]:\n    OrderedDict([(u'all_codenames', u'REL'),\n                 (u'incremental', u'3687331'),\n                 (u'preview_sdk', u'0'),\n                 (u'base_os', u''),\n                 (u'release', u'7.1.1'),\n                 (u'codename', u'REL'),\n                 (u'security_patch', u'2017-03-05'),\n                 (u'sdk', u'25')])\n\n\n\n:class:`RunOutput`\n------------------\n\n:class:`RunOutput` provides access to the output of a WA :term:`run`, including metrics,\nartifacts, metadata, and configuration. It has the following attributes:\n\n\n``jobs``\n    A list of :class:`JobOutput` objects for each job that was executed during\n    the run.\n\n``status``\n    Run status. This indicates whether the run has completed without problems\n    (``Status.OK``) or if there were issues.\n\n``metrics``\n    A list of :class:`Metric`\\ s for the run.\n\n    .. note:: these are *overall run* metrics only. Metrics for individual\n              jobs are contained within the corresponding :class:`JobOutput`\\ s.\n\n``artifacts``\n    A list of :class:`Artifact`\\ s for the run. These are usually backed by a\n    file and can contain traces, raw data, logs, etc.\n\n    .. note:: these are *overall run* artifacts only. 
Artifacts for individual\n              jobs are contained within the corresponding :class:`JobOutput`\\ s.\n\n``info``\n  A :ref:`RunInfo <run-info-api>` object that contains information about the run\n  itself for example it's duration, name, uuid etc.\n\n``target_info``\n  A :ref:`TargetInfo <target-info-api>` object which can be used to access\n  various information about the target that was used during the run for example\n  it's ``abi``, ``hostname``, ``os`` etc.\n\n``run_config``\n  A :ref:`RunConfiguration <run-configuration>` object that can be used to\n  access all the configuration of the run itself, for example the\n  ``reboot_policy``, ``execution_order``, ``device_config`` etc.\n\n``classifiers``\n  :ref:`classifiers <classifiers>` defined for the entire run.\n\n``metadata``\n  :ref:`metadata  <metadata>` associated with the run.\n\n``events``\n  A list of any events logged during the run, that are not associated with a\n  particular job.\n\n``event_summary``\n  A condensed summary of any events that occurred during the run.\n\n``augmentations``\n  A list of the :term:`augmentation`\\ s that were enabled during the run (these\n  augmentations may or may not have been active for a particular job).\n\n``basepath``\n  A (relative) path to the WA output directory backing this object.\n\n\nmethods\n~~~~~~~\n\n.. method:: RunOutput.get_artifact(name)\n\n    Return the :class:`Artifact` specified by ``name``. This will only look\n    at the run artifacts; this will not search the artifacts of the individual\n    jobs.\n\n    :param name:  The name of the artifact who's path to retrieve.\n    :return: The :class:`Artifact` with that name\n    :raises HostError: If the artifact with the specified name does not exist.\n\n\n.. method:: RunOutput.get_artifact_path(name)\n\n    Return the path to the file backing the artifact specified by ``name``. 
This\n    will only look at the run artifacts; this will not search the artifacts of\n    the individual jobs.\n\n    :param name:  The name of the artifact who's path to retrieve.\n    :return: The path to the artifact\n    :raises HostError: If the artifact with the specified name does not exist.\n\n\n.. method:: RunOutput.get_metric(name)\n\n   Return the :class:`Metric` associated with the run (not the individual jobs)\n   with the specified `name`.\n\n   :return: The :class:`Metric` object for the metric with the specified name.\n\n\n.. method:: RunOutput.get_job_spec(spec_id)\n\n   Return the :class:`JobSpec` with the specified `spec_id`. A :term:`spec`\n   describes the job to be executed. Each :class:`Job` has an associated\n   :class:`JobSpec`, though a single :term:`spec` can be associated with\n   multiple :term:`job`\\ s (If the :term:`spec` specifies multiple iterations).\n\n.. method:: RunOutput.list_workloads()\n\n    List unique  workload labels that featured in this run. The labels will be\n    in the order in which they first ran.\n\n    :return: A list of `str` labels of workloads that were part of this run.\n\n\n.. method:: RunOutput.add_classifier(name, value, overwrite=False)\n\n   Add a classifier to the run as a whole. If a classifier with the specified\n   ``name`` already exists, a``ValueError`` will be raised, unless\n   `overwrite=True` is specified.\n\n\n:class:`RunDatabaseOutput`\n---------------------------\n\n:class:`RunDatabaseOutput` provides access to the output of a WA :term:`run`,\nincluding metrics,artifacts, metadata, and configuration stored in a postgres database.\nThe majority of attributes and methods are the same :class:`RunOutput` however the\nnoticeable differences are:\n\n``jobs``\n    A list of :class:`JobDatabaseOutput` objects for each job that was executed\n    during the run.\n\n``basepath``\n  A representation of the current database and host information backing this object.\n\nmethods\n~~~~~~~\n\n.. 
method:: RunDatabaseOutput.get_artifact(name)\n\n    Return the :class:`Artifact` specified by ``name``. This will only look\n    at the run artifacts; this will not search the artifacts of the individual\n    jobs. The `path` attribute of the :class:`Artifact` will be set to the Database OID of the object.\n\n    :param name:  The name of the artifact whose path to retrieve.\n    :return: The :class:`Artifact` with that name\n    :raises HostError: If the artifact with the specified name does not exist.\n\n\n.. method:: RunDatabaseOutput.get_artifact_path(name)\n\n    If the artifact is a file this method returns a `StringIO` object containing\n    the contents of the artifact specified by ``name``. If the artifact is a\n    directory, the method returns a path to a locally extracted version of the\n    directory which is left to the user to remove after use. This will only look\n    at the run artifacts; this will not search the artifacts of the individual\n    jobs.\n\n    :param name:  The name of the artifact whose path to retrieve.\n    :return: A `StringIO` object with the contents of the artifact\n    :raises HostError: If the artifact with the specified name does not exist.\n\n\n:class:`JobOutput`\n------------------\n\n:class:`JobOutput` provides access to the output of a single :term:`job`\nexecuted during a WA :term:`run`, including metrics,\nartifacts, metadata, and configuration. It has the following attributes:\n\n``status``\n    Job status. This indicates whether the job has completed without problems\n    (``Status.OK``) or if there were issues.\n\n    .. note:: Under typical configuration, WA will make a number of attempts to\n              re-run a job in case of issue. This status (and the rest of the\n\t      output) will represent the latest attempt. I.e. a\n\t      ``Status.OK`` indicates that the latest attempt was successful,\n\t      but it does not mean that there weren't prior failures. 
You can check\n\t      the ``retry`` attribute (see below) to see whether this was the first\n\t      attempt or not.\n\n``retry``\n   Retry number for this job. If a problem is detected during job execution, the\n   job will be re-run up to :confval:`max_retries` times. This indicates the\n   final retry number for the output. A value of ``0`` indicates that the job\n   succeeded on the first attempt, and no retries were necessary.\n\n   .. note:: Outputs for previous attempts are moved into ``__failed``\n             subdirectory of WA output. These are currently not exposed via the\n\t     API.\n\n``id``\n    The ID of the :term:`spec` associated with this job. This ID is unique to\n    the spec, but not necessarily to the job -- jobs representing multiple\n    iterations of the same spec will share the ID.\n\n``iteration``\n    The iteration number of this job. Together with the ``id`` (above), this\n    uniquely identifies a job within a run.\n\n``label``\n    The workload label associated with this job. Usually, this will be the name\n    or :term:`alias` of the workload, however may be overwritten by the user in\n    the :term:`agenda`.\n\n``metrics``\n    A list of :class:`Metric`\\ s for the job.\n\n``artifacts``\n    A list of :class:`Artifact`\\ s for the job. These are usually backed by a\n    file and can contain traces, raw data, logs, etc.\n\n``classifiers``\n  :ref:`classifiers <classifiers>` defined for the job.\n\n``metadata``\n  :ref:`metadata  <metadata>` associated with the job.\n\n``events``\n  A list of any events logged during the execution of the job.\n\n``event_summary``\n  A condensed summary of any events that occurred during the execution of the\n  job.\n\n``augmentations``\n  A list of the :term:`augmentation`\\ s that were enabled for this job. 
This may\n  be different from overall augmentations specified for the run, as they may be\n  enabled/disabled on per-job basis.\n\n``basepath``\n  A (relative) path to the WA output directory backing this object.\n\n\nmethods\n~~~~~~~\n\n.. method:: JobOutput.get_artifact(name)\n\n    Return the :class:`Artifact` specified by ``name`` associated with this job.\n\n    :param name:  The name of the artifact to retrieve.\n    :return: The :class:`Artifact` with that name\n    :raises HostError: If the artifact with the specified name does not exist.\n\n.. method:: JobOutput.get_artifact_path(name)\n\n    Return the path to the file backing the artifact specified by ``name``,\n    associated with this job.\n\n    :param name:  The name of the artifact whose path to retrieve.\n    :return: The path to the artifact\n    :raises HostError: If the artifact with the specified name does not exist.\n\n.. method:: JobOutput.get_metric(name)\n\n   Return the :class:`Metric` associated with this job with the specified\n   `name`.\n\n   :return: The :class:`Metric` object for the metric with the specified name.\n\n.. method:: JobOutput.add_classifier(name, value, overwrite=False)\n\n   Add a classifier to the job. The classifier will be propagated to all\n   existing artifacts and metrics, as well as those added afterwards. If a\n   classifier with the specified ``name`` already exists, a ``ValueError`` will\n   be raised, unless `overwrite=True` is specified.\n\n\n:class:`JobDatabaseOutput`\n---------------------------\n\n:class:`JobDatabaseOutput` provides access to the output of a single :term:`job`\nexecuted during a WA :term:`run`, including metrics, artifacts, metadata, and\nconfiguration stored in a postgres database.\nThe majority of attributes and methods are the same as :class:`JobOutput` however the\nnoticeable differences are:\n\n``basepath``\n  A representation of the current database and host information backing this object.\n\n\nmethods\n~~~~~~~\n\n.. 
method:: JobDatabaseOutput.get_artifact(name)\n\n    Return the :class:`Artifact` specified by ``name`` associated with this job.\n    The `path` attribute of the :class:`Artifact` will be set to the Database\n    OID of the object.\n\n    :param name:  The name of the artifact to retrieve.\n    :return: The :class:`Artifact` with that name\n    :raises HostError: If the artifact with the specified name does not exist.\n\n.. method:: JobDatabaseOutput.get_artifact_path(name)\n\n    If the artifcat is a file this method returns a `StringIO` object containing\n    the contents of the artifact specified by ``name`` associated with this job.\n    If the aritifcat is a directory, the method returns a path to a locally\n    extracted version of the directory which is left to the user to remove after\n    use.\n\n    :param name:  The name of the artifact who's path to retrieve.\n    :return: A `StringIO` object with the contents of the artifact\n    :raises HostError: If the artifact with the specified name does not exist.\n\n\n:class:`Metric`\n---------------\n\nA metric represent a single numerical measurement/score collected as a result of\nrunning the workload. It would be generated either by the workload or by one of\nthe augmentations active during the execution of the workload.\n\nA :class:`Metric` has the following attributes:\n\n``name``\n    The name of the metric.\n\n    .. note:: A name of the metric is not necessarily unique, even for the same\n              job. Some workloads internally run multiple sub-tests, each\n              generating a metric with the same name. In such cases,\n              :term:`classifier`\\ s are used to distinguish between them.\n\n``value``\n    The value of the metrics collected.\n\n\n``units``\n    The units of the metrics. This maybe ``None`` if the metric has no units.\n\n\n``lower_is_better``\n    The default assumption is that higher metric values are better. This may be\n    overridden by setting this to ``True``, e.g. 
if metrics such as \"run time\"\n    or \"latency\". WA does not use this internally (at the moment) but this may\n    be used by external parties to sensibly process WA results in a generic way.\n\n\n``classifiers``\n    These can be user-defined :term:`classifier`\\ s propagated from the job/run,\n    or they may have been added by the workload to help distinguish between\n    otherwise identical metrics.\n\n``label``\n    This is a string constructed from the name and classifiers, to provide a\n    more unique identifier, e.g. for grouping values across iterations. The\n    format is in the form ``name/cassifier1=value1/classifier2=value2/...``.\n\n\n:class:`Artifact`\n-----------------\n\nAn artifact is a file that is created on the host as part of executing a\nworkload. This could be trace, logging, raw output, or pretty much anything\nelse. Pretty much every file under WA output directory that is not already\nrepresented by some other framework object will have an :class:`Artifact`\nassociated with it.\n\nAn :class:`Artifact` has  the following attributes:\n\n\n``name``\n    The name of this artifact. This will be unique for the job/run (unlike\n    metric names). This is intended as a consistent \"handle\" for this artifact.\n    The actual file name for the artifact may vary from job to job (e.g. some\n    benchmarks that create files with results include timestamps in the file\n    names), however the name will always be the same.\n\n``path``\n    Partial path to the file associated with this artifact. Often, this is just\n    the file name. To get the complete path that maybe used to access the file,\n    use :func:`get_artifact_path` of the corresponding output object.\n\n\n``kind``\n    Describes the nature of this artifact to facilitate generic processing.\n    Possible kinds are:\n\n    :log: A log file. 
Not part of the \"output\" as such but contains\n            information about the run/workload execution that be useful for\n            diagnostics/meta analysis.\n    :meta: A file containing metadata. This is not part of the \"output\", but\n            contains information that may be necessary to reproduce the\n            results (contrast with ``log`` artifacts which are *not*\n            necessary).\n    :data: This file contains new data, not available otherwise and should\n            be considered part of the \"output\" generated by WA. Most traces\n            would fall into this category.\n    :export: Exported version of results or some other artifact. This\n                signifies that this artifact does not contain any new data\n                that is not available elsewhere and that it may be safely\n                discarded without losing information.\n    :raw: Signifies that this is a raw dump/log that is normally processed\n            to extract useful information and is then discarded. In a sense,\n            it is the opposite of ``export``, but in general may also be\n            discarded.\n\n            .. note:: Whether a file is marked as ``log``/``data`` or ``raw``\n                    depends on how important it is to preserve this file,\n                    e.g. 
when archiving, vs how much space it takes up.\n                    Unlike ``export`` artifacts which are (almost) always\n                    ignored by other exporters as that would never result\n                    in data loss, ``raw`` files *may* be processed by\n                    exporters if they decided that the risk of losing\n                    potentially (though unlikely) useful data is greater\n                    than the time/space cost of handling the artifact (e.g.\n                    a database uploader may choose to ignore ``raw``\n                    artifacts, where as a network filer archiver may choose\n                    to archive them).\n\n    .. note:: The kind parameter is intended to represent the logical\n              function of a particular artifact, not it's intended means of\n              processing -- this is left entirely up to the output\n              processors.\n\n``description``\n    This may be used by the artifact's creator to provide additional free-form\n    information about the artifact. In practice, this is often ``None``\n\n\n``classifiers``\n    Job- and run-level :term:`classifier`\\ s will be propagated to the artifact.\n\n\nAdditional run info\n-------------------\n\n:class:`RunOutput` object has ``target_info``  and ``run_info`` attributes that\ncontain structures that provide additional information about the run and device.\n\n.. _target-info-api:\n\n:class:`TargetInfo`\n~~~~~~~~~~~~~~~~~~~\n\nThe :class:`TargetInfo` class presents various pieces of information about the\ntarget device. 
An instance of this class will be instantiated and populated\nautomatically from the devlib `target\n<http://devlib.readthedocs.io/en/latest/target.html>`_ created during a WA run\nand serialized to a json file as part of the metadata exported\nby WA at the end of a run.\n\nThe available attributes of the class are as follows:\n\n``target``\n    The name of the target class that was used to interact with the device\n    during the run. E.g.  ``\"AndroidTarget\"``, ``\"LinuxTarget\"`` etc.\n\n``modules``\n    A list of names of modules that have been loaded by the target. Modules\n    provide additional functionality, such as access to ``cpufreq`` and which\n    modules are installed may impact how much of the ``TargetInfo`` has been\n    populated.\n\n``cpus``\n    A list of :class:`CpuInfo` objects describing the capabilities of each CPU.\n\n``os``\n    A generic name of the OS the target was running (e.g. ``\"android\"``).\n\n``os_version``\n    A dict that contains a mapping of OS version elements to their values. This\n    mapping is OS-specific.\n\n``abi``\n    The ABI of the target device.\n\n``hostname``\n    The hostname of the device the run was executed on.\n\n``is_rooted``\n    A boolean value specifying whether root was detected on the device.\n\n``kernel_version``\n    The version of the kernel on the target device.  This returns a\n    :class:`KernelVersion` instance that has separate version and release\n    fields.\n\n``kernel_config``\n    A :class:`KernelConfig` instance that contains parsed kernel config from the\n    target device. This may be ``None`` if the kernel config could not be\n    extracted.\n\n``sched_features``\n    A list of the available tweaks to the scheduler, if available from the\n    device.\n\n``hostid``\n    The unique identifier of the particular device the WA run was executed on.\n\n\n.. _run-info-api:\n\n:class:`RunInfo`\n~~~~~~~~~~~~~~~~\n\nThe :class:`RunInfo` provides general run information. 
It has the following\nattributes:\n\n\n``uuid``\n    A unique identifier for that particular run.\n\n``run_name``\n    The name of the run (if provided)\n\n``project``\n    The name of the project the run belongs to (if provided)\n\n``project_stage``\n    The project stage the run is associated with (if provided)\n\n``duration``\n    The length of time the run took to complete.\n\n``start_time``\n    The time the run was started.\n\n``end_time``\n    The time at which the run finished.\n"
  },
  {
    "path": "doc/source/api/workload.rst",
    "content": ".. _workloads-api:\n\nWorkloads\n~~~~~~~~~\n.. _workload-api:\n\nWorkload\n^^^^^^^^\n\nThe base :class:`Workload` interface is as follows, and is the base class for\nall :ref:`workload types <workload-types>`. For more information about to\nimplement your own workload please see the\n:ref:`Developer How Tos <adding-a-workload-example>`.\n\nAll instances of a workload will have the following attributes:\n\n``name``\n   This identifies the workload (e.g. it is used to specify the\n   workload in the :ref:`agenda <agenda>`).\n\n``phones_home``\n    This can be set to True to mark that this workload poses a risk of\n    exposing information to the outside world about the device it runs on.\n    For example a benchmark application that sends scores and device data\n    to a database owned by the maintainer.\n\n``requires_network``\n    Set this to ``True`` to mark the the workload will fail without a network\n    connection, this enables it to fail early with a clear message.\n\n``asset_directory``\n    Set this to specify a custom directory for assets to be pushed to, if\n    unset the working directory will be used.\n\n``asset_files``\n    This can be used to automatically deploy additional assets to\n    the device. If required the attribute should contain a list of file\n    names that are required by the workload which will be attempted to be\n    found by the resource getters\n\nmethods\n\"\"\"\"\"\"\"\n\n.. method:: Workload.init_resources(context)\n\n    This method may be optionally overridden to implement dynamic\n    resource discovery for the workload. This method executes\n    early on, before the device has been initialized, so it\n    should only be used to initialize resources that do not\n    depend on the device to resolve. This method is executed\n    once per run for each workload instance.\n\n    :param context: The :ref:`Context <context>` for the current run.\n\n\n.. 
method:: Workload.validate(context)\n\n    This method can be used to validate any assumptions your workload\n    makes about the environment (e.g. that required files are\n    present, environment variables are set, etc) and should raise a\n    :class:`wa.WorkloadError <wa.framework.exception.WorkloadError>`\n    if that is not the case. The base class implementation only makes\n    sure that the name attribute has been set.\n\n    :param context: The :ref:`Context <context>` for the current run.\n\n\n.. method:: Workload.initialize(context)\n\n    This method is decorated with the ``@once_per_instance`` decorator,\n    (for more information please see\n    :ref:`Execution Decorators <execution-decorators>`)\n    therefore it will be executed exactly once per run (no matter\n    how many instances of the workload there are). It will run\n    after the device has been initialized, so it may be used to\n    perform device-dependent initialization that does not need to\n    be repeated on each iteration (e.g. as installing executables\n    required by the workload on the device).\n\n    :param context: The :ref:`Context <context>` for the current run.\n\n\n.. method:: Workload.setup(context)\n\n    Everything that needs to be in place for workload execution should\n    be done in this method. This includes copying files to the device,\n    starting up an application, configuring communications channels,\n    etc.\n\n    :param context: The :ref:`Context <context>` for the current run.\n\n\n.. method:: Workload.setup_rerun(context)\n\n    Everything that needs to be in place for workload execution should\n    be done in this method. This includes copying files to the device,\n    starting up an application, configuring communications channels,\n    etc.\n\n    :param context: The :ref:`Context <context>` for the current run.\n\n\n.. 
method:: Workload.run(context)\n\n    This method should perform the actual task that is being measured.\n    When this method exits, the task is assumed to be complete.\n\n    :param context: The :ref:`Context <context>` for the current run.\n\n    .. note:: Instruments are kicked off just before calling this\n            method and disabled right after, so everything in this\n            method is being measured. Therefore this method should\n            contain the least code possible to perform the operations\n            you are interested in measuring. Specifically, things like\n            installing or starting applications, processing results, or\n            copying files to/from the device should be done elsewhere if\n            possible.\n\n\n\n.. method:: Workload.extract_results(context)\n\n    This method gets invoked after the task execution has finished and\n    should be used to extract metrics from the target.\n\n    :param context: The :ref:`Context <context>` for the current run.\n\n\n.. method:: Workload.update_output(context)\n\n    This method should be used to update the output within the specified\n    execution context with the metrics and artifacts from this\n    workload iteration.\n\n    :param context: The :ref:`Context <context>` for the current run.\n\n\n.. method:: Workload.teardown(context)\n\n    This could be used to perform any cleanup you may wish to do, e.g.\n    Uninstalling applications, deleting file on the device, etc.\n\n    :param context: The :ref:`Context <context>` for the current run.\n\n\n.. method:: Workload.finalize(context)\n\n    This is the complement to ``initialize``. This will be executed\n    exactly once at the end of the run. This should be used to\n    perform any final clean up (e.g. uninstalling binaries installed\n    in the ``initialize``)\n\n    :param context: The :ref:`Context <context>` for the current run.\n\n.. 
_apkworkload-api:\n\nApkWorkload\n^^^^^^^^^^^^\n\nThe :class:`ApkWorkload` derives from the base :class:`Workload` class however\nthis associates the workload with a package allowing for an apk to be found for\nthe workload, set up and run on the device before running the workload.\n\nIn addition to the attributes mentioned above ApkWorkloads this class also\nfeatures the following attributes however this class does not present any new\nmethods.\n\n\n``loading_time``\n    This is the time in seconds that WA will wait for the application to load\n    before continuing with the run. By default this will wait 10 seconds however\n    if your application under test requires additional time this value should\n    be increased.\n\n``package_names``\n    This attribute should be a list of Apk package names that are\n    suitable for this workload. Both the host (in the relevant resource\n    locations) and device will be searched for an application with a matching\n    package name.\n\n``supported_versions``\n    This attribute should be a list of apk versions that are suitable for this\n    workload, if a specific apk version is not specified then any available\n    supported version may be chosen.\n\n``activity``\n    This attribute can be optionally set to override the default activity that\n    will be extracted from the selected APK file which will be used when\n    launching the APK.\n\n``view``\n    This is the \"view\" associated with the application. 
This is used by\n    instruments like ``fps`` to monitor the current framerate being generated by\n    the application.\n\n``apk``\n    This is a :class:`PackageHandler` which is what is used to store\n    information about the apk and manage the application itself, the handler is\n    used to call the associated methods to manipulate the application itself for\n    example to launch/close it etc.\n\n``package``\n    This is a more convenient way to access the package name of the Apk\n    that was found and being used for the run.\n\n\n.. _apkuiautoworkload-api:\n\nApkUiautoWorkload\n^^^^^^^^^^^^^^^^^\n\nThe :class:`ApkUiautoWorkload` derives from :class:`ApkUIWorkload` which is an\nintermediate class which in turn inherits from\n:class:`ApkWorkload`, however in addition to associating an apk with the\nworkload this class allows for automating the application with UiAutomator.\n\nThis class defines these additional attributes:\n\n``gui``\n    This attribute will be an instance of a :class:`UiAutomatorGUI` which is\n    used to control the automation, and is what is used to pass parameters to the\n    java class for example ``gui.uiauto_params``.\n\n\n.. 
_apkreventworkload-api:\n\nApkReventWorkload\n^^^^^^^^^^^^^^^^^\n\nThe :class:`ApkReventWorkload` derives from :class:`ApkUIWorkload` which is an\nintermediate class which in turn inherits from\n:class:`ApkWorkload`, however in addition to associating an apk with the\nworkload this class allows for automating the application with\n:ref:`Revent <revent_files_creation>`.\n\nThis class defines these additional attributes:\n\n``gui``\n    This attribute will be an instance of a :class:`ReventGUI` which is\n    used to control the automation.\n\n``setup_timeout``\n    This is the time allowed for replaying a recording for the setup stage.\n\n``run_timeout``\n    This is the time allowed for replaying a recording for the run stage.\n\n``extract_results_timeout``\n    This is the time allowed for replaying a recording for the extract results stage.\n\n``teardown_timeout``\n    This is the time allowed for replaying a recording for the teardown stage.\n\n\n.. _uiautoworkload-api:\n\nUiautoWorkload\n^^^^^^^^^^^^^^\n\nThe :class:`UiautoWorkload` derives from :class:`UIWorkload` which is an\nintermediate class which in turn inherits from\n:class:`Workload`, however this allows for providing generic automation using\nUiAutomator without associating a particular application with the workload.\n\nThis class defines these additional attributes:\n\n``gui``\n    This attribute will be an instance of a :class:`UiAutomatorGUI` which is\n    used to control the automation, and is what is used to pass parameters to the\n    java class for example ``gui.uiauto_params``.\n\n\n.. 
_reventworkload-api:\n\nReventWorkload\n^^^^^^^^^^^^^^\n\nThe :class:`ReventWorkload` derives from :class:`UIWorkload` which is an\nintermediate class which in turn inherits from\n:class:`Workload`, however this allows for providing generic automation\nusing :ref:`Revent <revent_files_creation>` without associating a particular\napplication with the workload.\n\nThis class defines these additional attributes:\n\n``gui``\n    This attribute will be an instance of a :class:`ReventGUI` which is\n    used to control the automation.\n\n``setup_timeout``\n    This is the time allowed for replaying a recording for the setup stage.\n\n``run_timeout``\n    This is the time allowed for replaying a recording for the run stage.\n\n``extract_results_timeout``\n    This is the time allowed for replaying a recording for the extract results stage.\n\n``teardown_timeout``\n    This is the time allowed for replaying a recording for the teardown stage.\n\n\n"
  },
  {
    "path": "doc/source/api.rst",
    "content": "Workload Automation API\n=======================\n\n.. toctree::\n    :maxdepth: 2\n\n    api/output\n\n    api/workload\n"
  },
  {
    "path": "doc/source/changes.rst",
    "content": "=================================\nWhat's New in Workload Automation\n=================================\n\n***********\nVersion 3.3.1\n***********\n\n.. warning:: This is the last release supporting Python 3.5 and Python 3.6.\n             Subsequent releases will support Python 3.7+.\n\nNew Features:\n==============\n\nCommands:\n---------\n\nInstruments:\n------------\n    - ``perf``: Add support for ``report-sample``.\n\nWorkloads:\n----------------\n    - ``PCMark``: Add support for PCMark 3.0.\n    - ``Antutu``: Add support for 9.1.6.\n    - ``Geekbench``: Add support for Geekbench5.\n    - ``gfxbench``: Support the non corporate version.\n\nFixes/Improvements\n==================\n\nFramework:\n----------\n    - Fix installation on systems without git installed.\n    - Avoid querying online cpus if hotplug is disabled.\n\nDockerfile:\n-----------\n    - Update base image to Ubuntu 20.04.\n\nInstruments:\n------------\n    - ``perf``: Fix parsing csv with using interval-only-values.\n    - ``perf``: Improve error reporting of an invalid agenda.\n\nOutput Processors:\n------------------\n    - ``postgres``: Fixed SQL command when creating a new event.\n\nWorkloads:\n----------\n    - ``speedometer``: Fix adb reverse when rebooting a device.\n    - ``googleplaybook``: Support newer apk version.\n    - ``googlephotos``: Support newer apk version.\n    - ``gmail``: Support newer apk version.\n\nOther:\n------\n    - Upgrade Android Gradle to 7.2 and Gradle plugin to 4.2.\n\n***********\nVersion 3.3\n***********\n\nNew Features:\n==============\n\nCommands:\n---------\n    - Add ``report`` command to provide a summary of a run.\n\nInstruments:\n------------\n    - Add ``proc_stat`` instrument to monitor CPU load using data from ``/proc/stat``.\n\nFramework:\n----------\n    - Add support for simulating atomic writes to prevent race conditions when running current instances of WA.\n    - Add support file transfer for SSH connections via SFTP and 
falling back to using SCP implementation.\n    - Support detection of logcat buffer overflow and present a warning if this occurs.\n    - Allow skipping all remaining jobs if a job had exhausted all of its retries.\n    - Add polling mechanism for file transfers rather than relying on timeouts.\n    - Add `run_completed` reboot policy to enable rebooting a target after a run has been completed.\n\n\nAndroid Devices:\n----------------\n    - Enable configuration of whether to keep the screen on while the device is plugged in.\n\nOutput Processors:\n------------------\n    - Enable the use of cascading deletion in Postgres databases to clean up after deletion of a run entry.\n\n\nFixes/Improvements\n==================\n\nFramework:\n----------\n    - Improvements to the ``process`` command to correctly handle skipped and in process jobs.\n    - Add support for deprecated parameters allowing for a warning to be raised when providing\n      a parameter that will no longer have an effect.\n    - Switch implementation of SSH connections to use Paramiko for greater stability.\n    - By default use sftp for file transfers with SSH connections, allow falling back to scp\n      by setting ``use_scp``.\n    - Fix callbacks not being disconnected correctly when requested.\n    - ``ApkInfo`` objects are now cached to reduce re-parsing of APK files.\n    - Speed up discovery of wa output directories.\n    - Fix merge handling of parameters from multiple files.\n\nDockerfile:\n-----------\n    - Install additional instruments for use in the docker environment.\n    - Fix environment variables not being defined in non interactive environments.\n\nInstruments:\n------------\n    - ``trace_cmd`` additional fixes for python 3 support.\n\nOutput Processors:\n------------------\n    - ``postgres``: Fixed SQL command when creating a new event.\n\nWorkloads:\n----------\n    - ``aitutu``: Improve reliability of results extraction.\n    - ``androbench``: Enabling dismissing of additional 
popups on some devices.\n    - ``antutu``: Now supports major version 8 in additional to version 7.X.\n    - ``exoplayer``: Add support for Android 10.\n    - ``googlephotos``: Support newer apk version.\n    - ``gfxbench``: Allow user configuration for which tests should be ran.\n    - ``gfxbench``: Improved score detection for a wider range of devices.\n    - ``gfxbench``: Moved results extraction out of run stage.\n    - ``jankbench``: Support newer versions of Pandas for processing.\n    - ``pcmark``: Add support for handling additional popups and installation flows.\n    - ``pcmark``: No longer clear and re-download test data before each execution.\n    - ``speedometer``: Enable the workload to run offline and drops requirement for\n      UiAutomator. To support this root access is now required to run the workload.\n    - ``youtube``: Update to support later versions of the apk.\n\nOther:\n------\n    - ``cpustates``: Improved name handling for unknown idle states.\n\n\n***********\nVersion 3.2\n***********\n\n.. warning:: This release only supports Python 3.5+. 
Python 2 support has now\n             been dropped.\n\nFixes/Improvements\n==================\n\nFramework:\n----------\n    - ``TargetInfo`` now tracks installed modules and will ensure the cache is\n      also updated on module change.\n    - Migrated the build scripts for uiauto based workloads to Python 3.\n    - Uiauto applications now target SDK version 28 to prevent PlayProtect\n      blocking the installation of the automation apks on some devices.\n    - The workload metadata now includes the apk package name if applicable.\n\nInstruments:\n------------\n    - ``energy_instruments`` will now have their ``teardown`` method called\n      correctly.\n    - ``energy_instruments``: Added a ``keep_raw`` parameter to control whether\n      raw files generated during execution should be deleted upon teardown.\n    - Update relevant instruments to make use of the new devlib collector\n      interface, for more information please see the\n      `devlib documentation <https://devlib.readthedocs.io/en/latest/collectors.html>`_.\n\nOutput Processors:\n------------------\n    - ``postgres``: If initialisation fails then the output processor will no\n      longer attempt to reconnect at a later point during the run.\n    - ``postgres``: Will now ensure that the connection to the database is\n      re-established if it is dropped e.g. 
due to a long expecting workload.\n    - ``postgres``: Change the type of the ``hostid`` field to ``Bigint`` to\n      allow a larger range of ids.\n    - ``postgres``: Bump schema version to 1.5.\n    - ``perf``: Added support for the ``simpleperf`` profiling tool for android\n      devices.\n    - ``perf``: Added support for the perf ``record`` command.\n    - ``cpustates``: Improve handling of situations where cpufreq and/or cpuinfo\n      data is unavailable.\n\nWorkloads:\n----------\n    - ``adodereader``: Now support apk version 19.7.1.10709.\n    - ``antutu``: Supports dismissing of popup asking to create a shortcut on\n      the homescreen.\n    - ``gmail``: Now supports apk version 2019.05.26.252424914.\n    - ``googlemaps``: Now supports apk version 10.19.1.\n    - ``googlephotos``: Now supports apk version 4.28.0.\n    - ``geekbench``: Added support for versions 4.3.4, 4.4.0 and 4.4.2.\n    - ``geekbench-corporate``: Added support for versions 5.0.1 and 5.0.3.\n    - ``pcmark``: Now locks device orientation to portrait to increase\n      compatibility.\n    - ``pcmark``: Supports dismissing new Android 10 permission warnings.\n\nOther:\n------\n    - Improve documentation to help debugging module installation errors.\n\n*************\nVersion 3.1.4\n*************\n\n.. warning:: This is the last release that supports Python 2. 
Subsequent versions\n             will support Python 3.5+ only.\n\nNew Features:\n==============\n\nFramework:\n----------\n    - ``ApkWorkload``: Allow specifying a maximum and minimum version of an APK\n      instead of requiring a specific version.\n    - ``TestPackageHandler``: Added to support running android applications that\n      are invoked via ``am instrument``.\n    - Directories can now be added as ``Artifacts``.\n\nWorkloads:\n----------\n    - ``aitutu``: Executes the Aitutu Image Speed/Accuracy and Object\n      Speed/Accuracy tests.\n    - ``uibench``: Run a configurable activity of the UIBench workload suite.\n    - ``uibenchjanktests``: Run an automated and instrumented version of the\n      UIBench JankTests.\n    - ``motionmark``: Run a browser graphical benchmark.\n\nOther:\n------\n    - Added ``requirements.txt`` as a reference for known working package versions.\n\nFixes/Improvements\n==================\n\nFramework:\n----------\n    - ``JobOutput``: Added an ``augmentation`` attribute to allow listing of\n      enabled augmentations for individual jobs.\n    - Better error handling for misconfigured job selection.\n    - All ``Workload`` classes now have an ``uninstall`` parameter to control whether\n      any binaries installed to the target should be uninstalled again once the\n      run has completed.\n    - The ``cleanup_assets`` parameter is now more consistently utilized across\n      workloads.\n    - ``ApkWorkload``: Added an ``activity`` attribute to allow for overriding the\n      automatically detected version from the APK.\n    - ``ApkWorkload``: Added support for providing an implicit activity path.\n    - Fixed retrieving job level artifacts from a database backend.\n\nOutput Processors:\n------------------\n    - ``SysfsExtractor``: Ensure that the extracted directories are added as\n      ``Artifacts``.\n    - ``InterruptStatsInstrument``: Ensure that the output files are added as\n      ``Artifacts``.\n    - 
``Postgres``: Fix missing ``system_id`` field from ``TargetInfo``.\n    - ``Postgres``: Support uploading directory ``Artifacts``.\n    - ``Postgres``: Bump the schema version to v1.3.\n\nWorkloads:\n----------\n    - ``geekbench``: Improved apk version handling.\n    - ``geekbench``: Now supports apk version 4.3.2.\n\nOther:\n------\n    - ``Dockerfile``: Now installs all optional extras for use with WA.\n    - Fixed support for YAML anchors.\n    - Fixed building of documentation with Python 3.\n    - Changed shorthand of installing all of WA extras to `all` as per\n      the documentation.\n    - Upgraded the Dockerfile to use Ubuntu 18.10 and Python 3.\n    - Restricted maximum versions of ``numpy`` and ``pandas`` for Python 2.7.\n\n\n*************\nVersion 3.1.3\n*************\n\nFixes/Improvements\n==================\n\nOther:\n------\n    - Security update for PyYAML to attempt prevention of arbitrary code execution\n      during parsing.\n\n*************\nVersion 3.1.2\n*************\n\nFixes/Improvements\n==================\n\nFramework:\n----------\n    - Implement an explicit check for Devlib versions to ensure that versions\n      are kept in sync with each other.\n    - Added a ``View`` parameter to ApkWorkloads for use with certain instruments\n      for example ``fps``.\n    - Added ``\"supported_versions\"`` attribute to workloads to allow specifying a\n      list of supported version for a particular workload.\n    - Change default behaviour to run any available version of a workload if a\n      specific version is not specified.\n\nOutput Processors:\n------------------\n    - ``Postgres``: Fix handling of ``screen_resoultion`` during processing.\n\nOther\n-----\n    - Added additional information to documentation\n    - Added fix for Devlib's ``KernelConfig`` refactor\n    - Added a ``\"label\"`` property to ``Metrics``\n\n*************\nVersion 3.1.1\n*************\n\nFixes/Improvements\n==================\n\nOther\n-----\n    - Improve 
formatting when displaying metrics\n    - Update revent binaries to include latest fixes\n    - Update DockerImage to use new released version of WA and Devlib\n    - Fix broken package on PyPi\n\n*************\nVersion 3.1.0\n*************\n\nNew Features:\n==============\n\nCommands\n---------\n    - ``create database``: Added :ref:`create subcommand <create-command>`\n      command in order to initialize a PostgresSQL database to allow for storing\n      WA output with the Postgres Output Processor.\n\nOutput Processors:\n------------------\n    - ``Postgres``: Added output processor which can be used to populate a\n      Postgres database with the output generated from a WA run.\n    - ``logcat-regex``: Add new output processor to extract arbitrary \"key\"\n      \"value\" pairs from logcat.\n\nConfiguration:\n--------------\n    - :ref:`Configuration Includes <config-include>`: Add support for including\n      other YAML files inside agendas and config files using ``\"include#:\"``\n      entries.\n    - :ref:`Section groups <section-groups>`: This allows for a ``group`` entry\n      to be specified for each section and will automatically cross product the\n      relevant sections with sections from other groups adding the relevant\n      classifiers.\n\nFramework:\n----------\n    - Added support for using the :ref:`OutputAPI <output_processing_api>` with a\n      Postgres Database backend. 
Used to retrieve and\n      :ref:`process <processing_output>` run data uploaded by the ``Postgres``\n      output processor.\n\nWorkloads:\n----------\n    - ``gfxbench-corporate``: Execute a set of on and offscreen graphical benchmarks from\n      GFXBench including Car Chase and Manhattan.\n    - ``glbench``: Measures the graphics performance of Android devices by\n      testing the underlying OpenGL (ES) implementation.\n\n\nFixes/Improvements\n==================\n\nFramework:\n----------\n  - Remove quotes from ``sudo_cmd`` parameter default value due to changes in\n    devlib.\n  - Various Python 3 related fixes.\n  - Ensure plugin names are converted to identifiers internally to act more\n    consistently when dealing with names containing ``-``'s etc.\n  - Now correctly updates RunInfo with project and run name information.\n  - Add versioning support for POD structures with the ability to\n    automatically update data structures / formats to new versions.\n\nCommands:\n---------\n  - Fix revent target initialization.\n  - Fix revent argument validation.\n\nWorkloads:\n----------\n  - ``Speedometer``: Close open tabs upon workload completion.\n  - ``jankbench``: Ensure that the logcat monitor thread is terminated\n    correctly to prevent left over adb processes.\n  - UiAutomator workloads are now able to dismiss android warning that a\n    workload has not been designed for the latest version of android.\n\nOther:\n------\n- Report additional metadata about target, including: system_id,\n  page_size_kb.\n- Uses cache directory to reduce target calls, e.g. 
will now use cached\n  version of TargetInfo if local copy is found.\n- Update recommended :ref:`installation <github>` commands when installing from\n  github due to pip not following dependency links correctly.\n- Fix incorrect parameter names in runtime parameter documentation.\n\n\n--------------------------------------------------\n\n\n*************\nVersion 3.0.0\n*************\n\nWA3 is a more or less from-scratch re-write of WA2. We have attempted to\nmaintain configuration-level compatibility wherever possible (so WA2 agendas\n*should* mostly work with WA3), however some breaks are likely and minor tweaks\nmay be needed.\n\nIn terms of the API, WA3 is completely different, and WA2 extensions **will not\nwork** with WA3 -- they would need to be ported into WA3 plugins.\n\nFor more information on migrating from WA2 to WA3 please see the\n:ref:`migration-guide`.\n\nNot all of WA2's extensions have been ported for the initial 3.0.0 release. We\nhave ported the ones we believe to be most widely used and useful. The porting\nwork will continue, and more of WA2's extensions will be in future releases.\nHowever, we do not intend to port absolutely everything, as some things we\nbelieve to be no longer useful.\n\n.. note:: If there is a particular WA2 extension you would like to see in WA3 that\n          is not yet there, please let us know via the GitHub issues. (And, of\n          course, we always welcome pull requests, if you have the time to\n          do the port yourselves :-) ).\n\nNew Features\n============\n\n- Python 3 support. WA now runs on both Python 2 and Python 3.\n\n  .. warning:: Python 2 support should now be considered deprecated. Python 2\n               will still be fully supported up to the next major release\n               (v3.1). After that, Python 2 will be supported for existing\n               functionality, however there will be no guarantee that newly\n               added functionality would be compatible with Python 2. 
Support\n               for Python 2 will be dropped completely after release v3.2.\n\n- There is a new Output API which can be used to aid in post processing a\n  run's output. For more information please see :ref:`output_processing_api`.\n- All \"augmentations\" can now be enabled on a per workload basis (in WA2 this\n  was available for instruments, but not result processors).\n- More portable runtime parameter specification. Runtime parameters now support\n  generic aliases, so instead of specifying ``a73_frequency: 1805000`` in your\n  agenda, and then having to modify this for another target, it is now possible\n  to specify ``big_frequency: max``.\n- ``-c`` option can now be used multiple times to specify several config files\n  for a single run, allowing for a more fine-grained configuration management.\n- It is now possible to disable all previously configured augmentations from an\n  agenda using ``~~``.\n- Offline output processing with ``wa process`` command. It is now possible to\n  run processors on previously collected WA results, without the need for a\n  target connection.\n- A lot more metadata is collected as part of the run, including much more\n  detailed information about the target, and MD5 hashes of all resources used\n  during the run.\n- Better ``show`` command. ``wa show`` command now utilizes ``pandoc`` and\n  ``man`` to produce easier-to-browse documentation format, and has been\n  enhanced to include documentation on general settings, runtime parameters, and\n  plugin aliases.\n- Better logging. The default ``stdout`` output is now more informative.\n  The verbose output is much more detailed. Nested indentation is used for\n  different phases of execution to make log output easier to parse visually.\n- Full ``ChromeOS`` target support. Including support for the Android container\n  apps.\n- Implemented on top of devlib_. 
WA3 plugins can make use of devlib's enhanced\n  target API (much richer and more robust than WA2's Device API).\n- All-new documentation. The docs have been revamped to be more useful and\n  complete.\n\n.. _devlib: https://github.com/ARM-software/devlib\n\nChanges\n=======\n\n- Configuration files ``config.py`` are now specified in YAML format in\n  ``config.yaml``. WA3 has support for automatic conversion of the default\n  config file and will be performed upon first invocation of WA3.\n- The \"config\" and \"global\" sections in an agenda are now interchangeable so can\n  all be specified in a \"config\" section.\n- \"Results Processors\" are now known as \"Output Processors\" and can now be ran\n  offline.\n- \"Instrumentation\" is now known as \"Instruments\" for more consistent naming.\n- Both \"Output Processor\" and \"Instrument\" configuration have been merged into\n  \"Augmentations\" (support for the old naming schemes have been retained for\n  backwards compatibility)\n\n\n"
  },
  {
    "path": "doc/source/conf.py",
    "content": "# -*- coding: utf-8 -*-\n# Copyright 2023 ARM Limited\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n# WA3 documentation build configuration file.\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n# This file is execfile()d with the current directory set to its\n# containing dir.\n#\n# Note that not all possible configuration values are present in this\n# autogenerated file.\n#\n# All configuration values have a default; values that are commented out\n# serve to show the default.\n\nimport sys\nimport os\nimport shlex\n\nthis_dir = os.path.dirname(__file__)\nsys.path.insert(0, os.path.join(this_dir, '..'))\nsys.path.insert(0, os.path.join(this_dir, '../..'))\nimport wa\nfrom build_plugin_docs import (generate_plugin_documentation,\n                               generate_run_config_documentation,\n                               generate_meta_config_documentation,\n                               generate_target_documentation)\nfrom build_instrument_method_map import generate_instrument_method_map\n\n# If extensions (or modules to document with autodoc) are in another directory,\n# add these directories to sys.path here. If the directory is relative to the\n# documentation root, use os.path.abspath to make it absolute, like shown here.\n#sys.path.insert(0, os.path.abspath('.'))\n\n# -- General configuration ------------------------------------------------\n\n# If your documentation needs a minimal Sphinx version, state it here.\n#needs_sphinx = '1.0'\n\n# Add any Sphinx extension module names here, as strings. 
They can be\n# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom\n# ones.\nextensions = [\n    'sphinx.ext.autodoc',\n    'sphinx.ext.viewcode',\n]\n\n# Add any paths that contain templates here, relative to this directory.\ntemplates_path = ['static/templates']\n\n# The suffix(es) of source filenames.\n# You can specify multiple suffix as a list of string:\n# source_suffix = ['.rst', '.md']\nsource_suffix = '.rst'\n\n# The encoding of source files.\n#source_encoding = 'utf-8-sig'\n\n# The master toctree document.\nmaster_doc = 'index'\n\n# General information about the project.\nproject = u'wa'\ncopyright = u'2023, ARM Limited'\nauthor = u'ARM Limited'\n\n# The version info for the project you're documenting, acts as replacement for\n# |version| and |release|, also used in various other places throughout the\n# built documents.\n#\n# The short X.Y version.\nversion = wa.framework.version.get_wa_version()\n# The full version, including alpha/beta/rc tags.\nrelease = wa.framework.version.get_wa_version()\n\n# The language for content autogenerated by Sphinx. Refer to documentation\n# for a list of supported languages.\n#\n# This is also used if you do content translation via gettext catalogs.\n# Usually you set \"language\" from the command line for these cases.\nlanguage = None\n\n# There are two options for replacing |today|: either, you set today to some\n# non-false value, then it is used:\n#today = ''\n# Else, today_fmt is used as the format for a strftime call.\n#today_fmt = '%B %d, %Y'\n\n# List of patterns, relative to source directory, that match files and\n# directories to ignore when looking for source files.\nexclude_patterns = ['../build', 'developer_information',\n                    'user_information', 'run_config']\n\n# The reST default role (used for this markup: `text`) to use for all\n# documents.\n#default_role = None\n\n# If true, '()' will be appended to :func: etc. 
cross-reference text.\n#add_function_parentheses = True\n\n# If true, the current module name will be prepended to all description\n# unit titles (such as .. function::).\n#add_module_names = True\n\n# If true, sectionauthor and moduleauthor directives will be shown in the\n# output. They are ignored by default.\n#show_authors = False\n\n# The name of the Pygments (syntax highlighting) style to use.\npygments_style = 'sphinx'\n\n# A list of ignored prefixes for module index sorting.\n#modindex_common_prefix = []\n\n# If true, keep warnings as \"system message\" paragraphs in the built documents.\n#keep_warnings = False\n\n# If true, `todo` and `todoList` produce output, else they produce nothing.\ntodo_include_todos = False\n\n\n# -- Options for HTML output ----------------------------------------------\n\n# The theme to use for HTML and HTML Help pages.  See the documentation for\n# a list of builtin themes.\nhtml_theme = 'sphinx_rtd_theme'\n\n# Theme options are theme-specific and customize the look and feel of a theme\n# further.  For a list of options available for each theme, see the\n# documentation.\nhtml_theme_options = {\n      'logo_only': True\n}\n\n# Add any paths that contain custom themes here, relative to this directory.\n#html_theme_path = []\n\n# The name for this set of Sphinx documents.  If None, it defaults to\n# \"<project> v<release> documentation\".\n#html_title = None\n\n# A shorter title for the navigation bar.  Default is the same as html_title.\n#html_short_title = None\n\n# The name of an image file (relative to this directory) to place at the top\n# of the sidebar.\nhtml_logo = 'WA-logo-white.svg'\n\n# The name of an image file (within the static path) to use as favicon of the\n# docs.  This file should be a Windows icon file (.ico) being 16x16 or 32x32\n# pixels large.\n#html_favicon = None\n\n# Add any paths that contain custom static files (such as style sheets) here,\n# relative to this directory. 
They are copied after the builtin static files,\n# so a file named \"default.css\" will overwrite the builtin \"default.css\".\n# html_static_path = ['static']\n\n# Add any extra paths that contain custom files (such as robots.txt or\n# .htaccess) here, relative to this directory. These files are copied\n# directly to the root of the documentation.\n#html_extra_path = []\n\n# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,\n# using the given strftime format.\n#html_last_updated_fmt = '%b %d, %Y'\n\n# If true, SmartyPants will be used to convert quotes and dashes to\n# typographically correct entities.\n#html_use_smartypants = True\n\n# Custom sidebar templates, maps document names to template names.\n#html_sidebars = {}\n\n# Additional templates that should be rendered to pages, maps page names to\n# template names.\n#html_additional_pages = {}\n\n# If false, no module index is generated.\n#html_domain_indices = True\n\n# If false, no index is generated.\n#html_use_index = True\n\n# If true, the index is split into individual pages for each letter.\n#html_split_index = False\n\n# If true, links to the reST sources are added to the pages.\n#html_show_sourcelink = True\n\n# If true, \"Created using Sphinx\" is shown in the HTML footer. Default is True.\n#html_show_sphinx = True\n\n# If true, \"(C) Copyright ...\" is shown in the HTML footer. Default is True.\n#html_show_copyright = True\n\n# If true, an OpenSearch description file will be output, and all pages will\n# contain a <link> tag referring to it.  The value of this option must be the\n# base URL from which the finished HTML is served.\n#html_use_opensearch = ''\n\n# This is the file name suffix for HTML files (e.g. 
\".xhtml\").\n#html_file_suffix = None\n\n# Language to be used for generating the HTML full-text search index.\n# Sphinx supports the following languages:\n#   'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'\n#   'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'\n#html_search_language = 'en'\n\n# A dictionary with options for the search language support, empty by default.\n# Now only 'ja' uses this config value\n#html_search_options = {'type': 'default'}\n\n# The name of a javascript file (relative to the configuration directory) that\n# implements a search results scorer. If empty, the default will be used.\n#html_search_scorer = 'scorer.js'\n\n# Output file base name for HTML help builder.\nhtmlhelp_basename = 'wadoc'\n\n# -- Options for LaTeX output ---------------------------------------------\n\nlatex_elements = {\n# The paper size ('letterpaper' or 'a4paper').\n#'papersize': 'letterpaper',\n\n# The font size ('10pt', '11pt' or '12pt').\n#'pointsize': '10pt',\n\n# Additional stuff for the LaTeX preamble.\n#'preamble': '',\n\n# Latex figure (float) alignment\n#'figure_align': 'htbp',\n}\n\n# Grouping the document tree into LaTeX files. 
List of tuples\n# (source start file, target name, title,\n#  author, documentclass [howto, manual, or own class]).\nlatex_documents = [\n  (master_doc, 'wa.tex', u'wa Documentation',\n   u'Arm Limited', 'manual'),\n]\n\n# The name of an image file (relative to this directory) to place at the top of\n# the title page.\n#latex_logo = None\n\n# For \"manual\" documents, if this is true, then toplevel headings are parts,\n# not chapters.\n#latex_use_parts = False\n\n# If true, show page references after internal links.\n#latex_show_pagerefs = False\n\n# If true, show URL addresses after external links.\n#latex_show_urls = False\n\n# Documents to append as an appendix to all manuals.\n#latex_appendices = []\n\n# If false, no module index is generated.\n#latex_domain_indices = True\n\n\n# -- Options for manual page output ---------------------------------------\n\n# One entry per manual page. List of tuples\n# (source start file, name, description, authors, manual section).\nman_pages = [\n    (master_doc, 'wa', u'wa Documentation',\n     [author], 1)\n]\n\n# If true, show URL addresses after external links.\n#man_show_urls = False\n\n\n# -- Options for Texinfo output -------------------------------------------\n\n# Grouping the document tree into Texinfo files. 
List of tuples\n# (source start file, target name, title, author,\n#  dir menu entry, description, category)\ntexinfo_documents = [\n  (master_doc, 'wa', u'wa Documentation',\n   author, 'wa', 'A framework for automating workload execution on mobile devices.',\n   'Miscellaneous'),\n]\n\n# Documents to append as an appendix to all manuals.\n#texinfo_appendices = []\n\n# If false, no module index is generated.\n#texinfo_domain_indices = True\n\n# How to display URL addresses: 'footnote', 'no', or 'inline'.\n#texinfo_show_urls = 'footnote'\n\n# If true, do not generate a @detailmenu in the \"Top\" node's menu.\n#texinfo_no_detailmenu = False\n\ndef setup(app):\n    module_dir = os.path.join('..', '..', 'wa')\n    excluded_extensions = [os.path.join(module_dir, 'framework'),\n                           os.path.join(module_dir, 'tests')]\n    os.chdir(os.path.dirname(__file__))\n    generate_plugin_documentation(module_dir, 'plugins', excluded_extensions)\n    generate_target_documentation('plugins')\n    generate_run_config_documentation('run_config')\n    generate_meta_config_documentation('run_config')\n    generate_instrument_method_map(os.path.join('developer_information', 'developer_guide',\n                                                'instrument_method_map.rst'))\n    app.add_object_type('confval', 'confval',\n                        objname='configuration value',\n                        indextemplate='pair: %s; configuration value')\n"
  },
  {
    "path": "doc/source/developer_information/developer_guide/writing_plugins.rst",
    "content": ".. _writing-plugins:\n\n\nWriting Plugins\n================\n\nWorkload Automation offers several plugin points (or plugin types). The most\ninteresting of these are\n\n:workloads: These are the tasks that get executed and measured on the device. These\n            can be benchmarks, high-level use cases, or pretty much anything else.\n:targets: These are interfaces to the physical devices (development boards or end-user\n          devices, such as smartphones) that use cases run on. Typically each model of a\n          physical device would require its own interface class (though some functionality\n          may be reused by subclassing from an existing base).\n:instruments: Instruments allow collecting additional data from workload execution (e.g.\n              system traces). Instruments are not specific to a particular workload. Instruments\n              can hook into any stage of workload execution.\n:output processors: These are used to format the results of workload execution once they have been\n                    collected. Depending on the callback used, these will run either after each\n                    iteration and/or at the end of the run, after all of the results have been\n                    collected.\n\nYou can create a plugin by subclassing the appropriate base class, defining\nappropriate methods and attributes, and putting the .py file containing the\nclass into the \"plugins\" subdirectory under ``~/.workload_automation`` (or\nequivalent) where it will be automatically picked up by WA.\n\n\nPlugin Basics\n--------------\n\nThis sub-section covers things common to implementing plugins of all types. It\nis recommended you familiarize yourself with the information here before\nproceeding onto guidance for specific plugin types.\n\n.. 
_resource-resolution:\n\nDynamic Resource Resolution\n~~~~~~~~~~~~~~~~~~~~~~~~~~~\n\nThe idea is to decouple resource identification from resource discovery.\nWorkloads/instruments/devices/etc state *what* resources they need, and not\n*where* to look for them -- this instead is left to the resource resolver that\nis part of the execution context. The actual discovery of resources is\nperformed by resource getters that are registered with the resolver.\n\nA resource type is defined by a subclass of\n:class:`wa.framework.resource.Resource`. An instance of this class describes a\nresource that is to be obtained. At minimum, a ``Resource`` instance has an\nowner (which is typically the object that is looking for the resource), but\nspecific resource types may define other parameters that describe an instance of\nthat resource (such as file names, URLs, etc).\n\nAn object looking for a resource invokes a resource resolver with an instance of\n``Resource`` describing the resource it is after. The resolver goes through the\ngetters registered for that resource type in priority order attempting to obtain\nthe resource; once the resource is obtained, it is returned to the calling\nobject. If none of the registered getters could find the resource,\n``NotFoundError`` is raised (or ``None`` is returned instead, if invoked with\n``strict=False``).\n\nThe most common kind of object looking for resources is a ``Workload``, and the\n``Workload`` class defines\n:py:meth:`wa.framework.workload.Workload.init_resources` method, which may be\noverridden by subclasses to perform resource resolution. 
For example, a workload\nlooking for an executable file would do so like this::\n\n    from wa import Workload\n    from wa import Executable\n\n    class MyBenchmark(Workload):\n\n        # ...\n\n        def init_resources(self, resolver):\n            resource = Executable(self, self.target.abi, 'my_benchmark')\n            host_exe = resolver.get(resource)\n\n        # ...\n\n\nCurrently available resource types are defined in :py:mod:`wa.framework.resources`.\n\n.. _deploying-executables:\n\nDeploying executables to a target\n~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n\nSome targets may have certain restrictions on where executable binaries may be\nplaced and how they should be invoked. To ensure your plugin works with as\nwide a range of targets as possible, you should use WA APIs for deploying and\ninvoking executables on a target, as outlined below.\n\nAs with other resources, host-side paths to the executable binary to be deployed\nshould be obtained via the :ref:`resource resolver <resource-resolution>`. A\nspecial resource type, ``Executable`` is used to identify a binary to be\ndeployed. This is similar to the regular ``File`` resource, however it takes an\nadditional parameter that specifies the ABI for which the executable was\ncompiled.\n\nIn order for the binary to be obtained in this way, it must be stored in one of\nthe locations scanned by the resource resolver in a directory structure\n``<root>/bin/<abi>/<binary>`` (where ``root`` is the base resource location to\nbe searched, e.g. ``~/.workload_automation/dependencies/<plugin name>``, and\n``<abi>`` is the ABI for which the executable has been compiled, as returned by\n``self.target.abi``).\n\nOnce the path to the host-side binary has been obtained, it may be deployed\nusing one of two methods from a\n`Target <http://devlib.readthedocs.io/en/latest/target.html>`_ instance --\n``install`` or ``install_if_needed``. 
The latter will check a version of that\nbinary has been previously deployed by WA and will not try to re-install.\n\n.. code:: python\n\n  from wa import Executable\n\n  host_binary = context.get(Executable(self, self.target.abi, 'some_binary'))\n  target_binary = self.target.install_if_needed(host_binary)\n\n\n.. note:: Please also note that the check is done based solely on the binary name.\n          For more information please see the devlib\n          `documentation <http://devlib.readthedocs.io/en/latest/target.html#Target.install_if_needed>`_.\n\nBoth of the above methods will return the path to the installed binary on the\ntarget. The executable should be invoked *only* via that path; do **not** assume\nthat it will be in ``PATH`` on the target (or that the executable with the same\nname in ``PATH`` is the version deployed by WA.\n\nFor more information on how to implement this, please see the\n:ref:`how to guide <deploying-executables-example>`.\n\n\nDeploying assets\n-----------------\nWA provides a generic mechanism for deploying assets during workload initialization.\nWA will automatically try to retrieve and deploy each asset to the target's working directory\nthat is contained in a workloads ``deployable_assets`` attribute stored as a list.\n\nIf the parameter ``cleanup_assets`` is set then any asset deployed will be removed\nagain and the end of the run.\n\nIf the workload requires a custom deployment mechanism the ``deploy_assets``\nmethod can be overridden for that particular workload, in which case, either\nadditional assets should have their on target paths added to the workload's\n``deployed_assests`` attribute or the corresponding ``remove_assets`` method\nshould also be implemented.\n\n.. _instrument-reference:\n\nAdding an Instrument\n---------------------\nInstruments can be used to collect additional measurements during workload\nexecution (e.g. collect power readings). An instrument can hook into almost any\nstage of workload execution. 
Any new instrument should be a subclass of\nInstrument and it must have a name. When a new instrument is added to Workload\nAutomation, the methods of the new instrument will be found automatically and\nhooked up to the supported signals. Once a signal is broadcasted, the\ncorresponding registered method is invoked.\n\nEach method in ``Instrument`` must take two arguments, which are ``self`` and\n``context``. Supported methods and their corresponding signals can be found in\nthe :ref:`Signals Documentation <instruments_method_map>`. To make\nimplementations easier and common, the basic steps to add new instrument is\nsimilar to the steps to add new workload and an example can be found in the\n:ref:`How To <adding-an-instrument-example>` section.\n\n.. _instrument-api:\n\nTo implement your own instrument the relevant methods of the interface shown\nbelow should be implemented:\n\n    :name:\n\n            The name of the instrument, this must be unique to WA.\n\n    :description:\n\n            A description of what the instrument can be used for.\n\n    :parameters:\n\n            A list of additional :class:`Parameters` the instrument can take.\n\n    :initialize(context):\n\n                This method will only be called once during the workload run\n                therefore operations that only need to be performed initially should\n                be performed here for example pushing the files to the target device,\n                installing them.\n\n    :setup(context):\n\n                This method is invoked after the workload is setup. All the\n                necessary setup should go inside this method. Setup, includes\n                operations like clearing logs, additional configuration etc.\n\n    :start(context):\n\n                It is invoked just before the workload start execution. 
Here is\n                where instrument measurement start being registered/taken.\n\n    :stop(context):\n\n                It is invoked just after the workload execution stops and where\n                the measurements should stop being taken/registered.\n\n    :update_output(context):\n\n                This method is invoked after the workload updated its result and\n                where the taken measures should be added to the result so it can be\n                processed by WA.\n\n    :teardown(context):\n\n                It is invoked after the workload is torn down. It is a good place\n                to clean any logs generated by the instrument.\n\n    :finalize(context):\n\n                This method is the complement to the initialize method and will also\n                only be called once so should be used to deleting/uninstalling files\n                pushed to the device.\n\n\nThis is similar to a ``Workload``, except all methods are optional. In addition to\nthe workload-like methods, instruments can define a number of other methods that\nwill get invoked at various points during run execution. The most useful of\nwhich is perhaps ``initialize`` that gets invoked after the device has been\ninitialised for the first time, and can be used to perform one-time setup (e.g.\ncopying files to the device -- there is no point in doing that for each\niteration). The full list of available methods can be found in\n:ref:`Signals Documentation <instruments_method_map>`.\n\n.. _prioritization:\n\nPrioritization\n~~~~~~~~~~~~~~\n\nCallbacks (e.g. ``setup()`` methods) for all instruments get executed at the\nsame point during workload execution, one after another. The order in which the\ncallbacks get invoked should be considered arbitrary and should not be relied\non (e.g. you cannot expect that just because instrument A is listed before\ninstrument B in the config, instrument A's callbacks will run first).\n\nIn some cases (e.g. 
in ``start()`` and ``stop()`` methods), it is important to\nensure that a particular instrument's callbacks run as closely as possible to the\nworkload's invocations in order to maintain accuracy of readings; or,\nconversely, that a callback is executed after the others, because it takes a\nlong time and may throw off the accuracy of other instruments. You can do\nthis by using decorators on the appropriate methods. The available decorators are:\n``very_slow``, ``slow``, ``normal``, ``fast``, ``very_fast``, with ``very_fast``\nrunning closest to the workload invocation and ``very_slow`` running furthest\naway. For example::\n\n    from wa import very_fast\n    # ..\n\n    class PreciseInstrument(Instrument):\n\n        # ...\n        @very_fast\n        def start(self, context):\n            pass\n\n        @very_fast\n        def stop(self, context):\n            pass\n\n        # ...\n\n``PreciseInstrument`` will be started after all other instruments (i.e.\n*just* before the workload runs), and it will be stopped before all other\ninstruments (i.e. *just* after the workload runs).\n\nIf more than one active instrument has specified fast (or slow) callbacks, then\ntheir execution order with respect to each other is not guaranteed. In general,\nhaving a lot of instruments enabled is going to negatively affect the\nreadings. 
The best way to ensure accuracy of measurements is to minimize the\nnumber of active instruments (perhaps doing several identical runs with\ndifferent instruments enabled).\n\nExample\n^^^^^^^\n\nBelow is a simple instrument that measures the execution time of a workload::\n\n    class ExecutionTimeInstrument(Instrument):\n        \"\"\"\n        Measure how long it took to execute the run() methods of a Workload.\n\n        \"\"\"\n\n        name = 'execution_time'\n\n        def initialize(self, context):\n            self.start_time = None\n            self.end_time = None\n\n        @very_fast\n        def start(self, context):\n            self.start_time = time.time()\n\n        @very_fast\n        def stop(self, context):\n            self.end_time = time.time()\n\n        def update_output(self, context):\n            execution_time = self.end_time - self.start_time\n            context.add_metric('execution_time', execution_time, 'seconds')\n\n\n.. include:: developer_information/developer_guide/instrument_method_map.rst\n\n.. _adding-an-output-processor:\n\nAdding an Output processor\n----------------------------\n\nAn output processor is responsible for processing the results. This may\ninvolve formatting and writing them to a file, uploading them to a database,\ngenerating plots, etc. 
WA comes with a few output processors that output\nresults in a few common formats (such as csv or JSON).\n\nYou can add your own output processors by creating a Python file in\n``~/.workload_automation/plugins`` with a class that derives from\n:class:`wa.OutputProcessor <wa.framework.processor.OutputProcessor>`, and should\nimplement the relevant methods shown below, for more information and please\nsee the\n:ref:`Adding an Output Processor <adding-an-output-processor-example>` section.\n\n    :name:\n\n            The name of the output processor, this must be unique to WA.\n\n    :description:\n\n            A description of what the output processor can be used for.\n\n    :parameters:\n\n            A list of additional :class:`Parameters` the output processor can take.\n\n    :initialize(context):\n\n                This method will only be called once during the workload run\n                therefore operations that only need to be performed initially should\n                be performed here.\n\n    :process_job_output(output, target_info, run_ouput):\n\n                This method should be used to perform the processing of the\n                output from an individual job output. This is where any\n                additional artifacts should be generated if applicable.\n\n    :export_job_output(output, target_info, run_ouput):\n\n                This method should be used to perform the exportation of the\n                existing data collected/generated for an individual job. E.g.\n                uploading them to a database etc.\n\n    :process_run_output(output, target_info):\n\n                This method should be used to perform the processing of the\n                output from the run as a whole. 
This is where any\n                additional artifacts should be generated if applicable.\n\n    :export_run_output(output, target_info):\n\n                This method should be used to perform the exportation of the\n                existing data collected/generated for the run as a whole. E.g.\n                uploading them to a database etc.\n\n    :finalize(context):\n\n                This method is the complement to the initialize method and will also\n                only be called once.\n\n\nThe method names should be fairly self-explanatory. The difference between\n\"process\" and \"export\" methods is that export methods will be invoked after\nprocess methods for all output processors have been generated. Process methods\nmay generate additional artifacts (metrics, files, etc.), while export methods\nshould not -- they should only handle existing results (upload them to  a\ndatabase, archive on a filer, etc).\n\nThe output object passed to job methods is an instance of\n:class:`wa.framework.output.JobOutput`, the output object passed to run methods\nis an instance of :class:`wa.RunOutput <wa.framework.output.RunOutput>`.\n\n\nAdding a Resource Getter\n------------------------\n\nA resource getter is a plugin that is designed to retrieve a resource\n(binaries, APK files or additional workload assets). Resource getters are invoked in\npriority order until one returns the desired resource.\n\nIf you want WA to look for resources somewhere it doesn't by default (e.g. 
you\nhave a repository of APK files), you can implement a getter for the resource and\nregister it with a higher priority than the standard WA getters, so that it gets\ninvoked first.\n\nInstances of a resource getter should implement the following interface::\n\n    class ResourceGetter(Plugin):\n\n        name = None\n\n        def register(self, resolver):\n            raise NotImplementedError()\n\nThe getter should define a name for itself (as with all plugins), in addition it\nshould implement the ``register`` method. This involves registering a method\nwith the resolver that should be called when trying to retrieve a resource\n(typically ``get``) along with its priority (see `Getter Prioritization`_\nbelow). That method should return an instance of the resource that\nhas been discovered (what \"instance\" means depends on the resource, e.g. it\ncould be a file path), or ``None`` if this getter was unable to discover\nthat resource.\n\nGetter Prioritization\n~~~~~~~~~~~~~~~~~~~~~\n\nA priority is an integer with higher numeric values indicating a higher\npriority. The following standard priority aliases are defined for getters:\n\n\n    :preferred: Take this resource in favour of the environment resource.\n    :local: Found somewhere under ~/.workload_automation/ or equivalent, or\n            from environment variables, external configuration files, etc.\n            These will override resources supplied with the package.\n    :lan: Resource will be retrieved from a locally mounted remote location\n          (such as samba share)\n    :remote: Resource will be downloaded from a remote location (such as an HTTP\n             server)\n    :package: Resource provided with the package.\n\nThese priorities are defined as class members of\n:class:`wa.framework.resource.SourcePriority`, e.g. ``SourcePriority.preferred``.\n\nMost getters in WA will be registered with either ``local`` or\n``package`` priorities. 
So if you want your getter to override the default, it\nshould typically be registered as ``preferred``.\n\nYou don't have to stick to standard priority levels (though you should, unless\nthere is a good reason). Any integer is a valid priority. The standard priorities\nrange from 0 to 40 in increments of 10.\n\nExample\n~~~~~~~\n\nThe following is an implementation of a getter that searches for files in the\nusers dependencies directory, typically\n``~/.workload_automation/dependencies/<workload_name>`` It uses the\n``get_from_location`` method to filter the available files in the provided\ndirectory appropriately::\n\n    import sys\n\n    from wa import settings,\n    from wa.framework.resource import ResourceGetter, SourcePriority\n    from wa.framework.getters import get_from_location\n    from wa.utils.misc import ensure_directory_exists as _d\n\n    class UserDirectory(ResourceGetter):\n\n        name = 'user'\n\n        def register(self, resolver):\n            resolver.register(self.get, SourcePriority.local)\n\n        def get(self, resource):\n            basepath = settings.dependencies_directory\n            directory = _d(os.path.join(basepath, resource.owner.name))\n            return get_from_location(directory, resource)\n\n.. _adding_a_target:\n\nAdding a Target\n---------------\n\nIn WA3, a 'target' consists of a platform and a devlib target. The\nimplementations of the targets are located in ``devlib``. WA3 will instantiate a\ndevlib target passing relevant parameters parsed from the configuration. 
For\nmore information about devlib targets please see `the documentation\n<http://devlib.readthedocs.io/en/latest/target.html>`_.\n\nThe currently available platforms are:\n    :generic: The 'standard' platform implementation of the target, this should\n              work for the majority of use cases.\n    :juno: A platform implementation specifically for the juno.\n    :tc2: A platform implementation specifically for the tc2.\n    :gem5: A platform implementation to interact with a gem5 simulation.\n\nThe currently available targets from devlib are:\n    :linux: A device running a Linux based OS.\n    :android: A device running Android OS.\n    :local: Used to run locally on a linux based host.\n    :chromeos: A device running ChromeOS, supporting an android container if available.\n\nFor an example of adding your own customized version of an existing devlib target,\nplease see the how to section :ref:`Adding a Custom Target <adding-custom-target-example>`.\n\n\nOther Plugin Types\n---------------------\n\nIn addition to plugin types covered above, there are a few other, more\nspecialized ones. They will not be covered in as much detail. Most of them\nexpose relatively simple interfaces with only a couple of methods and it is\nexpected that if the need arises to extend them, the API-level documentation\nthat accompanies them, in addition to what has been outlined here, should\nprovide enough guidance.\n\n:commands: This allows extending WA with additional sub-commands (to supplement\n           existing ones outlined in the :ref:`invocation` section).\n:modules: Modules are \"plugins for plugins\". 
They can be loaded by other\nplugins to expand their functionality (for example, a flashing\nmodule may be loaded by a device in order to support flashing).\n\n\nPackaging Your Plugins\n----------------------\n\nIf you have written a bunch of plugins, and you want to make it easy to\ndeploy them to new systems and/or to update them on existing systems, you can\nwrap them in a Python package. You can use the ``wa create package`` command to\ngenerate appropriate boilerplate. This will create a ``setup.py`` and a\ndirectory for your package that you can place your plugins into.\n\nFor example, if you have a workload inside ``my_workload.py`` and an output\nprocessor in ``my_output_processor.py``, and you want to package them as\n``my_wa_exts`` package, first run the create command ::\n\n        wa create package my_wa_exts\n\nThis will create a ``my_wa_exts`` directory which contains a\n``my_wa_exts/setup.py`` and a subdirectory ``my_wa_exts/my_wa_exts`` which is\nthe package directory for your plugins (you can rename the top-level\n``my_wa_exts`` directory to anything you like -- it's just a \"container\" for the\nsetup.py and the package directory). Once you have that, you can then copy your\nplugins into the package directory, creating\n``my_wa_exts/my_wa_exts/my_workload.py`` and\n``my_wa_exts/my_wa_exts/my_output_processor.py``. If you have a lot of\nplugins, you might want to organize them into subpackages, but only the\ntop-level package directory is created by default, and it is OK to have\neverything in there.\n\n.. note:: When discovering plugins through this mechanism, WA traverses the\n          Python module/submodule tree, not the directory structure, therefore,\n          if you are going to create subdirectories under the top level directory\n          created for you, it is important that you make sure they are valid\n          Python packages; i.e.  
each subdirectory must contain a __init__.py\n          (even if blank) in order for the code in that directory and its\n          subdirectories to be discoverable.\n\nAt this stage, you may want to edit ``params`` structure near the bottom of\nthe ``setup.py`` to add correct author, license and contact information (see\n\"Writing the Setup Script\" section in standard Python documentation for\ndetails). You may also want to add a README and/or a COPYING file at the same\nlevel as the setup.py.  Once you have the contents of your package sorted,\nyou can generate the package by running ::\n\n        cd my_wa_exts\n        python setup.py sdist\n\nThis will generate ``my_wa_exts/dist/my_wa_exts-0.0.1.tar.gz`` package which\ncan then be deployed on the target system with standard Python package\nmanagement tools, e.g. ::\n\n        sudo pip install my_wa_exts-0.0.1.tar.gz\n\nAs part of the installation process, the setup.py in the package will write the\npackage's name into ``~/.workload_automation/packages``. This will tell WA that\nthe package contains plugins and it will load them next time it runs.\n\n.. note:: There are no uninstall hooks in ``setuptools``, so if you ever\n          uninstall your WA plugins package, you will have to manually remove\n          it from ``~/.workload_automation/packages`` otherwise WA will complain\n          about a missing package next time you try to run it.\n"
  },
  {
    "path": "doc/source/developer_information/developer_guide.rst",
    "content": ".. _developer_guide:\n\n***************\nDeveloper Guide\n***************\n\n.. contents::\n   :depth: 3\n   :local:\n\n.. include:: developer_information/developer_guide/writing_plugins.rst\n\n"
  },
  {
    "path": "doc/source/developer_information/developer_reference/contributing.rst",
    "content": "Contributing\n============\n\nCode\n----\n\nWe welcome code contributions via GitHub pull requests. To help with\nmaintainability of the code line we ask that the code uses a coding style\nconsistent with the rest of WA code. Briefly, it is\n\n- `PEP8 <https://www.python.org/dev/peps/pep-0008/>`_ with line length and block\n  comment rules relaxed (the wrapper for PEP8 checker inside ``dev_scripts``\n  will run it with appropriate configuration).\n- Four-space indentation (*no tabs!*).\n- Title-case for class names, underscore-delimited lower case for functions,\n  methods, and variables.\n- Use descriptive variable names. Delimit words with ``'_'`` for readability.\n  Avoid shortening words, skipping vowels, etc (common abbreviations such as\n  \"stats\" for \"statistics\", \"config\" for \"configuration\", etc are OK). Do\n  *not* use Hungarian notation (so prefer ``birth_date`` over ``dtBirth``).\n\nNew extensions should also follow implementation guidelines specified in the\n:ref:`writing-plugins` section of the documentation.\n\nWe ask that the following checks are performed on the modified code prior to\nsubmitting a pull request:\n\n.. 
note:: You will need pylint and pep8 static checkers installed::\n\n                pip install pep8\n                pip install pylint\n\n           It is recommended that you install via pip rather than through your\n           distribution's package manager because the latter is likely to\n           contain out-of-date version of these tools.\n\n- ``./dev_scripts/pylint`` should be run without arguments and should produce no\n  output (any output should be addressed by making appropriate changes in the\n  code or adding a pylint ignore directive, if there is a good reason for\n  keeping the code as is).\n- ``./dev_scripts/pep8`` should be run without arguments and should produce no\n  output (any output should be addressed by making appropriate changes in the\n  code).\n- If the modifications touch core framework (anything under ``wa/framework``), unit\n  tests should be run using ``nosetests``, and they should all pass.\n\n          - If significant additions have been made to the framework, unit\n            tests should be added to cover the new functionality.\n\n- If modifications have been made to the UI Automation source of a workload, the\n  corresponding APK should be rebuilt and submitted as part of the same pull\n  request. 
This can be done via the ``build.sh`` script in the relevant\n  ``uiauto`` subdirectory.\n- If modifications have been made to documentation (this includes description\n  attributes for Parameters and Extensions), documentation should be built to\n  make sure no errors or warning during build process, and a visual inspection\n  of new/updated sections in resulting HTML should be performed to ensure\n  everything renders as expected.\n\nOnce you have your contribution is ready, please follow instructions in `GitHub\ndocumentation <https://help.github.com/articles/creating-a-pull-request/>`_ to\ncreate a pull request.\n\n--------------------------------------------------------------------------------\n\nDocumentation\n-------------\n\nHeadings\n~~~~~~~~\n\nTo allow for consistent headings to be used through out the document the\nfollowing character sequences should be used when creating headings\n\n::\n\n        =========\n        Heading 1\n        =========\n\n        Only used for top level headings which should also have an entry in the\n        navigational side bar.\n\n        *********\n        Heading 2\n        *********\n\n        Main page heading used for page title, should not have a top level entry in the\n        side bar.\n\n        Heading 3\n        ==========\n\n        Regular section heading.\n\n        Heading 4\n        ---------\n\n        Sub-heading.\n\n        Heading 5\n        ~~~~~~~~~\n\n        Heading 6\n        ^^^^^^^^^\n\n        Heading 7\n        \"\"\"\"\"\"\"\"\"\n\n\n--------------------------------------------------------------------------------\n\nConfiguration Listings\n~~~~~~~~~~~~~~~~~~~~~~\n\nTo keep a consistent style for presenting configuration options, the preferred\nstyle is to use a `Field List`.\n\n(See: http://docutils.sourceforge.net/docs/user/rst/quickref.html#field-lists)\n\nExample::\n\n        :parameter: My Description\n\nWill render as:\n\n        :parameter: My 
Description\n\n\n--------------------------------------------------------------------------------\n\nAPI Style\n~~~~~~~~~\n\nWhen documenting an API the currently preferred style is to provide a short\ndescription of the class, followed by the attributes of the class in a\n`Definition List` followed by the methods using the `method` directive.\n\n(See: http://docutils.sourceforge.net/docs/user/rst/quickref.html#definition-lists)\n\n\nExample::\n\n        API\n        ===\n\n        :class:`MyClass`\n        ----------------\n\n        :class:`MyClass` is an example class to demonstrate API documentation.\n\n        ``attribute1``\n            The first attribute of the example class.\n\n        ``attribute2``\n            Another attribute example.\n\n        methods\n        \"\"\"\"\"\"\"\n\n        .. method:: MyClass.retrieve_output(name)\n\n            Retrieve the output for ``name``.\n\n            :param name:  The output that should be returned.\n            :return: An :class:`Output` object for ``name``.\n            :raises NotFoundError: If no output can be found.\n\n\nWill render as:\n\n:class:`MyClass` is an example class to demonstrate API documentation.\n\n``attribute1``\n    The first attribute of the example class.\n\n``attribute2``\n    Another attribute example.\n\nmethods\n^^^^^^^\n\n.. method:: MyClass.retrieve_output(name)\n\n    Retrieve the output for ``name``.\n\n    :param name:  The output that should be returned.\n    :return: An :class:`Output` object for ``name``.\n    :raises NotFoundError: If no output can be found.\n"
  },
  {
    "path": "doc/source/developer_information/developer_reference/framework_overview.rst",
    "content": "Framework Overview\n==================\n\nExecution Model\n---------------\n\nAt the high level, the execution model looks as follows:\n\n.. image:: developer_information/developer_reference/WA_Execution.svg\n   :scale: 100 %\n\nAfter some initial setup, the framework initializes the device, loads and\ninitialized instruments and output processors and begins executing jobs defined\nby the workload specs in the agenda. Each job executes in basic stages:\n\ninitialize\n        Perform any once-per-run initialization of a workload instance, i.e.\n        binary resource resolution.\nsetup\n        Initial setup for the workload is performed. E.g. required assets are\n        deployed to the devices, required services or applications are launched,\n        etc. Run time configuration of the device for the workload is also\n        performed at this time.\nsetup_rerun (apk based workloads only)\n        For some apk based workloads the application is required to be started\n        twice. If the ``requires_rerun`` attribute of the workload is set to\n        ``True`` then after the first setup method is called the application\n        will be killed and then restarted. This method can then be used to\n        perform any additional setup required.\nrun\n        This is when the workload actually runs. This is defined as the part of\n        the workload that is to be measured. Exactly what happens at this stage\n        depends entirely on the workload.\nextract results\n        Extract any results that have been generated during the execution of the\n        workload from the device and back to that target. Any files pulled from\n        the devices should be added as artifacts to the run context.\nupdate output\n        Perform any required parsing and processing of any collected results and\n        add any generated metrics to the run context.\nteardown\n        Final clean up is performed, e.g. 
applications may be closed, files\n        generated during execution deleted, etc.\n\nSignals are dispatched (see :ref:`below <signal_dispatch>`) at each stage of\nworkload execution, which installed instruments can hook into in order to\ncollect measurements, alter workload execution, etc. Instruments implementation\nusually mirrors that of workloads, defining initialization, setup, teardown and\noutput processing stages for a particular instrument. Instead of a ``run``\nmethod instruments usually implement ``start`` and ``stop`` methods instead\nwhich are triggered just before and just after a workload run.  However, the signal\ndispatch mechanism gives a high degree of flexibility to instruments allowing\nthem to hook into almost any stage of a WA run (apart from the very early\ninitialization).\n\nMetrics and artifacts generated by workloads and instruments are accumulated by\nthe framework and are then passed to active output processors. This happens\nafter each individual workload execution and at the end of the run. An output\nprocessor may choose to act at either or both of these points.\n\n\nControl Flow\n------------\n\nThis section goes into more detail explaining the relationship between the major\ncomponents of the framework and how control passes between them during a run. It\nwill only go through the major transitions and interactions and will not attempt\nto describe every single thing that happens.\n\n.. note:: This is the control flow for the ``wa run`` command which is the main\n          functionality of WA. Other commands are much simpler and most of what\n          is described below does not apply to them.\n\n#. :class:`wa.framework.entrypoint` parses the command from the arguments, creates a\n   :class:`wa.framework.configuration.execution.ConfigManager` and executes the run\n   command (:class:`wa.commands.run.RunCommand`) passing it the ConfigManager.\n#. 
Run command initializes the output directory and creates a\n   :class:`wa.framework.configuration.parsers.AgendaParser` and will parse an\n   agenda and populate the ConfigManager based on the command line arguments.\n   Finally it instantiates a :class:`wa.framework.execution.Executor` and\n   passes it the completed ConfigManager.\n#. The Executor uses the ConfigManager to create a\n   :class:`wa.framework.configuration.core.RunConfiguration` and fully defines the\n   configuration for the run (which will be serialised into ``__meta`` subdirectory\n   under the output directory).\n#. The Executor proceeds to instantiate a TargetManager, used to handle the\n   device connection and configuration, and a\n   :class:`wa.framework.execution.ExecutionContext` which is used to track the\n   current state of the run execution and also serves as a means of\n   communication between the core framework and plugins. After this any required\n   instruments and output processors are initialized and installed.\n#. Finally, the Executor instantiates a :class:`wa.framework.execution.Runner`,\n   initializes its job queue with workload specs from the RunConfiguration, and\n   kicks it off.\n#. The Runner performs the run time configuration of the device and goes\n   through the workload specs (in the order defined by ``execution_order``\n   setting), running each spec according to the execution model described in the\n   previous section and sending signals (see below) at appropriate points during\n   execution.\n#. At the end of the run, the control is briefly passed back to the Executor,\n   which outputs a summary for the run.\n\n\n.. _signal_dispatch:\n\nSignal Dispatch\n---------------\n\nWA uses the `louie <https://github.com/11craft/louie/>`_ (formerly,\npydispatcher) library for signal dispatch. Callbacks can be registered for\nsignals emitted during the run. 
WA uses a version of louie that has been\nmodified to introduce :ref:`priority <prioritization>` to registered callbacks\n(so that callbacks that are known to be slow can be registered with a lower\npriority and therefore do not interfere with other callbacks).\n\nThis mechanism is abstracted for instruments. Methods of an\n:class:`wa.framework.Instrument` subclass automatically get hooked to\nappropriate signals based on their names when the instrument is \"installed\"\nfor the run. Priority can then be specified by adding ``extremely_fast``,\n``very_fast``, ``fast`` , ``slow``, ``very_slow`` or ``extremely_slow``\n:ref:`decorators <instruments_method_map>` to the method definitions.\n\nThe full list of method names and the signals they map to may be seen at the\n:ref:`instrument method map <instruments_method_map>`.\n\nSignal dispatching mechanism may also be used directly, for example to\ndynamically register callbacks at runtime or allow plugins other than\n``Instruments`` to access stages of the run they are normally not aware of.\n\nSignals can be either paired or non paired signals. Non paired signals are one\noff signals that are sent to indicate special events or transitions in execution\nstages have occurred for example ``TARGET_CONNECTED``. Paired signals are used to\nsignify the start and end of a particular event. If the start signal has been\nsent the end signal is guaranteed to also be sent, whether the operation was a\nsuccess or not, however in the case of correct operation an additional success\nsignal will also be sent. For example in the event of a successful reboot of\nthe device, the following signals will be sent ``BEFORE_REBOOT``,\n``SUCCESSFUL_REBOOT`` and ``AFTER_REBOOT``.\n\nAn overview of what signals are sent at which point during execution can be seen\nbelow. 
Most of the paired signals have been removed from the diagram for clarity\nand shown as being dispatched from a particular stage of execution, however in\nreality these signals will be sent just before and just after these stages are\nexecuted. As mentioned above for each of these signals there will be at least 2\nand up to 3 signals sent. If the \"BEFORE_X\" signal (sent just before the stage\nis ran) is sent then the \"AFTER_X\" (sent just after the stage is ran) signal is\nguaranteed to also be sent, and under normal operation a \"SUCCESSFUL_X\" signal\nis also sent just after stage has been completed. The diagram also lists the\nconditional signals that can be sent at any time during execution if something\nunexpected happens, for example an error occurs or the user aborts the run.\n\n.. image:: developer_information/developer_reference/WA_Signal_Dispatch.svg\n   :scale: 100 %\n\nFor more information see :ref:`Instrumentation Signal-Method Mapping <instruments_method_map>`.\n"
  },
  {
    "path": "doc/source/developer_information/developer_reference/plugins.rst",
    "content": ".. plugins:\n\n\nPlugins\n=======\n\nWorkload Automation offers several plugin points (or plugin types). The most\ninteresting of these are\n\n:workloads: These are the tasks that get executed and measured on the device. These\n            can be benchmarks, high-level use cases, or pretty much anything else.\n:targets: These are interfaces to the physical devices (development boards or end-user\n          devices, such as smartphones) that use cases run on. Typically each model of a\n          physical device would require its own interface class (though some functionality\n          may be reused by subclassing from an existing base).\n:instruments: Instruments allow collecting additional data from workload execution (e.g.\n              system traces). Instruments are not specific to a particular workload. Instruments\n              can hook into any stage of workload execution.\n:output processors: These are used to format the results of workload execution once they have been\n                    collected. Depending on the callback used, these will run either after each\n                    iteration and/or at the end of the run, after all of the results have been\n                    collected.\n\nYou can create a plugin by subclassing the appropriate base class, defining\nappropriate methods and attributes, and putting the .py file containing the\nclass into the \"plugins\" subdirectory under ``~/.workload_automation`` (or\nequivalent) where it will be automatically picked up by WA.\n\n\nPlugin Basics\n--------------\n\nThis section contains reference information common to plugins of all types.\n\n.. _context:\n\nThe Context\n~~~~~~~~~~~\n\n.. note:: For clarification on the meaning of \"workload specification\" \"spec\", \"job\"\n  and \"workload\" and the distinction between them, please see the :ref:`glossary <glossary>`.\n\nThe majority of methods in plugins accept a context argument. 
This is an\ninstance of :class:`wa.framework.execution.ExecutionContext`. It contains\ninformation about the current state of execution of WA and keeps track of things\nlike which workload is currently running.\n\nNotable methods of the context are:\n\n:context.get_resource(resource, strict=True):\n       This method should be used to retrieve a resource using the resource getters rather than using the ResourceResolver directly as this method additionally record any found resources hash in the output metadata.\n\n:context.add_artifact(name, host_file_path, kind, description=None, classifier=None):\n      Plugins can add :ref:`artifacts <artifact>` of various kinds to the run\n      output directory for WA and associate them with a description and/or\n      :ref:`classifier <classifiers>`.\n\n:context.add_metric(name, value, units=None, lower_is_better=False, classifiers=None):\n        This method should be used to add :ref:`metrics <metrics>` that have been\n        generated from a workload, this will allow WA to process the results\n        accordingly depending on which output processors are enabled.\n\nNotable attributes of the context are:\n\n:context.workload:\n        :class:`wa.framework.workload` object that is currently being executed.\n\n:context.tm:\n        This is the target manager that can be used to access various information\n        about the target including initialization parameters.\n\n:context.current_job:\n        This is an instance of :class:`wa.framework.job.Job` and contains all\n        the information relevant to the workload job currently being executed.\n\n:context.current_job.spec:\n        The current workload specification being executed. This is an\n        instance of :class:`wa.framework.configuration.core.JobSpec`\n        and defines the workload and the parameters under which it is\n        being executed.\n\n:context.current_job.current_iteration:\n        The current iteration of the spec that is being executed. 
Note that this\n        is the iteration for that spec, i.e. the number of times that spec has\n        been run, *not* the total number of all iterations that have been executed so\n        far.\n\n:context.job_output:\n        This is the output object for the current iteration which\n        is an instance of :class:`wa.framework.output.JobOutput`. It contains\n        the status of the iteration as well as the metrics and artifacts\n        generated by the job.\n\n\nIn addition to these, context also defines a few useful paths (see below).\n\n\nPaths\n~~~~~\n\nYou should avoid using hard-coded absolute paths in your plugins whenever\npossible, as they make your code too dependent on a particular environment and\nmay mean having to make adjustments when moving to new (host and/or device)\nplatforms. To help avoid hard-coded absolute paths, WA defines a number of\nstandard locations. You should strive to define your paths relative\nto one of these.\n\nOn the host\n^^^^^^^^^^^\n\nHost paths are available through the context object, which is passed to most\nplugin methods.\n\ncontext.run_output_directory\n        This is the top-level output directory for all WA results (by default,\n        this will be \"wa_output\" in the directory in which WA was invoked).\n\ncontext.output_directory\n        This is the output directory for the current iteration. This will be an\n        iteration-specific subdirectory under the main results location. If\n        there is no current iteration (e.g. when processing overall run results)\n        this will point to the same location as ``run_output_directory``.\n\n\nAdditionally, the global ``wa.settings`` object exposes one other location:\n\nsettings.dependency_directory\n        this is the root directory for all plugin dependencies (e.g. 
media\n        files, assets etc) that are not included within the plugin itself.\n\nAs per Python best practice, it is recommended that methods and values in\n``os.path`` standard library module are used for host path manipulation.\n\nOn the target\n^^^^^^^^^^^^^\n\nWorkloads and instruments have a ``target`` attribute, which is an interface to\nthe target used by WA. It defines the following location:\n\ntarget.working_directory\n        This is the directory for all WA-related files on the target. All files\n        deployed to the target should be pushed to somewhere under this location\n        (the only exception being executables installed with ``target.install``\n        method).\n\nSince there could be a mismatch between path notation used by the host and the\ntarget, the ``os.path`` modules should *not* be used for on-target path\nmanipulation. Instead target has an equipment module exposed through\n``target.path`` attribute. This has all the same attributes and behaves the\nsame way as ``os.path``, but is guaranteed to produce valid paths for the target,\nirrespective of the host's path notation. For example:\n\n.. code:: python\n\n    result_file = self.target.path.join(self.target.working_directory, \"result.txt\")\n    self.command = \"{} -a -b -c {}\".format(target_binary, result_file)\n\n.. note:: Output processors, unlike workloads and instruments, do not have their\n          own target attribute as they are designed to be able to be run offline.\n\n.. _plugin-parameters:\n\nParameters\n~~~~~~~~~~~\n\nAll plugins can be parametrized. Parameters are specified using\n``parameters`` class attribute. This should be a list of\n:class:`wa.framework.plugin.Parameter` instances. The following attributes can be\nspecified on parameter creation:\n\n:name:\n        This is the only mandatory argument. 
The name will be used to create a\n        corresponding attribute in the plugin instance, so it must be a valid\n        Python identifier.\n\n:kind:\n        This is the type of the value of the parameter. This must be an\n        callable. Normally this should be a standard Python type, e.g. ``int``\n        or ``float``, or one the types defined in :mod:`wa.utils.types`.\n        If not explicitly specified, this will default to ``str``.\n\n        .. note:: Irrespective of the ``kind`` specified, ``None`` is always a\n                  valid value for a parameter. If you don't want to allow\n                  ``None``, then set ``mandatory`` (see below) to ``True``.\n\n:allowed_values:\n        A list of the only allowed values for this parameter.\n\n        .. note:: For composite types, such as ``list_of_strings`` or\n                  ``list_of_ints`` in :mod:`wa.utils.types`, each element of\n                  the value  will be checked against ``allowed_values`` rather\n                  than the composite value itself.\n\n:default:\n        The default value to be used for this parameter if one has not been\n        specified by the user. Defaults to ``None``.\n\n:mandatory:\n        A ``bool`` indicating whether this parameter is mandatory. Setting this\n        to ``True`` will make ``None`` an illegal value for the parameter.\n        Defaults to ``False``.\n\n        .. note:: Specifying a ``default`` will mean that this parameter will,\n                  effectively, be ignored (unless the user sets the param to ``None``).\n\n        .. note:: Mandatory parameters are *bad*. If at all possible, you should\n                  strive to provide a sensible ``default`` or to make do without\n                  the parameter. Only when the param is absolutely necessary,\n                  and there really is no sensible default that could be given\n                  (e.g. 
something like login credentials), should you consider\n                  making it mandatory.\n\n:constraint:\n        This is an additional constraint to be enforced on the parameter beyond\n        its type or fixed allowed values set. This should be a predicate (a function\n        that takes a single argument -- the user-supplied value -- and returns\n        a ``bool`` indicating whether the constraint has been satisfied).\n\n:override:\n        A parameter name must be unique not only within an plugin but also\n        with that plugin's class hierarchy. If you try to declare a parameter\n        with the same name as already exists, you will get an error. If you do\n        want to override a parameter from further up in the inheritance\n        hierarchy, you can indicate that by setting ``override`` attribute to\n        ``True``.\n\n        When overriding, you do not need to specify every other attribute of the\n        parameter, just the ones you what to override. Values for the rest will\n        be taken from the parameter in the base class.\n\n\nValidation and cross-parameter constraints\n^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n\nA plugin will get validated at some point after construction. When exactly\nthis occurs depends on the plugin type, but it *will* be validated before it\nis used.\n\nYou can implement ``validate`` method in your plugin (that takes no arguments\nbeyond the ``self``) to perform any additional *internal* validation in your\nplugin. By \"internal\", I mean that you cannot make assumptions about the\nsurrounding environment (e.g. 
that the device has been initialized).\n\nThe contract for ``validate`` method is that it should raise an exception\n(either ``wa.framework.exception.ConfigError`` or plugin-specific exception type -- see\nfurther on this page) if some validation condition has not, and cannot, been met.\nIf the method returns without raising an exception, then the plugin is in a\nvalid internal state.\n\nNote that ``validate`` can be used not only to verify, but also to impose a\nvalid internal state. In particular, this is where cross-parameter constraints can\nbe resolved. If the ``default`` or ``allowed_values`` of one parameter depend on\nanother parameter, there is no way to express that declaratively when specifying\nthe parameters. In that case the dependent attribute should be left unspecified\non creation and should instead be set inside ``validate``.\n\nLogging\n~~~~~~~\n\nEvery plugin class has its own logger that you can access through\n``self.logger`` inside the plugin's methods. Generally, a :class:`Target` will\nlog everything it is doing, so you shouldn't need to add much additional logging\nfor device actions. However you might want to log additional information,  e.g.\nwhat settings your plugin is using, what it is doing on the host, etc.\n(Operations on the host will not normally be logged, so your plugin should\ndefinitely log what it is doing on the host). One situation in particular where\nyou should add logging is before doing something that might take a significant\namount of time, such as downloading a file.\n\n\nDocumenting\n~~~~~~~~~~~\n\nAll plugins and their parameters should be documented. For plugins\nthemselves, this is done through ``description`` class attribute. 
The convention\nfor an plugin description is that the first paragraph should be a short\nsummary description of what the plugin does and why one would want to use it\n(among other things, this will get extracted and used by ``wa list`` command).\nSubsequent paragraphs (separated by blank lines) can then provide  a more\ndetailed description, including any limitations and setup instructions.\n\nFor parameters, the description is passed as an argument on creation. Please\nnote that if ``default``, ``allowed_values``, or ``constraint``, are set in the\nparameter, they do not need to be explicitly mentioned in the description (wa\ndocumentation utilities will automatically pull those). If the ``default`` is set\nin ``validate`` or additional cross-parameter constraints exist, this *should*\nbe documented in the parameter description.\n\nBoth plugins and their parameters should be documented using reStructureText\nmarkup (standard markup for Python documentation). See:\n\nhttp://docutils.sourceforge.net/rst.html\n\nAside from that, it is up to you how you document your plugin. You should try\nto provide enough information so that someone unfamiliar with your plugin is\nable to use it, e.g. you should document all settings and parameters your\nplugin expects (including what the valid values are).\n\n\nError Notification\n~~~~~~~~~~~~~~~~~~\n\nWhen you detect an error condition, you should raise an appropriate exception to\nnotify the user. The exception would typically be :class:`ConfigError` or\n(depending the type of the plugin)\n:class:`WorkloadError`/:class:`DeviceError`/:class:`InstrumentError`/:class:`OutputProcessorError`.\nAll these errors are defined in :mod:`wa.framework.exception` module.\n\nA :class:`ConfigError` should be raised where there is a problem in configuration\nspecified by the user (either through the agenda or config files). 
These errors\nare meant to be resolvable by simple adjustments to the configuration (and the\nerror message should suggest what adjustments need to be made. For all other\nerrors, such as missing dependencies, mis-configured environment, problems\nperforming operations, etc., the plugin type-specific exceptions should be\nused.\n\nIf the plugin itself is capable of recovering from the error and carrying\non, it may make more sense to log an ERROR or WARNING level message using the\nplugin's logger and to continue operation.\n\n.. _metrics:\n\nMetrics\n~~~~~~~\nThis is what WA uses to store a single metric collected from executing a workload.\n\n    :name: the name of the metric. Uniquely identifies the metric\n                 within the results.\n    :value: The numerical value of the metric for this execution of a\n                  workload. This can be either an int or a float.\n    :units: Units for the collected value. Can be None if the value\n                  has no units (e.g. it's a count or a standardised score).\n    :lower_is_better: Boolean flag indicating where lower values are\n                            better than higher ones. Defaults to False.\n    :classifiers: A set of key-value pairs to further classify this\n                        metric beyond current iteration (e.g. this can be used\n                        to identify sub-tests).\n\nMetrics can be added to WA output via the :ref:`context <context>`:\n\n\n.. code-block:: python\n\n  context.add_metric(\"score\", 9001)\n  context.add_metric(\"time\", 2.35, \"seconds\", lower_is_better=True)\n\nYou only need to specify the name and the value for the metric. Units and\nclassifiers are optional, and, if not specified otherwise, it will be assumed\nthat higher values are better (``lower_is_better=False``).\n\nThe metric will be added to the result for the current job, if there is one;\notherwise, it will be added to the overall run result.\n\n.. 
_artifact:\n\nArtifacts\n~~~~~~~~~\nThis is an artifact generated during execution/post-processing of a workload.\nUnlike :ref:`metrics <metrics>`, this represents an actual artifact, such as a\nfile, generated.  This may be \"output\", such as trace, or it could be \"meta\ndata\" such as logs.  These are distinguished using the ``kind`` attribute, which\nalso helps WA decide how it should be handled. Currently supported kinds are:\n\n        :log: A log file. Not part of the \"output\" as such but contains\n              information about the run/workload execution that be useful for\n              diagnostics/meta analysis.\n        :meta: A file containing metadata. This is not part of the \"output\", but\n               contains information that may be necessary to reproduce the\n               results (contrast with ``log`` artifacts which are *not*\n               necessary).\n        :data: This file contains new data, not available otherwise and should\n               be considered part of the \"output\" generated by WA. Most traces\n               would fall into this category.\n        :export: Exported version of results or some other artifact. This\n                 signifies that this artifact does not contain any new data\n                 that is not available elsewhere and that it may be safely\n                 discarded without losing information.\n        :raw: Signifies that this is a raw dump/log that is normally processed\n              to extract useful information and is then discarded. In a sense,\n              it is the opposite of ``export``, but in general may also be\n              discarded.\n\n              .. note:: whether a file is marked as ``log``/``data`` or ``raw``\n                        depends on how important it is to preserve this file,\n                        e.g. 
when archiving, vs how much space it takes up.\n                        Unlike ``export`` artifacts which are (almost) always\n                        ignored by other exporters as that would never result\n                        in data loss, ``raw`` files *may* be processed by\n                        exporters if they decided that the risk of losing\n                        potentially (though unlikely) useful data is greater\n                        than the time/space cost of handling the artifact (e.g.\n                        a database uploader may choose to ignore ``raw``\n                        artifacts, whereas a network filer archiver may choose\n                        to archive them).\n\n        .. note: The kind parameter is intended to represent the logical\n                 function of a particular artifact, not it's intended means of\n                 processing -- this is left entirely up to the output\n                 processors.\n\nAs with :ref:`metrics`, artifacts are added via the :ref:`context <context>`:\n\n.. code-block:: python\n\n  context.add_artifact(\"benchmark-output\", \"bech-out.txt\", kind=\"raw\",\n                       description=\"stdout from running the benchmark\")\n\n.. note:: The file *must* exist on the host by the point at which the artifact\n          is added, otherwise an error will be raised.\n\nThe artifact will be added to the result of the current job, if there is one;\notherwise, it will be added to the overall run result. In some situations, you\nmay wish to add an artifact to the overall run while being inside a job context,\nthis can be done with ``add_run_artifact``:\n\n.. code-block:: python\n\n  context.add_run_artifact(\"score-summary\", \"scores.txt\", kind=\"export\",\n         description=\"\"\"\n         Summary of the scores so far. 
Updated after\n         every job.\n         \"\"\")\n\nIn this case, you also need to make sure that the file represented by the\nartifact is written to the output directory for the run and not the current job.\n\n.. _metadata:\n\nMetadata\n~~~~~~~~\n\nThere may be additional data collected by your plugin that you want to record as\npart of the result, but that does not fall under the definition of a \"metric\".\nFor example, you may want to record the version of the binary you're executing.\nYou can do this by adding a metadata entry:\n\n.. code-block:: python\n\n  context.add_metadata(\"exe-version\", 1.3)\n\n\nMetadata will be added either to the current job result, or to the run result,\ndepending on the current context. Metadata values can be scalars or nested\nstructures of dicts/sequences; the only constraint is that all constituent\nobjects of the value must be POD (Plain Old Data) types -- see :ref:`WA POD\ntypes <wa-pods>`.\n\nThere is special support for handling metadata entries that are dicts of values.\nThe following call adds a metadata entry ``\"versions\"`` who's value is\n``{\"my_exe\": 1.3}``:\n\n.. code-block:: python\n\n  context.add_metadata(\"versions\", \"my_exe\", 1.3)\n\nIf you attempt to add a metadata entry that already exists, an error will be\nraised, unless ``force=True`` is specified, in which case, it will be\noverwritten.\n\nUpdating an existing entry whose value is a collection can be done with\n``update_metadata``:\n\n.. code-block:: python\n\n  context.update_metadata(\"ran_apps\", \"my_exe\")\n  context.update_metadata(\"versions\", \"my_other_exe\", \"2.3.0\")\n\nThe first call appends ``\"my_exe\"`` to the list at metadata entry\n``\"ran_apps\"``. 
The second call updates the ``\"versions\"`` dict in the metadata\nwith an entry for ``\"my_other_exe\"``.\n\nIf an entry does not exit, ``update_metadata`` will create it, so it's\nrecommended to always use that for non-scalar entries, unless the intention is\nspecifically to ensure that the entry does not exist at the time of the call.\n\n.. _classifiers:\n\nClassifiers\n~~~~~~~~~~~\n\nClassifiers are key-value pairs of tags that can be attached to metrics,\nartifacts, jobs, or the entire run. Run and job classifiers get propagated to\nmetrics and artifacts. Classifier keys should be strings, and their values\nshould be simple scalars (i.e. strings, numbers, or bools).\n\nClassifiers can be thought of as \"tags\" that are used to annotate metrics and\nartifacts, in order to make it easier to sort through them later. WA itself does\nnot do anything with them, however output processors will augment the output\nthey generate with them (for example, ``csv`` processor can add additional\ncolumns for classifier keys).\n\nClassifiers are typically added by the user to attach some domain-specific\ninformation (e.g. experiment configuration identifier) to the results, see\n:ref:`using classifiers <using-classifiers>`. However, plugins can also attach\nadditional classifiers, by specifying them in ``add_metric()`` and\n``add_artifacts()`` calls.\n\n\nMetadata vs Classifiers\n~~~~~~~~~~~~~~~~~~~~~~~\n\nBoth metadata and classifiers are sets of essentially opaque key-value pairs\nthat get included in WA output. While they may seem somewhat similar and\ninterchangeable, they serve different purposes and are handled differently by\nthe framework.\n\nClassifiers are used to annotate generated metrics and artifacts in order to\nassist post-processing tools in sorting through them. 
Metadata is used to record\nadditional information that is not necessary for processing the results, but\nthat may be needed in order to reproduce them or to make sense of them in a\ngrander context.\n\nThese are specific differences in how they are handled:\n\n- Classifiers are often provided by the user via the agenda (though can also be\n  added by plugins). Metadata in only created by the framework and plugins.\n- Classifier values must be simple scalars; metadata values can be nested\n  collections, such as lists or dicts.\n- Classifiers are used by output processors to augment the output the latter\n  generated; metadata typically isn't.\n- Classifiers are essentially associated with the individual metrics and\n  artifacts (though in the agenda they're specified at workload, section, or\n  global run levels); metadata is associated with a particular job or run, and\n  not with metrics or artifacts.\n\n--------------------\n\n.. _execution-decorators:\n\nExecution Decorators\n---------------------\n\nThe following decorators are available for use in order to control how often a\nmethod should be able to be executed.\n\nFor example, if we want to ensure that no matter how many iterations of a\nparticular workload are ran, we only execute the initialize method for that instance\nonce, we would use the decorator as follows:\n\n.. code-block:: python\n\n    from wa.utils.exec_control import once\n\n    @once\n    def initialize(self, context):\n        # Perform one time initialization e.g. installing a binary to target\n        # ..\n\n@once_per_instance\n~~~~~~~~~~~~~~~~~~\nThe specified method will be invoked only once for every bound instance within\nthe environment.\n\n@once_per_class\n~~~~~~~~~~~~~~~\nThe specified method will be invoked only once for all instances of a class\nwithin the environment.\n\n@once\n~~~~~\nThe specified method will be invoked only once within the environment.\n\n.. 
warning:: If a method containing a super call is decorated, this will also cause\n             stop propagation up the hierarchy, unless this is the desired\n             effect, additional functionality should be implemented in a\n             separate decorated method which can then be called allowing for\n             normal propagation to be retained.\n\n\n--------------------\n\nUtils\n-----\n\nWorkload Automation defines a number of utilities collected under\n:mod:`wa.utils` subpackage. These utilities were created to help with the\nimplementation of the framework itself, but may be also be useful when\nimplementing plugins.\n\n--------------------\n\nWorkloads\n---------\n\nAll of the type inherit from the same base :class:`Workload` and its API can be\nseen in the :ref:`API <workload-api>` section.\n\nWorkload methods (except for ``validate``) take a single argument that is a\n:class:`wa.framework.execution.ExecutionContext` instance. This object keeps\ntrack of the current execution state (such as the current workload, iteration\nnumber, etc), and contains, among other things, a\n:class:`wa.framework.output.JobOutput` instance that should be populated from\nthe ``update_output`` method with the results of the execution. For more\ninformation please see `the context`_ documentation. ::\n\n        # ...\n\n        def update_output(self, context):\n           # ...\n           context.add_metric('energy', 23.6, 'Joules', lower_is_better=True)\n\n        # ...\n\n.. _workload-types:\n\nWorkload Types\n~~~~~~~~~~~~~~~~\n\nThere are multiple workload types that you can inherit from depending on the\npurpose of your workload, the different types along with an output of their\nintended use cases are outlined below.\n\n.. 
_basic-workload:\n\nBasic (:class:`wa.Workload <wa.framework.workload.Workload>`)\n^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\nThis type of the workload is the simplest type of workload and is left the to\ndeveloper to implement its full functionality.\n\n\n.. _apk-workload:\n\nApk (:class:`wa.ApkWorkload <wa.framework.workload.ApkWorkload>`)\n^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\nThis workload will simply deploy and launch an android app in its basic form\nwith no UI interaction.\n\n.. _uiautomator-workload:\n\n\nUiAuto (:class:`wa.UiautoWorkload <wa.framework.workload.UiautoWorkload>`)\n^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\nThis workload is for android targets which will use UiAutomator to interact with\nUI elements without a specific android app, for example performing manipulation\nof android itself. This is the preferred type of automation as the results are\nmore portable and reproducible due to being able to wait for UI elements to\nappear rather than having to rely on human recordings.\n\n.. _apkuiautomator-workload:\n\nApkUiAuto (:class:`wa.ApkUiautoWorkload <wa.framework.workload.ApkUiautoWorkload>`)\n^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\nThe is the same as the UiAuto workload however it is also associated with an\nandroid app e.g. AdobeReader and will automatically deploy and launch the\nandroid app before running the automation.\n\n.. _revent-workload:\n\nRevent (:class:`wa.ReventWorkload <wa.framework.workload.ReventWorkload>`)\n^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\nRevent workloads are designed primarily for games as these are unable to be\nautomated with UiAutomator due to the fact that they are rendered within a\nsingle UI element. They require a recording to be performed manually and\ncurrently will need re-recording for each different device. 
For more\ninformation on revent workloads been please see :ref:`revent_files_creation`\n\n.. _apkrevent-workload:\n\nAPKRevent (:class:`wa.ApkReventWorkload <wa.framework.workload.ApkReventWorkload>`)\n^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\nThe is the same as the Revent workload however it is also associated with an\nandroid app e.g. AngryBirds and will automatically deploy and launch the android\napp before running the automation.\n"
  },
  {
    "path": "doc/source/developer_information/developer_reference/revent.rst",
    "content": "Revent Recordings\n=================\n\nConvention for Naming revent Files for Revent Workloads\n-------------------------------------------------------------------------------\n\nThere is a convention for naming revent files which you should follow if you\nwant to record your own revent files. Each revent file must be called (case sensitive)\n``<device name>.<stage>.revent``,\nwhere ``<device name>`` is the name of your device (as defined by the model\nname of your device which can be retrieved with\n``adb shell getprop ro.product.model`` or by the ``name`` attribute of your\ncustomized device class), and ``<stage>`` is one of the following currently\nsupported stages:\n\n        :setup: This stage is where the application is loaded (if present). It is\n                a good place to record an revent here to perform any tasks to get\n                ready for the main part of the workload to start.\n        :run: This stage is where the main work of the workload should be performed.\n              This will allow for more accurate results if the revent file for this\n              stage only records the main actions under test.\n        :extract_results: This stage is used after the workload has been completed\n                          to retrieve any metrics from the workload e.g. a score.\n        :teardown: This stage is where any final actions should be performed to\n                   clean up the workload.\n\nOnly the run stage is mandatory, the remaining stages will be replayed if a\nrecording is present otherwise no actions will be performed for that particular\nstage.\n\nAll your custom revent files should reside at\n``'$WA_USER_DIRECTORY/dependencies/WORKLOAD NAME/'``. 
So\ntypically to add a custom revent files for a device named \"mydevice\" and a\nworkload name \"myworkload\", you would need to add the revent files to the\ndirectory ``~/.workload_automation/dependencies/myworkload/revent_files``\ncreating the directory structure if necessary. ::\n\n    mydevice.setup.revent\n    mydevice.run.revent\n    mydevice.extract_results.revent\n    mydevice.teardown.revent\n\nAny revent file in the dependencies will always overwrite the revent file in the\nworkload directory. So for example it is possible to just provide one revent for\nsetup in the dependencies and use the run.revent that is in the workload directory.\n\n\nFile format of revent recordings\n--------------------------------\n\nYou do not need to understand recording format in order to use revent. This\nsection is intended for those looking to extend revent in some way, or to\nutilize revent recordings for other purposes.\n\nFormat Overview\n~~~~~~~~~~~~~~~\n\nRecordings are stored in a binary format. A recording consists of three\nsections::\n\n    +-+-+-+-+-+-+-+-+-+-+-+\n    |       Header        |\n    +-+-+-+-+-+-+-+-+-+-+-+\n    |                     |\n    |  Device Description |\n    |                     |\n    +-+-+-+-+-+-+-+-+-+-+-+\n    |                     |\n    |                     |\n    |     Event Stream    |\n    |                     |\n    |                     |\n    +-+-+-+-+-+-+-+-+-+-+-+\n\nThe header contains metadata describing the recording. The device description\ncontains information about input devices involved in this recording. Finally,\nthe event stream contains the recorded input events.\n\nAll fields are either fixed size or prefixed with their length or the number of\n(fixed-sized) elements.\n\n.. 
note:: All values below are little endian\n\n\nRecording Header\n~~~~~~~~~~~~~~~~\n\nAn revent recoding header has the following structure\n\n * It starts with the \"magic\" string ``REVENT`` to indicate that this is an\n   revent recording.\n * The magic is followed by a 16 bit version number. This indicates the format\n   version of the recording that follows. Current version is ``2``.\n * The next 16 bits indicate the type of the recording. This dictates the\n   structure of the Device Description section. Valid values are:\n\n        ``0``\n                This is a general input event recording. The device description\n                contains a list of paths from which the events where recorded.\n        ``1``\n                This a gamepad recording. The device description contains the\n                description of the gamepad used to create the recording.\n\n * The header is zero-padded to 128 bits.\n\n::\n\n     0                   1                   2                   3\n     0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1\n    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+\n    |      'R'      |      'E'      |      'V'      |      'E'      |\n    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+\n    |      'N'      |      'T'      |            Version            |\n    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+\n    |             Mode              |            PADDING            |\n    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+\n    |                            PADDING                            |\n    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+\n\n\nDevice Description\n~~~~~~~~~~~~~~~~~~\n\nThis section describes the input devices used in the recording. Its structure is\ndetermined by the value of ``Mode`` field in the header.\n\nGeneral Recording\n~~~~~~~~~~~~~~~~~\n\n.. 
note:: This is the only format supported prior to version ``2``.\n\nThe recording has been made from all available input devices. This section\ncontains the list of ``/dev/input`` paths for the devices, prefixed with total\nnumber of the devices recorded.\n\n::\n\n     0                   1                   2                   3\n     0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1\n    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+\n    |                       Number of devices                       |\n    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+\n    |                                                               |\n    |             Device paths              +-+-+-+-+-+-+-+-+-+-+-+-+\n    |                                       |\n    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+\n\n\nSimilarly, each device path is a length-prefixed string. Unlike C strings, the\npath is *not* NULL-terminated.\n\n::\n\n     0                   1                   2                   3\n     0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1\n    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+\n    |                     Length of device path                     |\n    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+\n    |                                                               |\n    |                          Device path                          |\n    |                                                               |\n    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+\n\n\nGamepad Recording\n~~~~~~~~~~~~~~~~~\n\nThe recording has been made from a specific gamepad. All events in the stream\nwill be for that device only. The section describes the device properties that\nwill be used to create a virtual input device using ``/dev/uinput``. 
Please\nsee ``linux/input.h`` header in the Linux kernel source for more information\nabout the fields in this section.\n\n::\n\n     0                   1                   2                   3\n     0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1\n    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+\n    |            bustype            |             vendor            |\n    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+\n    |            product            |            version            |\n    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+\n    |                         name_length                           |\n    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+\n    |                                                               |\n    |                             name                              |\n    |                                                               |\n    |                                                               |\n    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+\n    |                            ev_bits                            |\n    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+\n    |                                                               |\n    |                                                               |\n    |                       key_bits (96 bytes)                     |\n    |                                                               |\n    |                                                               |\n    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+\n    |                                                               |\n    |                                                               |\n    |                       rel_bits (96 bytes)                     |\n    |                                                               |\n    |                        
                                       |\n    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+\n    |                                                               |\n    |                                                               |\n    |                       abs_bits (96 bytes)                     |\n    |                                                               |\n    |                                                               |\n    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+\n    |                          num_absinfo                          |\n    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+\n    |                                                               |\n    |                                                               |\n    |                                                               |\n    |                                                               |\n    |                        absinfo entries                        |\n    |                                                               |\n    |                                                               |\n    |                                                               |\n    |                                                               |\n    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+\n\n\nEach ``absinfo`` entry consists of six 32 bit values. 
The number of entries is\ndetermined by the ``abs_bits`` field.\n\n\n::\n\n     0                   1                   2                   3\n     0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1\n    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+\n    |                            value                              |\n    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+\n    |                           minimum                             |\n    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+\n    |                           maximum                             |\n    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+\n    |                             fuzz                              |\n    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+\n    |                             flat                              |\n    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+\n    |                          resolution                           |\n    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+\n\n\nEvent Stream\n~~~~~~~~~~~~\n\nThe majority of an revent recording will be made up of the input events that were\nrecorded. The event stream is prefixed with the number of events in the stream,\nand start and end times for the recording.\n\n::\n\n     0                   1                   2                   3\n     0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1\n    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+\n    |                        Number of events                       |\n    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+\n    |                  Number of events (cont.)                     
|\n    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+\n    |                      Start Time Seconds                       |\n    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+\n    |                  Start Time Seconds (cont.)                   |\n    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+\n    |                    Start Time Microseconds                    |\n    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+\n    |              Start Time Microseconds (cont.)                  |\n    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+\n    |                        End Time Seconds                       |\n    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+\n    |                    End Time Seconds (cont.)                   |\n    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+\n    |                      End Time Microseconds                    |\n    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+\n    |                End Time Microseconds (cont.)                  |\n    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+\n    |                                                               |\n    |                                                               |\n    |             Events                                            |\n    |                                                               |\n    |                                                               |\n    |                                       +-+-+-+-+-+-+-+-+-+-+-+-+\n    |                                       |\n    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+\n\n\nEvent Structure\n~~~~~~~~~~~~~~~\n\nEach event entry structured as follows:\n\n * An unsigned short integer representing which device from the list of device paths\n   this event is for (zero indexed). E.g. 
Device ID = 3 would be the 4th\n   device in the list of device paths.\n * A unsigned long integer representing the number of seconds since \"epoch\" when\n   the event was recorded.\n * A unsigned long integer representing the microseconds part of the timestamp.\n * An unsigned integer representing the event type\n * An unsigned integer representing the event code\n * An unsigned integer representing the event value\n\nFor more information about the event type, code and value please read:\nhttps://www.kernel.org/doc/Documentation/input/event-codes.txt\n\n::\n\n     0                   1                   2                   3\n     0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1\n    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+\n    |           Device ID           |        Timestamp Seconds      |\n    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+\n    |                       Timestamp Seconds (cont.)               |\n    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+\n    |   Timestamp Seconds (cont.)   |        stamp Micoseconds      |\n    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+\n    |              Timestamp Micoseconds (cont.)                    |\n    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+\n    | Timestamp Micoseconds (cont.) |          Event Type           |\n    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+\n    |          Event Code           |          Event Value          |\n    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+\n    |       Event Value (cont.)     |\n    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+\n\n\nParser\n~~~~~~\n\nWA has a parser for revent recordings. This can be used to work with revent\nrecordings in scripts. Here is an example:\n\n.. 
code:: python\n\n    from wa.utils.revent import ReventRecording\n\n    with ReventRecording('/path/to/recording.revent') as recording:\n        print(\"Recording: {}\".format(recording.filepath))\n        print(\"There are {} input events\".format(recording.num_events))\n        print(\"Over a total of {} seconds\".format(recording.duration))\n"
  },
  {
    "path": "doc/source/developer_information/developer_reference/serialization.rst",
    "content": ".. _serialization:\n\nSerialization\n=============\n\nOverview of Serialization\n-------------------------\n\nWA employs a serialization mechanism in order to store some of its internal\nstructures inside the output directory. Serialization is performed in two\nstages:\n\n1. A serializable object is converted into a POD (Plain Old Data) structure\n   consisting of primitive Python types, and a few additional types (see\n   :ref:`wa-pods` below).\n2. The POD structure is serialized into a particular format by a generic\n   parser for that format. Currently, `yaml` and `json` are supported.\n\nDeserialization works in reverse order -- first the serialized text is parsed\ninto a POD, which is then converted to the appropriate object.\n\n\nImplementing Serializable Objects\n---------------------------------\n\nIn order to be considered serializable, an object must either be a POD, or it\nmust implement the ``to_pod()`` method and ``from_pod`` static/class method,\nwhich will perform the conversion to/form pod.\n\nAs an example, below as a (somewhat trimmed) implementation of the ``Event``\nclass:\n\n.. code-block:: python\n\n    class Event(object):\n\n        @staticmethod\n        def from_pod(pod):\n            instance = Event(pod['message'])\n            instance.timestamp = pod['timestamp']\n            return instance\n\n        def __init__(self, message):\n            self.timestamp = datetime.utcnow()\n            self.message = message\n\n        def to_pod(self):\n            return dict(\n                timestamp=self.timestamp,\n                message=self.message,\n            )\n\n\nSerialization API\n-----------------\n\n.. function:: read_pod(source, fmt=None)\n.. function:: write_pod(pod, dest, fmt=None)\n\n    These read and write PODs from a file. The format will be inferred, if\n    possible, from the extension of the file, or it may be specified explicitly\n    with ``fmt``. 
``source`` and ``dest`` can be either strings, in which case\n    they will be interpreted as paths, or they can be file-like objects.\n\n.. function:: is_pod(obj)\n\n    Returns ``True`` if ``obj`` is a POD, and ``False`` otherwise.\n\n.. function:: dump(o, wfh, fmt='json', \\*args, \\*\\*kwargs)\n.. function:: load(s, fmt='json', \\*args, \\*\\*kwargs)\n\n    These implment an altenative serialization interface, which matches the\n    interface exposed by the parsers for the supported formats.\n\n\n.. _wa-pods:\n\nWA POD Types\n------------\n\nPOD types are types that can be handled by a serializer directly, without a need\nfor any additional information. These consist of the build-in python types ::\n\n    list\n    tuple\n    dict\n    set\n    str\n    unicode\n    int\n    float\n    bool\n\n...the standard library types ::\n\n    OrderedDict\n    datetime\n\n...and the WA-defined types ::\n\n    regex_type\n    none_type\n    level\n    cpu_mask\n\nAny structure consisting entirely of these types is a POD and can be serialized\nand then deserialized without losing information. It is important to note that\nonly these specific types are considered POD, their subclasses are *not*.\n\n.. note:: ``dict``\\ s get deserialized as ``OrderedDict``\\ s.\n\n\nSerialization Formats\n---------------------\n\nWA utilizes two serialization formats: YAML and JSON. YAML is used for files\nintended to be primarily written and/or read by humans; JSON is used for files\nintended to be primarily written and/or read by WA and other programs.\n\nThe parsers and serializers for these formats used by WA have been modified to\nhandle additional types (e.g. regular expressions) that are typically not\nsupported by the formats. This was done in such a way that the resulting files\nare still valid and can be parsed by any parser for that format.\n"
  },
  {
    "path": "doc/source/developer_information/developer_reference.rst",
    "content": ".. _developer_reference:\n\n********************\nDeveloper Reference\n********************\n\n.. contents::\n   :depth: 3\n   :local:\n\n\n.. include:: developer_information/developer_reference/framework_overview.rst\n\n-----------------\n\n.. include:: developer_information/developer_reference/plugins.rst\n\n-----------------\n\n.. include:: developer_information/developer_reference/revent.rst\n\n-----------------\n\n.. include:: developer_information/developer_reference/serialization.rst\n\n-----------------\n\n.. include:: developer_information/developer_reference/contributing.rst\n\n"
  },
  {
    "path": "doc/source/developer_information/how_to.rst",
    "content": "*******\nHow Tos\n*******\n\n.. contents:: Contents\n   :depth: 4\n   :local:\n\n.. include:: developer_information/how_tos/adding_plugins.rst\n\n.. include:: developer_information/how_tos/processing_output.rst\n"
  },
  {
    "path": "doc/source/developer_information/how_tos/adding_plugins.rst",
    "content": ".. _deploying-executables-example:\n\nDeploying Executables\n=====================\n\nInstalling binaries for a particular plugin should generally only be performed\nonce during a run. This should typically be done in the ``initialize`` method,\nif the only functionality performed in the method is to install the required binaries\nthen the ``initialize`` method should be decorated with the ``@once``\n:ref:`decorator <execution-decorators>` otherwise this should be placed into a dedicated\nmethod which is decorated instead. Please note if doing this then any installed\npaths should be added as class attributes rather than instance variables. As a\ngeneral rule if binaries are installed as part of ``initialize`` then they\nshould be uninstalled in the complementary ``finalize`` method.\n\nPart of an example workload demonstrating this is shown below:\n\n.. code:: python\n\n  class MyWorkload(Workload):\n        #..\n        @once\n        def initialize(self, context):\n            resource = Executable(self, self.target.abi, 'my_executable')\n            host_binary = context.resolver.get(resource)\n            MyWorkload.target_binary = self.target.install(host_binary)\n        #..\n\n        def setup(self, context):\n            self.command = \"{} -a -b -c\".format(self.target_binary)\n            self.target.execute(self.command)\n        #..\n\n        @once\n        def finalize(self, context):\n            self.target.uninstall('my_executable')\n\n\n.. _adding-a-workload-example:\n\nAdding a Workload\n=================\n\nThe easiest way to create a new workload is to use the\n:ref:`create <create-command>` command. ``wa create workload <args>``.  This\nwill use predefined templates to create a workload based on the options that are\nsupplied to be used as a starting point for the workload. 
For more information\non using the create workload command see ``wa create workload -h``\n\nThe first thing to decide is the type of workload you want to create depending\non the OS you will be using and the aim of the workload. The are currently 6\navailable workload types to choose as detailed in the\n:ref:`Developer Reference <workload-types>`.\n\nOnce you have decided what type of workload you wish to choose this can be\nspecified with ``-k <workload_kind>`` followed by the workload name. This\nwill automatically generate a workload in the your ``WA_CONFIG_DIR/plugins``. If\nyou wish to specify a custom location this can be provided with ``-p\n<path>``\n\nA typical invocation of the :ref:`create <create-command>` command would be in\nthe form::\n\n        wa create workload -k <workload_kind> <workload_name>\n\n\n.. _adding-a-basic-workload-example:\n\nAdding a Basic Workload\n-----------------------\n\nTo add a ``basic`` workload template for our example workload we can simply use the\ncommand::\n\n        wa create workload -k basic ziptest\n\nThis will generate a very basic workload with dummy methods for the each method in\nthe workload interface and it is left to the developer to add any required functionality.\n\nNot all the methods from the interface are required to be implemented, this\nexample shows how a subset might be used to implement a simple workload that\ntimes how long it takes to compress a file of a particular size on the device.\n\n\n.. note:: This is intended as an example of how to implement the Workload\n   :ref:`interface <workload-api>`. 
The methodology used to\n   perform the actual measurement is not necessarily sound, and this\n   Workload should not be used to collect real measurements.\n\nThe first step is to subclass our desired\n:ref:`workload type <workload-types>` depending on the purpose of our workload,\nin this example we are implementing a very simple workload and do not\nrequire any additional feature so shall inherit directly from the the base\n:class:`Workload` class. We then need to provide a ``name`` for our workload\nwhich is what will be used to identify your workload for example in an\nagenda or via the show command, if you used the `create` command this will\nalready be populated for you.\n\n.. code-block:: python\n\n    import os\n    from wa import Workload, Parameter\n\n    class ZipTest(Workload):\n\n        name = 'ziptest'\n\nThe ``description`` attribute should be a string in the structure of a short\nsummary of the purpose of the workload, and will be shown when using the\n:ref:`list command <list-command>`, followed by a more in- depth explanation\nseparated by a new line.\n\n.. code-block:: python\n\n        description = '''\n                      Times how long it takes to gzip a file of a particular size on a device.\n\n                      This workload was created for illustration purposes only. It should not be\n                      used to collect actual measurements.\n                      '''\n\nIn order to allow for additional configuration of the workload from a user a\nlist of :ref:`parameters <plugin-parameters>` can be supplied. These can be\nconfigured in a variety of different ways. For example here we are ensuring that\nthe value of the parameter is an integer and larger than 0 using the ``kind``\nand ``constraint`` options, also if no value is provided we are providing a\n``default`` value of 2000000. 
These parameters will automatically have their\nvalue set as an attribute of the workload so later on we will be able to use the\nvalue provided here as ``self.file_size``.\n\n.. code-block:: python\n\n        parameters = [\n                Parameter('file_size', kind=int, default=2000000,\n                          constraint=lambda x: 0 < x,\n                          description='Size of the file (in bytes) to be gzipped.')\n        ]\n\nNext we will implement our ``setup`` method. This is where we do any preparation\nthat is required before the workload is ran, this is usually things like setting\nup required files on the device and generating commands from user input. In this\ncase we will generate our input file on the host system and then push it to a\nknown location on the target for use in the 'run' stage.\n\n.. code-block:: python\n\n        def setup(self, context):\n            super(ZipTestWorkload, self).setup(context)\n            # Generate a file of the specified size containing random garbage.\n            host_infile = os.path.join(context.output_directory, 'infile')\n            command = 'openssl rand -base64 {} > {}'.format(self.file_size, host_infile)\n            os.system(command)\n            # Set up on-device paths\n            devpath = self.target.path  # os.path equivalent for the target\n            self.target_infile = devpath.join(self.target.working_directory, 'infile')\n            self.target_outfile = devpath.join(self.target.working_directory, 'outfile')\n            # Push the file to the target\n            self.target.push(host_infile, self.target_infile)\n\n\nThe ``run`` method is where the actual 'work' of the workload takes place and is\nwhat is measured by any instrumentation. So for this example this is the\nexecution of creating the zip file on the target.\n\n.. 
code-block:: python\n\n        def run(self, context):\n            cmd = 'cd {} && (time gzip {}) &>> {}'\n            self.target.execute(cmd.format(self.target.working_directory,\n                                           self.target_infile,\n                                           self.target_outfile))\n\nThe ``extract_results`` method is used to extract any results from the target\nfor example we want to pull the file containing the timing information that we\nwill use to generate metrics for our workload and then we add this file as an\nartifact with a 'raw' kind, which means once WA has finished processing it will\nallow it to decide whether to keep the file or not.\n\n.. code-block:: python\n\n        def extract_results(self, context):\n            super(ZipTestWorkload, self).extract_results(context)\n            # Pull the results file to the host\n            self.host_outfile = os.path.join(context.output_directory, 'timing_results')\n            self.target.pull(self.target_outfile, self.host_outfile)\n            context.add_artifact('ziptest-results', self.host_outfile, kind='raw')\n\nThe ``update_output`` method we can do any generation of metrics that we wish to\nfor our workload. In this case we are going to simply convert the times reported\ninto seconds and add them as 'metrics' to WA which can then be displayed to the\nuser along with any others in a format dependant on which output processors they\nhave enabled for the run.\n\n.. 
code-block:: python\n\n        def update_output(self, context):\n            super(ZipTestWorkload, self).update_output(context)\n            # Extract metrics form the file's contents and update the result\n            # with them.\n            content = iter(open(self.host_outfile).read().strip().split())\n            for value, metric in zip(content, content):\n                mins, secs = map(float, value[:-1].split('m'))\n                context.add_metric(metric, secs + 60 * mins, 'seconds')\n\nFinally in the ``teardown`` method we will perform any required clean up for the\nworkload so we will delete the input and output files from the device.\n\n.. code-block:: python\n\n        def teardown(self, context):\n            super(ZipTestWorkload, self).teardown(context)\n            self.target.remove(self.target_infile)\n            self.target.remove(self.target_outfile)\n\nThe full implementation of this workload would look something like:\n\n.. code-block:: python\n\n    import os\n    from wa import Workload, Parameter\n\n    class ZipTestWorkload(Workload):\n\n        name = 'ziptest'\n\n        description = '''\n                      Times how long it takes to gzip a file of a particular size on a device.\n\n                      This workload was created for illustration purposes only. 
It should not be\n                      used to collect actual measurements.\n                      '''\n\n        parameters = [\n                Parameter('file_size', kind=int, default=2000000,\n                          constraint=lambda x: 0 < x,\n                          description='Size of the file (in bytes) to be gzipped.')\n        ]\n\n        def setup(self, context):\n            super(ZipTestWorkload, self).setup(context)\n            # Generate a file of the specified size containing random garbage.\n            host_infile = os.path.join(context.output_directory, 'infile')\n            command = 'openssl rand -base64 {} > {}'.format(self.file_size, host_infile)\n            os.system(command)\n            # Set up on-device paths\n            devpath = self.target.path  # os.path equivalent for the target\n            self.target_infile = devpath.join(self.target.working_directory, 'infile')\n            self.target_outfile = devpath.join(self.target.working_directory, 'outfile')\n            # Push the file to the target\n            self.target.push(host_infile, self.target_infile)\n\n        def run(self, context):\n            cmd = 'cd {} && (time gzip {}) &>> {}'\n            self.target.execute(cmd.format(self.target.working_directory,\n                                           self.target_infile,\n                                           self.target_outfile))\n        def extract_results(self, context):\n            super(ZipTestWorkload, self).extract_results(context)\n            # Pull the results file to the host\n            self.host_outfile = os.path.join(context.output_directory, 'timing_results')\n            self.target.pull(self.target_outfile, self.host_outfile)\n            context.add_artifact('ziptest-results', self.host_outfile, kind='raw')\n\n        def update_output(self, context):\n            super(ZipTestWorkload, self).update_output(context)\n            # Extract metrics form the file's contents and update the 
result\n            # with them.\n            content = iter(open(self.host_outfile).read().strip().split())\n            for value, metric in zip(content, content):\n                mins, secs = map(float, value[:-1].split('m'))\n                context.add_metric(metric, secs + 60 * mins, 'seconds')\n\n        def teardown(self, context):\n            super(ZipTestWorkload, self).teardown(context)\n            self.target.remove(self.target_infile)\n            self.target.remove(self.target_outfile)\n\n\n\n.. _apkuiautomator-example:\n\nAdding a ApkUiAutomator Workload\n--------------------------------\n\nIf we wish to create a workload to automate the testing of the Google Docs\nandroid app, we would choose to perform the automation using UIAutomator and we\nwould want to automatically deploy and install the apk file to the target,\ntherefore we would choose the :ref:`ApkUiAutomator workload\n<apkuiautomator-workload>` type with the following command::\n\n    $ wa create workload -k apkuiauto google_docs\n    Workload created in $WA_USER_DIRECTORY/plugins/google_docs\n\n\nFrom here you can navigate to the displayed directory and you will find your\n``__init__.py``  and a ``uiauto`` directory. The former is your python WA\nworkload and will look something like this. For an example of what should be\ndone in each of the main method please see\n:ref:`adding a basic example <adding-a-basic-workload-example>` above.\n\n.. 
code-block:: python\n\n        from wa import Parameter, ApkUiautoWorkload\n        class GoogleDocs(ApkUiautoWorkload):\n            name = 'google_docs'\n            description = \"This is an placeholder description\"\n            # Replace with a list of supported package names in the APK file(s).\n            package_names = ['package_name']\n\n            parameters = [\n             # Workload parameters go here e.g.\n             Parameter('example_parameter', kind=int, allowed_values=[1,2,3],\n                       default=1, override=True, mandatory=False,\n                       description='This is an example parameter')\n            ]\n\n            def __init__(self, target, **kwargs):\n             super(GoogleDocs, self).__init__(target, **kwargs)\n             # Define any additional attributes required for the workload\n\n            def init_resources(self, resolver):\n             super(GoogleDocs, self).init_resources(resolver)\n             # This method may be used to perform early resource discovery and\n             # initialization. This is invoked during the initial loading stage and\n             # before the device is ready, so cannot be used for any device-dependent\n             # initialization. 
This method is invoked before the workload instance is\n             # validated.\n\n            def initialize(self, context):\n             super(GoogleDocs, self).initialize(context)\n             # This method should be used to perform once-per-run initialization of a\n             # workload instance.\n\n            def validate(self):\n             super(GoogleDocs, self).validate()\n             # Validate inter-parameter assumptions etc\n\n            def setup(self, context):\n             super(GoogleDocs, self).setup(context)\n             # Perform any necessary setup before starting the UI automation\n\n            def extract_results(self, context):\n             super(GoogleDocs, self).extract_results(context)\n             # Extract results on the target\n\n            def update_output(self, context):\n             super(GoogleDocs, self).update_output(context)\n             # Update the output within the specified execution context with the\n             # metrics and artifacts form this workload iteration.\n\n            def teardown(self, context):\n             super(GoogleDocs, self).teardown(context)\n             # Perform any final clean up for the Workload.\n\n\nDepending on the purpose of your workload you can choose to implement which\nmethods you require. The main things that need setting are the list of\n``package_names`` which must be a list of strings containing the android package\nname that will be used during resource resolution to locate the relevant apk\nfile for the workload. Additionally the the workload parameters will need to\nupdating to any relevant parameters required by the workload as well as the\ndescription.\n\n\nThe latter will contain a framework for performing the UI automation on the\ntarget, the files you will be most interested in will be\n``uiauto/app/src/main/java/arm/wa/uiauto/UiAutomation.java`` which will contain\nthe actual code of the automation and will look something like:\n\n.. 
code-block:: java\n\n        package com.arm.wa.uiauto.google_docs;\n\n        import android.app.Activity;\n        import android.os.Bundle;\n        import org.junit.Test;\n        import org.junit.runner.RunWith;\n        import android.support.test.runner.AndroidJUnit4;\n\n        import android.util.Log;\n        import android.view.KeyEvent;\n\n        // Import the uiautomator libraries\n        import android.support.test.uiautomator.UiObject;\n        import android.support.test.uiautomator.UiObjectNotFoundException;\n        import android.support.test.uiautomator.UiScrollable;\n        import android.support.test.uiautomator.UiSelector;\n\n        import org.junit.Before;\n        import org.junit.Test;\n        import org.junit.runner.RunWith;\n\n        import com.arm.wa.uiauto.BaseUiAutomation;\n\n        @RunWith(AndroidJUnit4.class)\n        public class UiAutomation extends BaseUiAutomation {\n\n            protected Bundle parameters;\n            protected int example_parameter;\n\n            public static String TAG = \"google_docs\";\n\n            @Before\n            public void initilize() throws Exception {\n                // Perform any parameter initialization here\n                parameters = getParams(); // Required to decode passed parameters.\n                packageID = getPackageID(parameters);\n                example_parameter = parameters.getInt(\"example_parameter\");\n            }\n\n            @Test\n            public void setup() throws Exception {\n                // Optional: Perform any setup required before the main workload\n                // is ran, e.g. 
dismissing welcome screens\n            }\n\n            @Test\n            public void runWorkload() throws Exception {\n                   // The main UI Automation code goes here\n            }\n\n            @Test\n            public void extractResults() throws Exception {\n                // Optional: Extract any relevant results from the workload,\n            }\n\n            @Test\n            public void teardown() throws Exception {\n                // Optional: Perform any clean up for the workload\n            }\n        }\n\nA few items to note from the template:\n    - Each of the stages of execution for example ``setup``, ``runWorkload`` etc\n      are decorated with the ``@Test`` decorator, this is important to allow\n      these methods to be called at the appropriate time however any additional\n      methods you may add do not require this decorator.\n    - The ``initialize`` method has the ``@Before`` decorator, this is there to\n      ensure that this method is called before executing any of the workload\n      stages and therefore is used to decode and initialize any parameters that\n      are passed in.\n    - The code currently retrieves the ``example_parameter`` that was\n      provided to the python workload as an Integer, there are similar calls to\n      retrieve parameters of different types e.g. ``getString``, ``getBoolean``,\n      ``getDouble`` etc.\n\nOnce you have implemented your java workload you can use the file\n``uiauto/build.sh`` to compile your automation into an apk file to perform the\nautomation. 
The generated apk will be generated with the package name\n``com.arm.wa.uiauto.<workload_name>`` which when running your workload will be\nautomatically detected by the resource getters and deployed to the device.\n\n\nAdding a ReventApk Workload\n---------------------------\n\nIf we wish to create a workload to automate the testing of a UI based workload\nthat we cannot / do not wish to use UiAutomator then we can perform the\nautomation using revent. In this example we would want to automatically deploy\nand install an apk file to the target, therefore we would choose the\n:ref:`ApkRevent workload <apkrevent-workload>` type with the following\ncommand::\n\n    $ wa create workload -k apkrevent my_game\n    Workload created in $WA_USER_DIRECTORY/plugins/my_game\n\nThis will generate a revent based workload you will end up with a very similar\npython file as to the one outlined in generating a :ref:`UiAutomator based\nworkload <apkuiautomator-example>` however without the accompanying java\nautomation files.\n\nThe main difference between the two is that this workload will subclass\n``ApkReventWorkload`` instead of ``ApkUiautomatorWorkload`` as shown below.\n\n.. code-block:: python\n\n    from wa import ApkReventWorkload\n\n    class MyGame(ApkReventWorkload):\n\n        name = 'mygame'\n        package_names = ['com.mylogo.mygame']\n\n        # ..\n\n\n---------------------------------------------------------------\n\n.. _adding-an-instrument-example:\n\nAdding an Instrument\n====================\nThis is an example of how we would create a instrument which will trace device\nerrors using a custom \"trace\" binary file. For more detailed information please see the\n:ref:`Instrument Reference <instrument-reference>`. The first thing to do is to create\na new file under ``$WA_USER_DIRECTORY/plugins/`` and subclass\n:class:`Instrument`. 
Make sure to overwrite the variable name with what we want our instrument\nto be called and then locate our binary for the instrument.\n\n::\n\n        class TraceErrorsInstrument(Instrument):\n\n            name = 'trace-errors'\n\n            def __init__(self, target, **kwargs):\n                super(TraceErrorsInstrument, self).__init__(target, **kwargs)\n                self.binary_name = 'trace'\n                self.binary_file = os.path.join(os.path.dirname(__file__), self.binary_name)\n                self.trace_on_target = None\n\nWe then declare and implement the required methods as detailed in the\n:ref:`Instrument API <instrument-api>`. For the ``initialize`` method, we want to install\nthe executable file to the target so we can use the target's ``install``\nmethod which will try to copy the file to a location on the device that\nsupports execution, change the file mode appropriately and return the\nfile path on the target. ::\n\n    def initialize(self, context):\n        self.trace_on_target = self.target.install(self.binary_file)\n\nThen we implemented the start method, which will simply run the file to start\ntracing. Supposing that the call to this binary requires some overhead to begin\ncollecting errors we might want to decorate the method with the ``@slow``\ndecorator to try and reduce the impact on other running instruments. For more\ninformation on prioritization please see the\n:ref:`Developer Reference <prioritization>`. ::\n\n    @slow\n    def start(self, context):\n        self.target.execute('{} start'.format(self.trace_on_target))\n\nLastly, we need to stop tracing once the workload stops and this happens in the\nstop method, assuming stopping the collection also require some overhead we have\nagain decorated the method. 
::\n\n    @slow\n    def stop(self, context):\n        self.target.execute('{} stop'.format(self.trace_on_target))\n\nOnce we have generated our result data we need to retrieve it from the device\nfor further processing or adding directly to WA's output for that job. For\nexample for trace data we will want to pull it from the device and add it as a\n:ref:`artifact <artifact>` to WA's :ref:`context <context>`. Once we have\nretrieved the data, we can now do any further processing and add any relevant\n:ref:`Metrics <metrics>` to the :ref:`context <context>`. For this we will use\nthe ``add_metric`` method to add the results to the final output for that\nworkload. The method can be passed 4 params, which are the metric `key`,\n`value`, `unit` and `lower_is_better`. ::\n\n    def update_output(self, context):\n        # pull the trace file from the target\n        self.result = os.path.join(self.target.working_directory, 'trace.txt')\n        self.outfile = os.path.join(context.output_directory, 'trace.txt')\n        self.target.pull(self.result, self.outfile)\n        context.add_artifact('error_trace', self.outfile, kind='export')\n\n        # parse the file if needs to be parsed, or add result directly to\n        # context.\n\n        metric = # ..\n        context.add_metric('number_of_errors', metric, lower_is_better=True)\n\nAt the end of each job we might want to delete any files generated by the\ninstruments and the code to clear these files goes in the teardown method. ::\n\n    def teardown(self, context):\n        self.target.remove(os.path.join(self.target.working_directory, 'trace.txt'))\n\nAt the very end of the run we would want to uninstall the binary we deployed earlier. 
::\n\n    def finalize(self, context):\n        self.target.uninstall(self.binary_name)\n\nSo the full example would look something like::\n\n        from wa import Instrument\n\n        class TraceErrorsInstrument(Instrument):\n\n            name = 'trace-errors'\n\n            def __init__(self, target, **kwargs):\n                super(TraceErrorsInstrument, self).__init__(target, **kwargs)\n                self.binary_name = 'trace'\n                self.binary_file = os.path.join(os.path.dirname(__file__), self.binary_name)\n                self.trace_on_target = None\n\n            def initialize(self, context):\n                self.trace_on_target = self.target.install(self.binary_file)\n\n            @slow\n            def start(self, context):\n                self.target.execute('{} start'.format(self.trace_on_target))\n\n            @slow\n            def stop(self, context):\n                self.target.execute('{} stop'.format(self.trace_on_target))\n\n            def update_output(self, context):\n                self.result = os.path.join(self.target.working_directory, 'trace.txt')\n                self.outfile = os.path.join(context.output_directory, 'trace.txt')\n                self.target.pull(self.result, self.outfile)\n                context.add_artifact('error_trace', self.outfile, kind='export')\n\n                metric = # ..\n                context.add_metric('number_of_errors', metric, lower_is_better=True)\n\n            def teardown(self, context):\n                self.target.remove(os.path.join(self.target.working_directory, 'trace.txt'))\n\n            def finalize(self, context):\n                self.target.uninstall(self.binary_name)\n\n.. _adding-an-output-processor-example:\n\nAdding an Output Processor\n==========================\n\nThis is an example of how we would create an output processor which will format\nthe run metrics as a column-aligned table. 
The first thing to do is to create\na new file under ``$WA_USER_DIRECTORY/plugins/`` and subclass\n:class:`OutputProcessor`. Make sure to overwrite the variable name with what we want our\nprocessor to be called and provide a short description.\n\nNext we need to implement any relevant methods, (please see\n:ref:`adding an output processor <adding-an-output-processor>` for all the\navailable methods). In this case we only want to implement the\n``export_run_output`` method as we are not generating any new artifacts and\nwe only care about the overall output rather than the individual job\noutputs. The implementation is very simple, it just loops through all\nthe available metrics for all the available jobs and adds them to a list\nwhich is written to file and then added as an :ref:`artifact <artifact>` to\nthe :ref:`context <context>`.\n\n.. code-block:: python\n\n    import os\n    from wa import OutputProcessor\n    from wa.utils.misc import write_table\n\n\n    class Table(OutputProcessor):\n\n        name = 'table'\n        description = 'Generates a text file containing a column-aligned table of run results.'\n\n        def export_run_output(self, output, target_info):\n            rows = []\n\n            for job in output.jobs:\n                for metric in job.metrics:\n                    rows.append([metric.name, str(metric.value), metric.units or '',\n                                 metric.lower_is_better  and '-' or '+'])\n\n            outfile =  output.get_path('table.txt')\n            with open(outfile, 'w') as wfh:\n                write_table(rows, wfh)\n            output.add_artifact('results_table', 'table.txt', 'export')\n\n\n.. 
_adding-custom-target-example:\n\nAdding a Custom Target\n======================\nThis is an example of how we would create a customised target, this is typically\nused where we would need to augment the existing functionality for example on\ndevelopment boards where we need to perform additional actions to implement some\nfunctionality. In this example we are going to assume that this particular\ndevice is running Android and requires a special \"wakeup\" command to be sent before it\ncan execute any other command.\n\nTo add a new target to WA we will first create a new file in\n``$WA_USER_DIRECTORY/plugins/example_target.py``. In order to facilitate with\ncreating a new target WA provides a helper function to create a description for\nthe specified target class, and specified components. For components that are\nnot explicitly specified it will attempt to guess sensible defaults based on the target\nclass' bases.\n\n.. code-block:: python\n\n        # Import our helper function\n        from wa import add_description_for_target\n\n        # Import the Target that our custom implementation will be based on\n        from devlib import AndroidTarget\n\n        class ExampleTarget(AndroidTarget):\n            # Provide the name that will be used to identify your custom target\n            name = 'example_target'\n\n            # Override our custom method(s)\n            def execute(self, *args, **kwargs):\n                super(ExampleTarget, self).execute('wakeup', check_exit_code=False)\n                return super(ExampleTarget, self).execute(*args, **kwargs)\n\n\n        description = '''An Android target which requires an explicit \"wakeup\" command\n                          to be sent before accepting any other command'''\n        # Call the helper function with our newly created function and its description.\n        add_description_for_target(ExampleTarget, description)\n\n"
  },
  {
    "path": "doc/source/developer_information/how_tos/processing_output.rst",
    "content": ".. _processing_output:\n\nProcessing WA Output\n====================\n\nThis section will illustrate the use of WA's :ref:`output processing API\n<output_processing_api>` by creating a simple ASCII report generator. To make\nthings concrete, this how-to will be processing the output from running the\nfollowing agenda::\n\n        sections:\n            - runtime_params:\n                frequency: min\n              classifiers:\n                  frequency: min\n            - runtime_params:\n                frequency: max\n              classifiers:\n                  frequency: max\n        workloads:\n            - sysbench\n            - deepbench\n\nThis runs two workloads under two different configurations each -- once with\nCPU frequency fixed to max, and once with CPU frequency fixed to min.\nClassifiers are used to indicate the configuration in the output.\n\nFirst, create the :class:`RunOutput` object, which is the main interface for\ninteracting with WA outputs. Or alternatively a :class:`RunDatabaseOutput`\nif storing your results in a postgres database.\n\n.. code-block:: python\n\n        import sys\n\n        from wa import RunOutput\n\n        # Path to the output directory specified in the first argument\n        ro = RunOutput(sys.argv[1])\n\n\nRun Info\n--------\n\nNext, we're going to print out an overall summary of the run.\n\n\n.. 
code-block:: python\n\n        from __future__ import print_function   # for Python 2 compat.\n\n        from wa.utils.misc import format_duration\n\n        print('-'*20)\n        print('Run ID:', ro.info.uuid)\n        print('Run status:', ro.status)\n        print('Run started at:', ro.info.start_time.isoformat())\n        print('Run completed at:', ro.info.end_time.isoformat())\n        print('Run duration:', format_duration(ro.info.duration))\n        print('Ran', len(ro.jobs), 'jobs')\n        print('-'*20)\n        print()\n\n``RunOutput.info`` is an instance of :class:`RunInfo` which encapsulates\noverall-run metadata, such as the duration.\n\n\nTarget Info\n-----------\n\nNext, some information about the device the results were collected on.\n\n.. code-block:: python\n\n        print('    Target Information     ')\n        print('    -------------------    ')\n        print('hostname:', ro.target_info.hostname)\n        if ro.target_info.os == 'android':\n            print('Android ID:', ro.target_info.android_id)\n        else:\n            print('host ID:', ro.target_info.hostid)\n        print('CPUs:', ', '.join(cpu.name for cpu in ro.target_info.cpus))\n        print()\n\n        print('OS:', ro.target_info.os)\n        print('ABI:', ro.target_info.abi)\n        print('rooted:', ro.target_info.is_rooted)\n        print('kernel version:', ro.target_info.kernel_version)\n        print('os version:')\n        for k, v in ro.target_info.os_version.items():\n            print('\\t', k+':', v)\n        print()\n        print('-'*27)\n        print()\n\n``RunOutput.target_info`` is an instance of :class:`TargetInfo` that contains\ninformation collected from the target during the run.\n\n\nJobs Summary\n------------\n\nNext, show a summary of executed jobs.\n\n.. 
code-block:: python\n\n        from wa.utils.misc import write_table\n\n        print('           Jobs            ')\n        print('           ----            ')\n        print()\n        rows = []\n        for job in ro.jobs:\n            rows.append([job.id, job.label, job.iteration, job.status])\n        write_table(rows, sys.stdout, align='<<><',\n                headers=['ID', 'LABEL', 'ITER.', 'STATUS'])\n        print()\n        print('-'*27)\n        print()\n\n``RunOutput.jobs`` is a list of :class:`JobOutput` objects. These contain\ninformation about that particular job, including its execution status, and\n:ref:`metrics` and :ref:`artifact` generated by the job.\n\n\nCompare Metrics\n---------------\n\nFinally, collect metrics, sort them by the \"frequency\" classifier. Classifiers\nthat are present in the metric but not its job have been added by the workload.\nFor the purposes of this report, they will be used to augment the metric's name.\n\n.. code-block:: python\n\n        from collections import defaultdict\n\n        print()\n        print('    Metrics Comparison     ')\n        print('    ------------------     ')\n        print()\n        scores = defaultdict(lambda: defaultdict(lambda: defaultdict()))\n        for job in ro.jobs:\n            for metric in job.metrics:\n                workload = job.label\n                name = metric.name\n                freq = job.classifiers['frequency']\n                for cname, cval in sorted(metric.classifiers.items()):\n                    if cname not in job.classifiers:\n                        # was not propagated from the job, therefore was\n                        # added by the workload\n                        name += '/{}={}'.format(cname, cval)\n\n                scores[workload][name][freq] = metric\n\nOnce the metrics have been sorted, generate the report showing the delta\nbetween the two configurations (indicated by the \"frequency\" classifier) and\nhighlight any unexpected deltas 
(based on the ``lower_is_better`` attribute of\nthe metric). (In practice, you will want to run multiple iterations of each\nconfiguration, calculate averages and standard deviations, and only highlight\nstatistically significant deltas.)\n\n.. code-block:: python\n\n        rows = []\n        for workload in sorted(scores.keys()):\n            wldata = scores[workload]\n\n            for name in sorted(wldata.keys()):\n                min_score = wldata[name]['min'].value\n                max_score = wldata[name]['max'].value\n                delta =  max_score - min_score\n                units = wldata[name]['min'].units or ''\n                lib = wldata[name]['min'].lower_is_better\n\n                warn = ''\n                if (lib and delta > 0) or (not lib and delta < 0):\n                    warn = '!!!'\n\n                rows.append([workload, name,\n                '{:.3f}'.format(min_score), '{:.3f}'.format(max_score),\n                '{:.3f}'.format(delta), units, warn])\n\n            # separate workloads with a blank row\n            rows.append(['', '', '', '', '', '', ''])\n\n\n        write_table(rows, sys.stdout, align='<<>>><<',\n                headers=['WORKLOAD', 'METRIC', 'MIN.', 'MAX', 'DELTA', 'UNITS', ''])\n        print()\n        print('-'*27)\n\nThis concludes this how-to. For more information, please see :ref:`output\nprocessing API documentation <output_processing_api>`.\n\n\nComplete Example\n----------------\n\nBelow is the complete example code, and a report it generated for a sample run.\n\n.. 
code-block:: python\n\n        from __future__ import print_function   # for Python 2 compat.\n        import sys\n        from collections import defaultdict\n\n        from wa import RunOutput\n        from wa.utils.misc import format_duration, write_table\n\n\n\n        # Path to the output directory specified in the first argument\n        ro = RunOutput(sys.argv[1])\n\n        print('-'*27)\n        print('Run ID:', ro.info.uuid)\n        print('Run status:', ro.status)\n        print('Run started at:', ro.info.start_time.isoformat())\n        print('Run completed at:', ro.info.end_time.isoformat())\n        print('Run duration:', format_duration(ro.info.duration))\n        print('Ran', len(ro.jobs), 'jobs')\n        print('-'*27)\n        print()\n\n        print('    Target Information     ')\n        print('    -------------------    ')\n        print('hostname:', ro.target_info.hostname)\n        if ro.target_info.os == 'android':\n            print('Android ID:', ro.target_info.android_id)\n        else:\n            print('host ID:', ro.target_info.hostid)\n        print('CPUs:', ', '.join(cpu.name for cpu in ro.target_info.cpus))\n        print()\n\n        print('OS:', ro.target_info.os)\n        print('ABI:', ro.target_info.abi)\n        print('rooted:', ro.target_info.is_rooted)\n        print('kernel version:', ro.target_info.kernel_version)\n        print('OS version:')\n        for k, v in ro.target_info.os_version.items():\n            print('\\t', k+':', v)\n        print()\n        print('-'*27)\n        print()\n\n        print('           Jobs            ')\n        print('           ----            ')\n        print()\n        rows = []\n        for job in ro.jobs:\n            rows.append([job.id, job.label, job.iteration, job.status])\n        write_table(rows, sys.stdout, align='<<><',\n                headers=['ID', 'LABEL', 'ITER.', 'STATUS'])\n        print()\n        print('-'*27)\n\n        print()\n        print('    Metrics 
Comparison     ')\n        print('    ------------------     ')\n        print()\n        scores = defaultdict(lambda: defaultdict(lambda: defaultdict()))\n        for job in ro.jobs:\n            for metric in job.metrics:\n                workload = job.label\n                name = metric.name\n                freq = job.classifiers['frequency']\n                for cname, cval in sorted(metric.classifiers.items()):\n                    if cname not in job.classifiers:\n                        # was not propagated from the job, therefore was\n                        # added by the workload\n                        name += '/{}={}'.format(cname, cval)\n\n            scores[workload][name][freq] = metric\n\n        rows = []\n        for workload in sorted(scores.keys()):\n            wldata = scores[workload]\n\n            for name in sorted(wldata.keys()):\n                min_score = wldata[name]['min'].value\n                max_score = wldata[name]['max'].value\n                delta =  max_score - min_score\n                units = wldata[name]['min'].units or ''\n                lib = wldata[name]['min'].lower_is_better\n\n                warn = ''\n                if (lib and delta > 0) or (not lib and delta < 0):\n                    warn = '!!!'\n\n                rows.append([workload, name,\n                '{:.3f}'.format(min_score), '{:.3f}'.format(max_score),\n                '{:.3f}'.format(delta), units, warn])\n\n            # separate workloads with a blank row\n            rows.append(['', '', '', '', '', '', ''])\n\n\n        write_table(rows, sys.stdout, align='<<>>><<',\n                headers=['WORKLOAD', 'METRIC', 'MIN.', 'MAX', 'DELTA', 'UNITS', ''])\n        print()\n        print('-'*27)\n\nSample output::\n\n        ---------------------------\n        Run ID: 78aef931-cd4c-429b-ac9f-61f6893312e6\n        Run status: OK\n        Run started at: 2018-06-27T12:55:23.746941\n        Run completed at: 2018-06-27T13:04:51.067309\n        
Run duration: 9 minutes 27 seconds\n        Ran 4 jobs\n        ---------------------------\n\n        Target Information\n        -------------------\n        hostname: localhost\n        Android ID: b9d1d8b48cfba007\n        CPUs: A53, A53, A53, A53, A73, A73, A73, A73\n\n        OS: android\n        ABI: arm64\n        rooted: True\n        kernel version: 4.9.75-04208-g2c913991a83d-dirty 114 SMP PREEMPT Wed May 9 10:33:36 BST 2018\n        OS version:\n                all_codenames: O\n                base_os:\n                codename: O\n                incremental: eng.valsch.20170517.180115\n                preview_sdk: 0\n                release: O\n                sdk: 25\n                security_patch: 2017-04-05\n\n        ---------------------------\n\n                Jobs\n                ----\n\n        ID     LABEL     ITER. STATUS\n        --     -----     ----- ------\n        s1-wk1 sysbench      1 OK\n        s1-wk2 deepbench     1 OK\n        s2-wk1 sysbench      1 OK\n        s2-wk2 deepbench     1 OK\n\n        ---------------------------\n\n        Metrics Comparison\n        ------------------\n\n        WORKLOAD  METRIC                                            MIN.       
MAX    DELTA UNITS\n        --------  ------                                            ----       ---    ----- -----\n        deepbench GOPS/a_t=n/b_t=n/k=1024/m=128/n=1                0.699     0.696   -0.003         !!!\n        deepbench GOPS/a_t=n/b_t=n/k=1024/m=3072/n=1               0.471     0.715    0.244\n        deepbench GOPS/a_t=n/b_t=n/k=1024/m=3072/n=1500           23.514    36.432   12.918\n        deepbench GOPS/a_t=n/b_t=n/k=1216/m=64/n=1                 0.333     0.333   -0.000         !!!\n        deepbench GOPS/a_t=n/b_t=n/k=128/m=3072/n=1                0.405     1.073    0.668\n        deepbench GOPS/a_t=n/b_t=n/k=128/m=3072/n=1500            19.914    34.966   15.052\n        deepbench GOPS/a_t=n/b_t=n/k=128/m=4224/n=1                0.232     0.486    0.255\n        deepbench GOPS/a_t=n/b_t=n/k=1280/m=128/n=1500            20.721    31.654   10.933\n        deepbench GOPS/a_t=n/b_t=n/k=1408/m=128/n=1                0.701     0.702    0.001\n        deepbench GOPS/a_t=n/b_t=n/k=1408/m=176/n=1500            19.902    29.116    9.214\n        deepbench GOPS/a_t=n/b_t=n/k=176/m=4224/n=1500            26.030    39.550   13.519\n        deepbench GOPS/a_t=n/b_t=n/k=2048/m=35/n=700              10.884    23.615   12.731\n        deepbench GOPS/a_t=n/b_t=n/k=2048/m=5124/n=700            26.740    37.334   10.593\n        deepbench execution_time                                 318.758   220.629  -98.129 seconds !!!\n        deepbench time (msec)/a_t=n/b_t=n/k=1024/m=128/n=1         0.375     0.377    0.002         !!!\n        deepbench time (msec)/a_t=n/b_t=n/k=1024/m=3072/n=1       13.358     8.793   -4.565\n        deepbench time (msec)/a_t=n/b_t=n/k=1024/m=3072/n=1500   401.338   259.036 -142.302\n        deepbench time (msec)/a_t=n/b_t=n/k=1216/m=64/n=1          0.467     0.467    0.000         !!!\n        deepbench time (msec)/a_t=n/b_t=n/k=128/m=3072/n=1         1.943     0.733   -1.210\n        deepbench time 
(msec)/a_t=n/b_t=n/k=128/m=3072/n=1500     59.237    33.737  -25.500\n        deepbench time (msec)/a_t=n/b_t=n/k=128/m=4224/n=1         4.666     2.224   -2.442\n        deepbench time (msec)/a_t=n/b_t=n/k=1280/m=128/n=1500     23.721    15.528   -8.193\n        deepbench time (msec)/a_t=n/b_t=n/k=1408/m=128/n=1         0.514     0.513   -0.001\n        deepbench time (msec)/a_t=n/b_t=n/k=1408/m=176/n=1500     37.354    25.533  -11.821\n        deepbench time (msec)/a_t=n/b_t=n/k=176/m=4224/n=1500     85.679    56.391  -29.288\n        deepbench time (msec)/a_t=n/b_t=n/k=2048/m=35/n=700        9.220     4.249   -4.970\n        deepbench time (msec)/a_t=n/b_t=n/k=2048/m=5124/n=700    549.413   393.517 -155.896\n\n        sysbench  approx.  95 percentile                           3.800     1.450   -2.350 ms\n        sysbench  execution_time                                   1.790     1.437   -0.353 seconds !!!\n        sysbench  response time avg                                1.400     1.120   -0.280 ms\n        sysbench  response time max                               40.740    42.760    2.020 ms      !!!\n        sysbench  response time min                                0.710     0.710    0.000 ms\n        sysbench  thread fairness events avg                    1250.000  1250.000    0.000\n        sysbench  thread fairness events stddev                  772.650   213.040 -559.610\n        sysbench  thread fairness execution time avg               1.753     1.401   -0.352         !!!\n        sysbench  thread fairness execution time stddev            0.000     0.000    0.000\n        sysbench  total number of events                       10000.000 10000.000    0.000\n        sysbench  total time                                       1.761     1.409   -0.352 s\n\n\n        ---------------------------\n\n"
  },
  {
    "path": "doc/source/developer_information.rst",
    "content": "=====================\nDeveloper Information\n=====================\n\n.. contents:: Contents\n   :depth: 4\n   :local:\n\n------------------\n\n.. include:: developer_information/developer_guide.rst\n\n------------------\n\n.. include:: developer_information/how_to.rst\n\n------------------\n\n.. include:: developer_information/developer_reference.rst\n"
  },
  {
    "path": "doc/source/faq.rst",
    "content": ".. _faq:\n\nFAQ\n===\n\n.. contents::\n   :depth: 1\n   :local:\n\n---------------------------------------------------------------------------------------\n\n\n**Q:** I receive the error: ``\"<<Workload> file <file_name> file> could not be found.\"``\n-----------------------------------------------------------------------------------------\n\n**A:** Some workload e.g. AdobeReader, GooglePhotos etc require external asset\nfiles. We host some additional workload dependencies in the `WA Assets Repo\n<https://github.com/ARM-software/workload-automation-assets>`_. To allow WA to\ntry and automatically download required assets from the repository please add\nthe following to your configuration:\n\n.. code-block:: YAML\n\n        remote_assets_url: https://raw.githubusercontent.com/ARM-software/workload-automation-assets/master/dependencies\n\n------------\n\n**Q:** I receive the error: ``\"No matching package found for workload <workload>\"``\n------------------------------------------------------------------------------------\n\n**A:** WA cannot locate the application required for the workload. 
Please either\ninstall the application onto the device or source the apk and place into\n``$WA_USER_DIRECTORY/dependencies/<workload>``\n\n------------\n\n**Q:** I am trying to set a valid runtime parameters however I still receive the error ``\"Unknown runtime parameter\"``\n-------------------------------------------------------------------------------------------------------------------------\n\n**A:** Please ensure you have the corresponding module loaded on the device.\nSee :ref:`Runtime Parameters <runtime-parameters>` for the list of\nruntime parameters and their containing modules, and the appropriate section in\n:ref:`setting up a device <setting-up-a-device>` for ensuring it is installed.\n\n-------------\n\n**Q:** I have a big.LITTLE device but am unable to set parameters corresponding to the big or little core and receive the error ``\"Unknown runtime parameter\"``\n-----------------------------------------------------------------------------------------------------------------------------------------------------------------\n\n**A:** Please ensure you have the hot plugging module enabled for your device (Please see question above).\n\n\n**A:** This can occur if the device uses dynamic hot-plugging and although WA\nwill try to online all cores to perform discovery sometimes this can fail\ncausing to WA to incorrectly assume that only one cluster is present. To\nworkaround this please set the ``core_names`` :ref:`parameter <core-names>` in the configuration for\nyour device.\n\n\n**Q:** I receive the error ``Could not find plugin or alias \"standard\"``\n------------------------------------------------------------------------\n\n**A:** Upon first use of WA3, your WA2 config file typically located at\n``$USER_HOME/config.py`` will have been converted to a WA3 config file located at\n``$USER_HOME/config.yaml``. The \"standard\" output processor, present in WA2, has\nbeen merged into the core framework and therefore no longer exists. 
To fix this\nerror please remove the \"standard\" entry from the \"augmentations\" list in the\nWA3 config file.\n\n**Q:** My Juno board keeps resetting upon starting WA even if it hasn't crashed.\n--------------------------------------------------------------------------------\n**A** Please ensure that you do not have any other terminals (e.g. ``screen``\nsessions) connected to the board's UART. When WA attempts to open the connection\nfor its own use this can cause the board to reset if a connection is already\npresent.\n\n\n**Q:** I'm using the FPS instrument but I do not get any/correct results for my workload\n-----------------------------------------------------------------------------------------\n\n**A:** If your device is running with Android 6.0 + then the default utility for\ncollecting fps metrics will be ``gfxinfo`` however this does not seem to be able\nto extract any meaningful information for some workloads. In this case please\ntry setting the ``force_surfaceflinger`` parameter for the ``fps`` augmentation\nto ``True``. This will attempt to guess the \"View\" for the workload\nautomatically however this is device specific and therefore may need\ncustomizing. If this is required please open the application and execute\n``dumpsys SurfaceFlinger --list`` on the device via adb. This will provide a\nlist of all views available for measuring.\n\nAs an example, when trying to find the view for the AngryBirds Rio workload you\nmay get something like:\n\n.. 
code-block:: none\n\n        ...\n        AppWindowToken{41dfe54 token=Token{77819a7 ActivityRecord{a151266 u0 com.rovio.angrybirdsrio/com.rovio.fusion.App t506}}}#0\n        a3d001c com.rovio.angrybirdsrio/com.rovio.fusion.App#0\n        Background for -SurfaceView - com.rovio.angrybirdsrio/com.rovio.fusion.App#0\n        SurfaceView - com.rovio.angrybirdsrio/com.rovio.fusion.App#0\n        com.rovio.angrybirdsrio/com.rovio.fusion.App#0\n        boostedAnimationLayer#0\n        mAboveAppWindowsContainers#0\n        ...\n\nFrom these ``\"SurfaceView - com.rovio.angrybirdsrio/com.rovio.fusion.App#0\"`` is\nmost likely the View that needs to be set as the ``view`` workload\nparameter and will be picked up by the ``fps`` augmentation.\n\n\n**Q:** I am getting an error which looks similar to ``'CONFIG_SND_BT87X is not exposed in kernel config'...``\n-------------------------------------------------------------------------------------------------------------\n**A:** If you are receiving this under normal operation this can be caused by a\nmismatch of your WA and devlib versions. Please update both to their latest\nversions and delete your ``$USER_HOME/.workload_automation/cache/targets.json``\n(or equivalent) file.\n\n**Q:** I get an error which looks similar to ``UnicodeDecodeError('ascii' codec can't decode byte...``\n------------------------------------------------------------------------------------------------------\n**A:** If you receive this error or a similar warning about your environment,\nplease ensure that you configure your environment to use a locale which supports\nUTF-8. 
Otherwise this can cause issues when attempting to parse files containing\nnon-ASCII characters.\n\n**Q:** I get the error ``Module \"X\" failed to install on target``\n------------------------------------------------------------------------------------------------------\n**A:** By default a set of devlib modules will be automatically loaded onto the\ntarget designed to add additional functionality. If the functionality provided\nby the module is not required then the module can be safely disabled by setting\n``load_default_modules`` to ``False`` in the ``device_config`` entry of the\n:ref:`agenda <config-agenda-entry>` and then re-enabling any specific modules\nthat are still required. An example agenda snippet is shown below:\n\n.. code-block:: none\n\n    config:\n        device: generic_android\n        device_config:\n            load_default_modules: False\n            modules: ['list', 'of', 'modules', 'to', 'enable']\n"
  },
  {
    "path": "doc/source/glossary.rst",
    "content": ".. _glossary:\n\nGlossary\n========\n\n.. glossary::\n\n    Agenda\n        An agenda specifies what is to be done during a Workload Automation\n        run. This includes which workloads will be run, with what configuration\n        and which augmentations will be enabled, etc. (For more information\n        please see the :ref:`Agenda Reference <agenda-reference>`.)\n\n    Alias\n        An alias associated with a workload or a parameter. In case of\n        parameters, this is simply an alternative name for a parameter; Usually\n        these are employed to provide backward compatibility for renamed\n        parameters, or in cases where a there are several commonly used terms,\n        each equally valid, for something.\n\n        In case of Workloads, aliases can also be merely alternatives to the\n        workload name, however they can also alter the default values for the\n        parameters the Workload is instantiated with. A common scenario is when\n        a single workload can be run under several distinct configurations (e.g.\n        has several alternative tests that might be run) that are configurable\n        via a parameter. An alias may be added for each such configuration. In\n        order to see the available aliases for a workload, one can use :ref:`show\n        command <show-command>`\\ .\n\n        .. seealso:: :term:`Global Alias`\n\n    Artifact\n        An artifact is something that was been generated as part of the run\n        for example a file containing output or meta data in the form of log\n        files. 
WA supports multiple \"kinds\" of artifacts and will handle them\n        accordingly, for more information please see the\n        :ref:`Developer Reference <artifact>`.\n\n    Augmentation\n        Augmentations are plugins that augment the execution of\n        workload jobs with additional functionality; usually, that takes the\n        form of generating additional metrics and/or artifacts, such as traces\n        or logs. For more information please see\n        :ref:`augmentations <augmentations>`.\n\n    Classifier\n        An arbitrary key-value pair that may be associated with a :term:`job`\\ , a\n        :term:`metric`\\ , or an :term:`artifact`. The key must be a string. The\n        value can be any simple scalar type (string, integer, boolean, etc).\n        These have no pre-defined meaning but may be used to aid\n        filtering/grouping of metrics and artifacts during output processing.\n\n        .. seealso:: :ref:`classifiers`.\n\n    Global Alias\n        Typically, values for plugin parameters are specified name spaced under\n        the plugin's name in the configuration. A global alias is an alias that\n        may be specified at the top level in configuration.\n\n        There are two common reasons for this. First, several plugins might\n        specify the same global alias for the same parameter, thus allowing all\n        of them to be configured with one setting. Second, a plugin may not be\n        exposed directly to the user (e.g. resource getters) so it makes more\n        sense to treat its parameters as global configuration values.\n\n        .. seealso:: :term:`Alias`\n\n    Instrument\n        A WA \"Instrument\" can be quite diverse in its functionality, but\n        the majority of those available are there to collect some kind of\n        additional data (such as trace, energy readings etc.) from the device\n        during workload execution. 
To see available instruments please use the\n        :ref:`list command <list-command>` or see the\n        :ref:`Plugin Reference <instruments>`.\n\n    Job\n        A single execution of a workload. A job is defined by an associated\n        :term:`spec`. However, multiple jobs can share the same spec;\n        E.g. Even if you only have 1 workload to run but wanted 5 iterations\n        then 5 individual jobs will be generated to be run.\n\n    Metric\n        A single numeric measurement or score collected during job execution.\n\n    Output Processor\n        An \"Output Processor\" is what is used to process the output\n        generated by a workload. They can simply store the results in a presentable\n        format or use the information collected to generate additional metrics.\n        To see available output processors please use the\n        :ref:`list command <list-command>` or see the\n        :ref:`Plugin Reference <output-processors>`.\n\n    Run\n        A single execution of the `wa run` command. A run consists of one or more\n        :term:`job`\\ s, and results in a single output directory structure\n        containing job results and metadata.\n\n    Section\n        A set of configurations for how jobs should be run. The\n        settings in them take less precedence than workload-specific settings. For\n        every section, all jobs will be run again, with the changes\n        specified in the section's agenda entry. Sections\n        are useful for several runs in which global settings change.\n\n    Spec\n        A specification of a workload. For example you can have a single\n        workload specification that is then executed multiple times if you\n        desire multiple iterations but the configuration for the workload will\n        remain the same. In WA2 the term \"iteration\" used to refer to the same\n        underlying idea as spec now does. 
It should be noted however, that this\n        is no longer the case and an iteration is merely a configuration point\n        in WA3. Spec is to blueprint as job is to product.\n\n    WA\n        Workload Automation. The full name of this framework.\n\n    Workload\n        A workload is the lowest level specification for tasks that need to be run\n        on a target. A workload can have multiple iterations, and be run additional\n        multiples of times dependent on the number of sections.\n"
  },
  {
    "path": "doc/source/index.rst",
    "content": ".. Workload Automation 3 documentation master file,\n\n================================================\nWelcome to Documentation for Workload Automation\n================================================\n\nWorkload Automation (WA) is a framework for executing workloads and collecting\nmeasurements on Android and Linux devices. WA includes automation for nearly 40\nworkloads and supports some common instrumentation (ftrace, hwmon) along with a\nnumber of output formats.\n\nWA is designed primarily as a developer tool/framework to facilitate data driven\ndevelopment by providing a method of collecting measurements from a device in a\nrepeatable way.\n\nWA is highly extensible. Most of the concrete functionality is\nimplemented via :ref:`plug-ins <plugin-reference>`, and it is easy to\n:ref:`write new plug-ins <writing-plugins>` to support new device types,\nworkloads, instruments or output processing.\n\n.. note:: To see the documentation of individual plugins please see the\n          :ref:`Plugin Reference <plugin-reference>`.\n\n.. contents:: Contents\n\n\nWhat's New\n==========\n\n.. toctree::\n   :maxdepth: 1\n\n   changes\n   migration_guide\n\nUser Information\n================\n\nThis section lists general usage documentation. If you're new to WA3, it is\nrecommended you start with the :ref:`User Guide <user-guide>` page. This section also contains\ninstallation and configuration guides.\n\n.. toctree::\n   :maxdepth: 3\n\n   user_information\n\n\n.. _in-depth:\n\nDeveloper Information\n=====================\n\nThis section contains more advanced topics, such how to write your own Plugins\nand detailed descriptions of how WA functions under the hood.\n\n.. toctree::\n   :maxdepth: 3\n\n   developer_information\n\n\nPlugin Reference\n================\n\n.. toctree::\n   :maxdepth: 2\n\n   plugins\n\nAPI\n===\n\n.. toctree::\n    :maxdepth: 2\n\n    api\n\nGlossary\n========\n\n.. toctree::\n    :maxdepth: 2\n\n    glossary\n\nFAQ\n====\n\n.. 
toctree::\n   :maxdepth: 2\n\n   faq\n"
  },
  {
    "path": "doc/source/instrument_method_map.template",
    "content": ".. _instruments_method_map:\n\nInstrumentation Signal-Method Mapping\n^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n\nInstrument methods get automatically hooked up to signals based on their names.\nMostly, the method name corresponds to the name of the signal, however there are\na few convenience aliases defined (listed first) to make  easier to relate\ninstrumentation code to the workload execution model. For an overview on when\nthese signals are dispatched during execution please see the\n:ref:`Developer Reference <signal_dispatch>`.\n\n$signal_names\n\nThe methods above may be decorated with on the listed decorators to set the\npriority (a value in the ``wa.framework.signal.CallbackPriority`` enum) of the\nInstrument method relative to other callbacks registered for the signal (within\nthe same priority level, callbacks are invoked in the order they were\nregistered). The table below shows the mapping of the decorator to the\ncorresponding priority name and level:\n\n$priority_prefixes\n\n\nUnresponsive Targets\n~~~~~~~~~~~~~~~~~~~~\n\nIf a target is believed to be unresponsive, instrument callbacks will be\ndisabled to prevent a cascade of errors and potential corruptions of state, as\nit is generally assumed that instrument callbacks will want to do something with\nthe target.\n\nIf your callback only does something with the host, and does not require an\nactive target connection, you can decorate it with ``@hostside`` decorator to\nensure it gets invoked even if the target becomes unresponsive.\n"
  },
  {
    "path": "doc/source/migration_guide.rst",
    "content": ".. _migration-guide:\n\nMigration Guide\n================\n\n.. contents:: Contents\n   :depth: 4\n   :local:\n\nUsers\n\"\"\"\"\"\n\nConfiguration\n--------------\n\nDefault configuration file change\n^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\nInstead of the standard ``config.py`` file located at\n``$WA_USER_DIRECTORY/config.py`` WA now uses a ``confg.yaml`` file (at the same\nlocation) which is written in the YAML format instead of python. Additionally\nupon first invocation WA3 will automatically try and detect whether a WA2 config\nfile is present and convert it to use the new WA3 format. During this process\nany known parameter name changes should be detected and updated accordingly.\n\nPlugin Changes\n^^^^^^^^^^^^^^^\nPlease note that not all plugins that were available for WA2 are currently\navailable for WA3 so you may need to remove plugins that are no longer present\nfrom your config files. One plugin of note is the ``standard`` results\nprocessor, this has been removed and it's functionality built into the core\nframework.\n\n--------------------------------------------------------\n\nAgendas\n-------\n\nWA3 is designed to keep configuration as backwards compatible as possible so\nmost agendas should work out of the box, however the main changes in the style\nof WA3 agendas are:\n\nGlobal Section\n^^^^^^^^^^^^^^\nThe ``global`` and ``config`` sections have been merged so now all configuration\nthat was specified under the \"global\" keyword can now also be specified under\n\"config\". Although \"global\"  is still a valid keyword you will need to ensure that\nthere are not duplicated entries in each section.\n\nInstrumentation and Results Processors merged\n^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n\nThe ``instrumentation`` and ``results_processors`` sections from WA2 have now\nbeen merged into a single ``augmentations`` section to simplify the\nconfiguration process. 
Although for backwards compatibility, support for the old\nsections has been retained.\n\n\nPer workload enabling of augmentations\n^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\nAll augmentations can now be enabled and disabled on a per workload basis.\n\n\nSetting Runtime Parameters\n^^^^^^^^^^^^^^^^^^^^^^^^^^\n:ref:`Runtime Parameters <runtime-parameters>` are now the preferred way of\nconfiguring cpufreq, hotplug and cpuidle rather than setting the corresponding\nsysfile values as this will perform additional validation and ensure the nodes\nare set in the correct order to avoid any conflicts.\n\nParameter Changes\n^^^^^^^^^^^^^^^^^\nAny parameter name changes listed below will also have their old names\nspecified as aliases and should continue to work as normal, however going forward\nthe new parameter names should be preferred:\n\n- The workload parameter :confval:`clean_up` has been renamed to :confval:`cleanup_assets` to\n  better reflect its purpose.\n\n- The workload parameter :confval:`check_apk` has been renamed to\n  :confval:`prefer_host_package` to be more explicit in its functionality to indicate\n  whether a package on the target or the host should have priority when\n  searching for a suitable package.\n\n- The execution order ``by_spec`` is now called ``by_workload`` for clarity of\n  purpose. For more information please see :ref:`configuration-specification`.\n\n- The ``by_spec`` reboot policy has been removed as this is no longer relevant\n  and the ``each_iteration`` reboot policy has been renamed to ``each_job``,\n  please see :ref:`configuration-specification` for more information.\n\nIndividual workload parameters have been attempted to be standardized for the\nmore common operations e.g.:\n\n  - :confval:`iterations` is now :confval:`loops` to indicate how many\n    'tight loops' of the workload should be performed, e.g. 
without the\n    setup/teardown method calls.\n  - :confval:`num_threads` is now consistently :confval:`threads` across workloads.\n  - :confval:`run_timeout` is now consistently :confval:`timeout` across workloads.\n  - :confval:`taskset_mask` and :confval:`cpus` have been changed to\n    consistently be referred to as :confval:`cpus` and its types is now\n    a :class:`cpu_mask` type allowing configuration to be supplied either\n    directly as a mask, as a list of a list of cpu indexes or as a sysfs-style\n    string.\n\nOutput\n^^^^^^^\nOutput Directory\n~~~~~~~~~~~~~~~~\nThe :ref:`output directory <output_directory>`'s structure has changed layout\nand now includes additional subdirectories. There is now a ``__meta`` directory\nthat contains copies of the agenda and config files supplied to WA for that\nparticular run so that all the relevant config is self contained. Additionally\nif one or more jobs fail during a run then corresponding output directory will be\nmoved into a ``__failed`` subdirectory to allow for quicker analysis.\n\n\nOutput API\n~~~~~~~~~~\nThere is now an Output API which can be used to more easily post process the\noutput from a run. For more information please see the\n:ref:`Output API <output_processing_api>` documentation.\n\n\n-----------------------------------------------------------\n\nDevelopers\n\"\"\"\"\"\"\"\"\"\"\"\"\n\nFramework\n---------\n\nImports\n^^^^^^^\n\nTo distinguish between the different versions of WA, WA3's package name has been\nrenamed to ``wa``. This means that all the old ``wlauto`` imports will need to\nbe updated. For more information please see the corresponding section in the\n:ref:`developer reference section<developer_reference>`\n\nAsset Deployment\n^^^^^^^^^^^^^^^^^^\nWA3 now contains a generic assets deployment and clean up mechanism so if a\nworkload was previously doing this in an ad-hoc manner this should be updated to\nutilize the new functionality. 
To make use of this functionality a list of\nassets should be set as the workload ``deployable_assets`` attribute, these will\nbe automatically retrieved via WA's resource getters and deployed either to the\ntargets working directory or a custom directory specified as the workloads\n``assets_directory`` attribute. If a custom implementation is required the\n``deploy_assets`` method should be overridden inside the workload. To allow for\nthe removal of the additional assets any additional file paths should be added\nto the ``self.deployed_assets`` list which is used to keep track of any assets\nthat have been deployed for the workload. This is what is used by the generic\n``remove_assets`` method to clean up any files deployed to the target.\nOptionally if the file structure of the deployed assets requires additional\nlogic then the ``remove_assets`` method can be overridden for a particular\nworkload as well.\n\n--------------------------------------------------------\n\nWorkloads\n---------\n\nPython Workload Structure\n^^^^^^^^^^^^^^^^^^^^^^^^^^\n- The ``update_results`` method has been split out into 2 stages. There is now\n  ``extract_results`` and ``update_output`` which should be used for extracting\n  any results from the target back to the host system and to update the output\n  with any metrics or artefacts for the specific workload iteration respectively.\n\n- WA now features :ref:`execution decorators <execution-decorators>` which can\n  be used to allow for more efficient binary deployment and that they are only\n  installed to the device once per run. For more information of implementing\n  this please see\n  :ref:`deploying executables to a target <deploying-executables>`.\n\n\nAPK Functionality\n^^^^^^^^^^^^^^^^^\nAll apk functionality has re-factored into an APKHandler object which is\navailable as the apk attribute of the workload. 
This means that for example\n``self.launchapplication()`` would now become ``self.apk.start_activity()``\n\n\nUiAutomator Java Structure\n^^^^^^^^^^^^^^^^^^^^^^^^^^\nInstead of a single ``runUiAutomation`` method to perform all of the UiAutomation,\nthe structure has been refactored into 5 methods that can optionally be overridden.\nThe available methods are ``initialize``, ``setup``, ``runWorkload``, ``extractResults``\nand ``teardown`` to better mimic the different stages in the python workload.\n\n\n  - ``initialize`` should be used to retrieve\n    and set any relevant parameters required during the workload.\n  - ``setup`` should be used to perform any setup required for the workload, for\n    example dismissing popups or configuring any required settings.\n  - ``runWorkload`` should be used to perform the actual measurable work of the workload.\n  - ``extractResults`` should be used to extract any relevant results from the\n    target after the workload has been completed.\n  - ``teardown`` should be used to perform any final clean up of the workload on the target.\n\n.. note:: The ``initialize`` method should have the ``@Before`` tag attached\n     to the method which will cause it to be run before each of the stages of\n     the workload.  The remaining methods should all have the ``@Test`` tag\n     attached to the method to indicate that this is a test stage that should be\n     called at the appropriate time.\n\nGUI Functionality\n^^^^^^^^^^^^^^^^^\nFor UI based applications all UI functionality has been re-factored into a\n``gui`` attribute which currently will be either a ``UiAutomatorGUI`` object or\na ``ReventGUI`` depending on the workload type. 
This means that for example if\nyou wish to pass parameters to a UiAuotmator workload you will now need to use\n``self.gui.uiauto_params['Parameter Name'] = value``\n\nAttributes\n^^^^^^^^^^\n- The old ``package`` attribute has been replaced by ``package_names`` which\n  expects a list of strings which allows for multiple package names to be\n  specified if required. It is also no longer required to explicitly state the\n  launch-able activity, this will be automatically discovered from the apk so this\n  workload attribute can be removed.\n\n- The ``device`` attribute of the workload is now a devlib ``target``. Some of the\n  command names remain the same, however there will be differences. The API can be\n  found at http://devlib.readthedocs.io/en/latest/target.html however some of\n  the more common changes can be found below:\n\n\n  +----------------------------------------------+---------------------------------+\n  | Original Method                              | New Method                      |\n  +----------------------------------------------+---------------------------------+\n  |``self.device.pull_file(file)``               | ``self.target.pull(file)``      |\n  +----------------------------------------------+---------------------------------+\n  |``self.device.push_file(file)``               | ``self.target.push(file)``      |\n  +----------------------------------------------+---------------------------------+\n  |``self.device.install_executable(file)``      |  ``self.target.install(file)``  |\n  +----------------------------------------------+---------------------------------+\n  |``self.device.execute(cmd, background=True)`` |  ``self.target.background(cmd)``|\n  +----------------------------------------------+---------------------------------+\n"
  },
  {
    "path": "doc/source/plugins.rst",
    "content": ".. _plugin-reference:\n\n=================\nPlugin Reference\n=================\n\nThis section lists Plugins that currently come with WA3. Each package below\nrepresents a particular type of extension (e.g. a workload); each sub-package of\nthat package is a particular instance of that extension (e.g. the Andebench\nworkload). Clicking on a link will show what the individual extension does,\nwhat configuration parameters it takes, etc.\n\nFor how to implement you own Plugins, please refer to the guides in the\n:ref:`writing plugins <writing-plugins>` section.\n\n.. raw:: html\n\n   <style>\n   td {\n      vertical-align: text-top;\n   }\n   </style>\n   <table <tr><td>\n\n.. toctree::\n   :maxdepth: 2\n\n   plugins/workloads\n\n.. raw:: html\n\n   </td><td>\n\n.. toctree::\n   :maxdepth: 2\n\n   plugins/instruments\n\n.. toctree::\n   :maxdepth: 2\n\n   plugins/energy_instrument_backends\n\n\n\n.. raw:: html\n\n   </td><td>\n\n.. toctree::\n   :maxdepth: 2\n\n   plugins/output_processors\n\n.. raw:: html\n\n   </td><td>\n\n.. toctree::\n   :maxdepth: 2\n\n   plugins/targets\n\n.. raw:: html\n\n   </td></tr></table>\n\n"
  },
  {
    "path": "doc/source/user_information/how_to.rst",
    "content": "*******\nHow Tos\n*******\n\n.. contents:: Contents\n   :depth: 4\n   :local:\n\n.. include:: user_information/how_tos/agenda.rst\n.. include:: user_information/how_tos/device_setup.rst\n.. include:: user_information/how_tos/revent.rst\n"
  },
  {
    "path": "doc/source/user_information/how_tos/agenda.rst",
    "content": ".. _agenda:\n\nDefining Experiments With an Agenda\n===================================\n\nAn agenda specifies what is to be done during a Workload Automation run,\nincluding which workloads will be run, with what configuration, which\naugmentations will be enabled, etc. Agenda syntax is designed to be both\nsuccinct and expressive.\n\nAgendas are specified using YAML_ notation. It is recommended that you\nfamiliarize yourself with the linked page.\n\n.. _YAML: http://en.wikipedia.org/wiki/YAML\n\nSpecifying which workloads to run\n---------------------------------\n\nThe central purpose of an agenda is to specify what workloads to run. A\nminimalist agenda contains a single entry at the top level called \"workloads\"\nthat maps onto a list of workload names to run:\n\n.. code-block:: yaml\n\n        workloads:\n                - dhrystone\n                - memcpy\n                - rt_app\n\nThis specifies a WA run consisting of ``dhrystone`` followed by ``memcpy``, followed by\n``rt_app`` workloads, and using the augmentations specified in\nconfig.yaml (see :ref:`configuration-specification` section).\n\n.. note:: If you're familiar with YAML, you will recognize the above as a single-key\n          associative array mapping onto a list. YAML has two notations for both\n          associative arrays and lists: block notation (seen above) and also\n          in-line notation. This means that the above agenda can also be\n          written in a single line as ::\n\n                workloads: [dhrystone, memcpy, rt-app]\n\n          (with the list in-lined), or ::\n\n                {workloads: [dhrystone, memcpy, rt-app]}\n\n          (with both the list and the associative array in-line). WA doesn't\n          care which of the notations is used as they all get parsed into the\n          same structure by the YAML parser. You can use whatever format you\n          find easier/clearer.\n\n.. 
note:: WA plugin names are case-insensitive, and dashes (``-``) and\n          underscores (``_``) are treated identically. So all of the following\n          entries specify the same workload: ``rt_app``, ``rt-app``, ``RT-app``.\n\nMultiple iterations\n-------------------\n\nThere will normally be some variability in workload execution when running on a\nreal device. In order to quantify it, multiple iterations of the same workload\nare usually performed. You can specify the number of iterations for each\nworkload by adding an ``iterations`` field to the workload specifications (or\n\"specs\"):\n\n.. code-block:: yaml\n\n        workloads:\n                - name: dhrystone\n                  iterations: 5\n                - name: memcpy\n                  iterations: 5\n                - name: cyclictest\n                  iterations: 5\n\nNow that we're specifying both the workload name and the number of iterations in\neach spec, we have to explicitly name each field of the spec.\n\nIt is often the case that, as in the example above, you will want to run all\nworkloads for the same number of iterations. Rather than having to specify it\nfor each and every spec, you can do so with a single entry by adding `iterations`\nto your ``config`` section in your agenda:\n\n.. code-block:: yaml\n\n        config:\n                iterations: 5\n        workloads:\n                - dhrystone\n                - memcpy\n                - cyclictest\n\nIf the same field is defined both in the config section and in a spec, then the\nvalue in the spec will overwrite the global value. For example, suppose we\nwanted to run all our workloads for five iterations, except cyclictest which we\nwant to run for ten (e.g. because we know it to be particularly unstable). This\ncan be specified like this:\n\n.. 
code-block:: yaml\n\n        config:\n                iterations: 5\n        workloads:\n                - dhrystone\n                - memcpy\n                - name: cyclictest\n                  iterations: 10\n\nAgain, because we are now specifying two fields for cyclictest spec, we have to\nexplicitly name them.\n\nConfiguring Workloads\n---------------------\n\nSome workloads accept configuration parameters that modify their behaviour. These\nparameters are specific to a particular workload and can alter the workload in\nany number of ways, e.g. set the duration for which to run, or specify a media\nfile to be used, etc. The vast majority of workload parameters will have some\ndefault value, so it is only necessary to specify the name of the workload in\norder for WA to run it. However, sometimes you want more control over how a\nworkload runs.\n\nFor example, by default, dhrystone will execute 10 million loops across four\nthreads. Suppose your device has six cores available and you want the workload to\nload them all. You also want to increase the total number of loops accordingly\nto 15 million. You can specify this using dhrystone's parameters:\n\n.. code-block:: yaml\n\n        config:\n                iterations: 5\n        workloads:\n                - name: dhrystone\n                  params:\n                        threads: 6\n                        mloops: 15\n                - memcpy\n                - name: cyclictest\n                  iterations: 10\n\n.. note:: You can find out what parameters a workload accepts by looking it up\n          in the :ref:`Workloads` section or using WA itself with \"show\"\n          command::\n\n                wa show dhrystone\n\n          see the :ref:`Invocation` section for details.\n\nIn addition to configuring the workload itself, we can also specify\nconfiguration for the underlying device which can be done by setting runtime\nparameters in the workload spec. 
Explicit runtime parameters have been exposed for\nconfiguring cpufreq, hotplug and cpuidle. For more detailed information on Runtime\nParameters see the :ref:`runtime parameters <runtime-parameters>` section. For\nexample, suppose we want to ensure the maximum score for our benchmarks, at the\nexpense of power consumption so we want to set the cpufreq governor to\n\"performance\" and enable all of the cpus on the device, (assuming there are 8\ncpus available), which can be done like this:\n\n.. code-block:: yaml\n\n        config:\n                iterations: 5\n        workloads:\n                - name: dhrystone\n                  runtime_params:\n                        governor: performance\n                        num_cores: 8\n                  workload_params:\n                        threads: 6\n                        mloops: 15\n                - memcpy\n                - name: cyclictest\n                  iterations: 10\n\n\nI've renamed ``params`` to   ``workload_params`` for clarity,\nbut that wasn't strictly necessary as ``params`` is interpreted as\n``workload_params`` inside a workload spec.\n\nRuntime parameters do not automatically reset at the end of workload spec\nexecution, so all subsequent iterations will also be affected unless they\nexplicitly change the parameter (in the example above, performance governor will\nalso be used for ``memcpy`` and ``cyclictest``. 
There are two ways around this:\neither set ``reboot_policy`` WA setting (see :ref:`configuration-specification`\nsection) such that the device gets rebooted between job executions, thus being\nreturned to its initial state, or set the default runtime parameter values in\nthe ``config`` section of the agenda so that they get set for every spec that\ndoesn't explicitly override them.\n\nIf additional configuration of the device is required which are not exposed via\nthe built in runtime parameters, you can write a value to any file exposed on\nthe device using ``sysfile_values``, for example we could have also performed\nthe same configuration manually (assuming we have a big.LITTLE system and our\ncores 0-3 and 4-7 are in 2 separate DVFS domains and so setting the governor for\ncpu0 and cpu4 will affect all our cores) e.g.\n\n.. code-block:: yaml\n\n\n        config:\n                iterations: 5\n        workloads:\n                - name: dhrystone\n                runtime_params:\n                        sysfile_values:\n                            /sys/devices/system/cpu/cpu0/cpufreq/scaling_governor: performance\n                            /sys/devices/system/cpu/cpu4/cpufreq/scaling_governor: performance\n                            /sys/devices/system/cpu/cpu0/online: 1\n                            /sys/devices/system/cpu/cpu1/online: 1\n                            /sys/devices/system/cpu/cpu2/online: 1\n                            /sys/devices/system/cpu/cpu3/online: 1\n                            /sys/devices/system/cpu/cpu4/online: 1\n                            /sys/devices/system/cpu/cpu5/online: 1\n                            /sys/devices/system/cpu/cpu6/online: 1\n                            /sys/devices/system/cpu/cpu7/online: 1\n                workload_params:\n                        threads: 6\n                        mloops: 15\n            - memcpy\n            - name: cyclictest\n                iterations: 10\n\nHere, we're specifying a 
``sysfile_values`` runtime parameter for the device.\nFor more information please see :ref:`setting sysfiles <setting-sysfiles>`.\n\nAPK Workloads\n^^^^^^^^^^^^^\n\nWA has various resource getters that can be configured to locate APK files but\nfor most people APK files should be kept in the\n``$WA_USER_DIRECTORY/dependencies/SOME_WORKLOAD/`` directory. (by default\n``~/.workload_automation/dependencies/SOME_WORKLOAD/``). The\n``WA_USER_DIRECTORY`` environment variable can be used to change the location of\nthis directory. The APK files need to be put into the corresponding directories for\nthe workload they belong to. The name of the file can be anything but as\nexplained below may need to contain certain pieces of information.\n\nAll ApkWorkloads have parameters that affect the way in which APK files are\nresolved, ``exact_abi``, ``force_install`` and ``prefer_host_package``. Their\nexact behaviours are outlined below.\n\n:exact_abi: If this setting is enabled WA's resource resolvers will look for the\n   devices ABI with any native code present in the apk. By default this setting\n   is disabled since most apks will work across all devices. You may wish to\n   enable this feature when working with devices that support multiple ABI's\n   (like 64-bit devices that can run 32-bit APK files) and are specifically\n   trying to test one or the other.\n\n:force_install: If this setting is enabled WA will *always* use the APK file on\n   the host, and re-install it on every iteration. If there is no APK on the\n   host that is a suitable version and/or ABI for the workload WA will error\n   when ``force_install`` is enabled.\n\n:prefer_host_package: This parameter is used to specify a preference over host\n   or target versions of the app. When set to ``True`` WA will prefer the host\n   side version of the APK. It will check if the host has the APK and whether it\n   meets the version requirements of the workload. 
If so, and the target also\n   already has same version nothing will be done, otherwise WA will overwrite\n   the targets installed application with the host version. If the host is\n   missing the APK or it does not meet version requirements WA will fall back to\n   the app on the target if present and is a suitable version. When this\n   parameter is set to ``False`` WA will prefer to use the version already on\n   the target if it meets the workloads version requirements. If it does not it\n   will fall back to searching the host for the correct version. In both modes\n   if neither the host nor target have a suitable version, WA will produce and\n   error and will not run the workload.\n\n:version: This parameter is used to specify which version of uiautomation for\n   the workload is used. In some workloads e.g. ``geekbench`` multiple versions\n   with drastically different UI's are supported. A APKs version will be\n   automatically extracted therefore it is possible to have multiple apks for\n   different versions of a workload present on the host and select between which\n   is used for a particular job by specifying the relevant version in your\n   :ref:`agenda <agenda>`.\n\n:variant_name: Some workloads use variants of APK files, this is usually the\n   case with web browser APK files, these work in exactly the same way as the\n   version.\n\n\nIDs and Labels\n--------------\n\nIt is possible to list multiple specs with the same workload in an agenda. You\nmay wish to do this if you want to run a workload with different parameter values\nor under different runtime configurations of the device. The workload name\ntherefore does not uniquely identify a spec. To be able to distinguish between\ndifferent specs (e.g. in reported results), each spec has an ID which is unique\nto all specs within an agenda (and therefore with a single WA run). 
If an ID\nisn't explicitly specified using ``id`` field (note that the field name is in\nlower case), one will be automatically assigned to the spec at the beginning of\nthe WA run based on the position of the spec within the list. The first spec\n*without an explicit ID* will be assigned ID ``wk1``, the second spec *without an\nexplicit ID*  will be assigned ID ``wk2``, and so forth.\n\nNumerical IDs aren't particularly easy to deal with, which is why it is\nrecommended that, for non-trivial agendas, you manually set the ids to something\nmore meaningful (or use labels -- see below). An ID can be pretty much anything\nthat will pass through the YAML parser. The only requirement is that it is\nunique to the agenda. However, is usually better to keep them reasonably short\n(they don't need to be *globally* unique), and to stick with alpha-numeric\ncharacters and underscores/dashes. While WA can handle other characters as well,\ngetting too adventurous with your IDs may cause issues further down the line\nwhen processing WA output (e.g. when uploading them to a database that may have\nits own restrictions).\n\nIn addition to IDs, you can also specify labels for your workload specs. These\nare similar to IDs but do not have the uniqueness restriction. If specified,\nlabels will be used by some output processes instead of (or in addition to) the\nworkload name. For example, the ``csv`` output processor will put the label in the\n\"workload\" column of the CSV file.\n\nIt is up to you how you chose to use IDs and labels. WA itself doesn't expect\nany particular format (apart from uniqueness for IDs). Below is the earlier\nexample updated to specify explicit IDs and label dhrystone spec to reflect\nparameters used.\n\n.. 
code-block:: yaml\n\n        config:\n                iterations: 5\n        workloads:\n                - id: 01_dhry\n                  name: dhrystone\n                  label: dhrystone_15over6\n                  runtime_params:\n                        cpu0_governor: performance\n                  workload_params:\n                        threads: 6\n                        mloops: 15\n                - id: 02_memc\n                  name: memcpy\n                - id: 03_cycl\n                  name: cyclictest\n                  iterations: 10\n\n.. _using-classifiers:\n\nClassifiers\n------------\n\nClassifiers can be used in 2 distinct ways, the first use is being supplied in\nan agenda as a set of key-value pairs which can be used to help identify sub-tests\nof a run, for example if you have multiple sections in your agenda running\nyour workloads at different frequencies you might want to set a classifier\nspecifying which frequencies are being used. These can then be utilized later,\nfor example with the ``csv`` :ref:`output processor <output-processors>` with\n``use_all_classifiers`` set to ``True`` and this will add additional columns to\nthe output file for each of the classifier keys that have been specified\nallowing for quick comparison.\n\nAn example agenda is shown here:\n\n.. 
code-block:: yaml\n\n        config:\n            augmentations:\n                - csv\n            iterations: 1\n            device: generic_android\n            csv:\n                use_all_classifiers: True\n        sections:\n            - id: max_speed\n              runtime_parameters:\n                  frequency: 1700000\n              classifiers:\n                  freq: 1700000\n            - id: min_speed\n              runtime_parameters:\n                  frequency: 200000\n              classifiers:\n                  freq: 200000\n        workloads:\n        -   name: recentfling\n\nThe other way that they can used is by being automatically added by some\nworkloads to identify their results metrics and artifacts. For example some\nworkloads perform multiple tests with the same execution run and therefore will\nuse metrics to differentiate between them, e.g. the ``recentfling`` workload\nwill use classifiers to distinguish between which loop a particular result is\nfor or whether it is an average across all loops ran.\n\nThe output from the agenda above will produce a csv file similar to what is\nshown below. 
Some columns have been omitted for clarity however as can been seen\nthe custom **frequency** classifier column has been added and populated, along\nwith the **loop** classifier added by the workload.\n\n::\n\n id              | workload      | metric                    | freq      | loop    | value ‖\n max_speed-wk1   | recentfling   | 90th Percentile           | 1700000   | 1       | 8     ‖\n max_speed-wk1   | recentfling   | 95th Percentile           | 1700000   | 1       | 9     ‖\n max_speed-wk1   | recentfling   | 99th Percentile           | 1700000   | 1       | 16    ‖\n max_speed-wk1   | recentfling   | Jank                      | 1700000   | 1       | 11    ‖\n max_speed-wk1   | recentfling   | Jank%                     | 1700000   | 1       | 1     ‖\n # ...\n max_speed-wk1   | recentfling   | Jank                      | 1700000   | 3       | 1     ‖\n max_speed-wk1   | recentfling   | Jank%                     | 1700000   | 3       | 0     ‖\n max_speed-wk1   | recentfling   | Average 90th Percentqile  | 1700000   | Average | 7     ‖\n max_speed-wk1   | recentfling   | Average 95th Percentile   | 1700000   | Average | 8     ‖\n max_speed-wk1   | recentfling   | Average 99th Percentile   | 1700000   | Average | 14    ‖\n max_speed-wk1   | recentfling   | Average Jank              | 1700000   | Average | 6     ‖\n max_speed-wk1   | recentfling   | Average Jank%             | 1700000   | Average | 0     ‖\n min_speed-wk1   | recentfling   | 90th Percentile           | 200000    | 1       | 7     ‖\n min_speed-wk1   | recentfling   | 95th Percentile           | 200000    | 1       | 8     ‖\n min_speed-wk1   | recentfling   | 99th Percentile           | 200000    | 1       | 14    ‖\n min_speed-wk1   | recentfling   | Jank                      | 200000    | 1       | 5     ‖\n min_speed-wk1   | recentfling   | Jank%                     | 200000    | 1       | 0     ‖\n # ...\n min_speed-wk1   | recentfling   | Jank                      | 200000    | 3      
 | 5     ‖\n min_speed-wk1   | recentfling   | Jank%                     | 200000    | 3       | 0     ‖\n min_speed-wk1   | recentfling   | Average 90th Percentile   | 200000    | Average | 7     ‖\n min_speed-wk1   | recentfling   | Average 95th Percentile   | 200000    | Average | 8     ‖\n min_speed-wk1   | recentfling   | Average 99th Percentile   | 200000    | Average | 13    ‖\n min_speed-wk1   | recentfling   | Average Jank              | 200000    | Average | 4     ‖\n min_speed-wk1   | recentfling   | Average Jank%             | 200000    | Average | 0     ‖\n\n\n\n.. _sections:\n\nSections\n--------\n\nIt is a common requirement to be able to run the same set of workloads under\ndifferent device configurations. E.g. you may want to investigate the impact of\nchanging a particular setting to different values on the benchmark scores, or to\nquantify the impact of enabling a particular feature in the kernel. WA allows\nthis by defining \"sections\" of configuration with an agenda.\n\nFor example, suppose that we want to measure the impact of using 3 different\ncpufreq governors on 2 benchmarks. We could create 6 separate workload specs\nand set the governor runtime parameter for each entry. However, this\nintroduces a lot of duplication; and what if we want to change spec\nconfiguration? We would have to change it in multiple places, running the risk\nof forgetting one.\n\nA better way is to keep the two workload specs and define a section for each\ngovernor:\n\n.. 
code-block:: yaml\n\n        config:\n                iterations: 5\n                augmentations:\n                    - ~cpufreq\n                    - csv\n                sysfs_extractor:\n                        paths: [/proc/meminfo]\n                csv:\n                    use_all_classifiers: True\n        sections:\n                - id: perf\n                  runtime_params:\n                        cpu0_governor: performance\n                - id: inter\n                  runtime_params:\n                        cpu0_governor: interactive\n                - id: sched\n                  runtime_params:\n                        cpu0_governor: sched\n        workloads:\n                - id: 01_dhry\n                  name: dhrystone\n                  label: dhrystone_15over6\n                  workload_params:\n                        threads: 6\n                        mloops: 15\n                - id: 02_memc\n                  name: memcpy\n                  augmentations: [sysfs_extractor]\n\nA section, just like an workload spec, needs to have a unique ID. Apart from\nthat, a \"section\" is similar to the ``config`` section we've already seen --\neverything that goes into a section will be applied to each workload spec.\nWorkload specs defined under top-level ``workloads`` entry will be executed for\neach of the sections listed under ``sections``.\n\n.. note:: It is also possible to have a ``workloads`` entry within a section,\n          in which case, those workloads will only be executed for that specific\n          section.\n\nIn order to maintain the uniqueness requirement of workload spec IDs, they will\nbe namespaced under each section by prepending the section ID to the spec ID\nwith a dash. So in the agenda above, we no longer have a workload spec\nwith ID ``01_dhry``, instead there are two specs with IDs ``perf-01-dhry`` and\n``inter-01_dhry``.\n\nNote that the ``config`` section still applies to every spec in the agenda. 
So\nthe precedence order is -- spec settings override section settings, which in\nturn override global settings.\n\n\n.. _section-groups:\n\nSection Groups\n---------------\n\nSection groups are a way of grouping sections together and are used to produce a\ncross product of each of the different groups. This can be useful when you want\nto run a set of experiments with all the available combinations without having\nto specify each combination manually.\n\nFor example if we want to investigate the differences between running the\nmaximum and minimum frequency with both the maximum and minimum number of cpus\nonline, we can create an agenda as follows:\n\n.. code-block:: yaml\n\n        sections:\n          - id: min_freq\n          runtime_parameters:\n              freq: min\n          group: frequency\n         - id: max_freq\n          runtime_parameters:\n              freq: max\n          group: frequency\n\n         - id: min_cpus\n           runtime_parameters:\n              cpus: 1\n          group: cpus\n         - id: max_cpus\n           runtime_parameters:\n              cpus: 8\n          group: cpus\n\n        workloads:\n        -  dhrystone\n\nThis will results in 8 jobs being generated for each of the possible combinations.\n\n::\n\n      min_freq-min_cpus-wk1 (dhrystone)\n      min_freq-max_cpus-wk1 (dhrystone)\n      max_freq-min_cpus-wk1 (dhrystone)\n      max_freq-max_cpus-wk1 (dhrystone)\n      min_freq-min_cpus-wk1 (dhrystone)\n      min_freq-max_cpus-wk1 (dhrystone)\n      max_freq-min_cpus-wk1 (dhrystone)\n      max_freq-max_cpus-wk1 (dhrystone)\n\nEach of the generated jobs will have :ref:`classifiers <classifiers>` for\neach group and the associated id automatically added.\n\n.. 
code-block:: python\n\n      # ...\n      print('Job ID: {}'.format(job.id))\n      print('Classifiers:')\n      for k, v in job.classifiers.items():\n          print('  {}: {}'.format(k, v))\n\n      Job ID: min_freq-min_cpus-no_idle-wk1\n      Classifiers:\n          frequency: min_freq\n          cpus: min_cpus\n\n\n.. _augmentations:\n\nAugmentations\n--------------\n\nAugmentations are plugins that augment the execution of workload jobs with\nadditional functionality; usually, that takes the form of generating additional\nmetrics and/or artifacts, such as traces or logs. There are two types of\naugmentations:\n\nInstruments\n        These \"instrument\" a WA run in order to change it's behaviour (e.g.\n        introducing delays between successive job executions), or collect\n        additional measurements (e.g. energy usage). Some instruments may depend\n        on particular features being enabled on the target (e.g. cpufreq), or\n        on additional hardware (e.g. energy probes).\n\nOutput processors\n        These post-process metrics and artifacts generated by workloads or\n        instruments, as well as target metadata collected by WA, in order to\n        generate additional metrics and/or artifacts (e.g. generating statistics\n        or reports). Output processors are also used to export WA output\n        externally (e.g. upload to a database).\n\nThe main practical difference between instruments and output processors, is that\nthe former rely on an active connection to the target to function, where as the\nlatter only operated on previously collected results and metadata. This means\nthat output processors can run \"off-line\" using ``wa process`` command.\n\nBoth instruments and output processors are configured in the same way in the\nagenda, which is why they are grouped together into \"augmentations\".\nAugmentations are enabled by listing them under ``augmentations`` entry in a\nconfig file or ``config`` section of the agenda.\n\n.. 
code-block:: yaml\n\n        config:\n                augmentations: [trace-cmd]\n\nThe code above illustrates an agenda entry to enable the ``trace-cmd`` instrument.\n\nIf you have multiple ``augmentations`` entries (e.g. both, in your config file\nand in the agenda), then they will be combined, so that the final set of\naugmentations for the run will be their union.\n\n.. note:: WA2 did not have augmentations, and instead supported\n          \"instrumentation\" and \"result_processors\" as distinct configuration\n          entries. For compatibility, these entries are still supported in\n          WA3, however they should be considered to be deprecated, and their\n          use is discouraged.\n\n\nConfiguring augmentations\n^^^^^^^^^^^^^^^^^^^^^^^^^\n\nMost augmentations will take parameters that modify their behavior. Parameters\navailable for a particular augmentation can be viewed using ``wa show\n<augmentation name>`` command. This will also show the default values used.\nValues for these parameters can be specified by creating an entry with the\naugmentation's name, and specifying parameter values under it.\n\n.. code-block:: yaml\n\n        config:\n                augmentations: [trace-cmd]\n                trace-cmd:\n                        events: ['sched*', 'power*', irq]\n                        buffer_size: 100000\n\nThe code above specifies values for ``events`` and ``buffer_size`` parameters\nfor the ``trace-cmd`` instrument, as well as enabling it.\n\nYou may specify configuration for the same augmentation in multiple locations\n(e.g. your config file and the config section of the agenda). These entries will\nbe combined to form the final configuration for the augmentation used during the\nrun. If different values for the same parameter are present in multiple entries,\nthe ones \"more specific\" to a particular run will be used (e.g. values in the\nagenda will override those in the config file).\n\n.. 
note:: Creating an entry for an augmentation alone does not enable it! You\n          **must** list it under ``augmentations`` in order for it to be enabled\n          for a run. This makes it easier to quickly enable and disable\n          augmentations with complex configurations, and also allows defining\n          \"static\" configuration in top-level config, without actually enabling\n          the augmentation for all runs.\n\n\nDisabling augmentations\n^^^^^^^^^^^^^^^^^^^^^^^\n\nSometimes, you may wish to disable an augmentation for a particular run, but you\nwant to keep it enabled in general. You *could* modify your config file to\ntemporarily disable it. However, you must then remember to re-enable it\nafterwards. This could be inconvenient and error prone, especially if you're\nrunning multiple experiments in parallel and only want to disable the\naugmentation for one of them.\n\nInstead, you can explicitly disable an augmentation by specifying its name prefixed\nwith a tilde (``~``) inside ``augmentations``.\n\n.. code-block:: yaml\n\n        config:\n                augmentations: [trace-cmd, ~cpufreq]\n\nThe code above enables ``trace-cmd`` instrument and disables ``cpufreq``\ninstrument (which is enabled in the default config).\n\nIf you want to start configuration for an experiment from a \"blank slate\" and\nwant to disable all previously-enabled augmentations, without necessarily\nknowing what they are, you can use the special ``~~`` entry.\n\n.. code-block:: yaml\n\n        config:\n                augmentations: [~~, trace-cmd, csv]\n\nThe code above disables all augmentations enabled up to that point, and enables\n``trace-cmd`` and ``csv`` for this run.\n\n.. note:: The ``~~`` only disables augmentations from previously-processed\n          sources. Its ordering in the list does not matter. For example,\n          specifying ``augmentations: [trace-cmd, ~~, csv]`` will have exactly\n          the same effect as above -- i.e. 
both trace-cmd *and* csv will be\n          enabled.\n\nWorkload-specific augmentation\n^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n\nIt is possible to enable or disable (but not configure) augmentations at\nworkload or section level, as well as in the global config, in which case, the\naugmentations would only be enabled/disabled for that workload/section. If the\nsame augmentation is enabled at one level and disabled at another, as with all\nWA configuration, the more specific settings will take precedence over the less\nspecific ones (i.e. workloads override sections that, in turn, override global\nconfig).\n\n\nAugmentations Example\n^^^^^^^^^^^^^^^^^^^^^\n\n\n.. code-block:: yaml\n\n        config:\n                augmentations: [~~, fps]\n                trace-cmd:\n                        events: ['sched*', 'power*', irq]\n                        buffer_size: 100000\n                file_poller:\n                        files:\n                                - /sys/class/thermal/thermal_zone0/temp\n        sections:\n                - classifers:\n                        type: energy\n                augmentations: [energy_measurement]\n                - classifers:\n                        type: trace\n                augmentations: [trace-cmd, file_poller]\n        workloads:\n                - gmail\n                - geekbench\n                - googleplaybooks\n                - name: dhrystone\n                  augmentations: [~fps]\n\nThe example above shows an experiment that runs a number of workloads in order\nto evaluate their thermal impact and energy usage. All previously-configured\naugmentations are disabled with ``~~``, so that only configuration specified in\nthis agenda is enabled. 
Since most of the workloads are \"productivity\" use cases\nthat do not generate their own metrics, ``fps`` instrument is enabled to get\nsome meaningful performance metrics for them; the only exception is\n``dhrystone`` which is a benchmark that reports its own metrics and has no GUI,\nso the instrument is disabled for it using ``~fps``.\n\nEach workload will be run in two configurations: once, to collect energy\nmeasurements, and once to collect thermal data and kernel trace. Trace can give\ninsight into why a workload is using more or less energy than expected, but it\ncan be relatively intrusive and might impact absolute energy and performance\nmetrics, which is why it is collected separately. Classifiers_ are used to\nseparate metrics from the two configurations in the results.\n\n.. _other-agenda-configuration:\n\nOther Configuration\n-------------------\n\n.. _configuration_in_agenda:\n\nAs mentioned previously, ``config`` section in an agenda can contain anything\nthat can be defined in ``config.yaml``. Certain configuration (e.g. ``run_name``)\nmakes more sense to define in an agenda than a config file. Refer to the\n:ref:`configuration-specification` section for details.\n\n.. 
code-block:: yaml\n\n        config:\n                project: governor_comparison\n                run_name: performance_vs_interactive\n\n                device: generic_android\n                reboot_policy: never\n\n                iterations: 5\n                augmentations:\n                    - ~cpufreq\n                    - csv\n                sysfs_extractor:\n                        paths: [/proc/meminfo]\n                csv:\n                    use_all_classifiers: True\n        sections:\n                - id: perf\n                  runtime_params:\n                        sysfile_values:\n                        cpu0_governor: performance\n                - id: inter\n                  runtime_params:\n                        cpu0_governor: interactive\n        workloads:\n                - id: 01_dhry\n                  name: dhrystone\n                  label: dhrystone_15over6\n                  workload_params:\n                        threads: 6\n                        mloops: 15\n                - id: 02_memc\n                  name: memcpy\n                  augmentations: [sysfs_extractor]\n                - id: 03_cycl\n                  name: cyclictest\n                  iterations: 10\n"
  },
  {
    "path": "doc/source/user_information/how_tos/device_setup.rst",
    "content": ".. _setting-up-a-device:\n\nSetting Up A Device\n===================\n\nWA should work with most Android devices out-of-the box, as long as the device\nis discoverable by ``adb`` (i.e. gets listed when you run ``adb devices``). For\nUSB-attached devices, that should be the case; for network devices, ``adb connect``\nwould need to be invoked with the IP address of the device. If there is only one\ndevice connected to the host running WA, then no further configuration should be\nnecessary (though you may want to :ref:`tweak some Android settings <configuring-android>`\\ ).\n\nIf you have multiple devices connected, have a non-standard Android build (e.g.\non a development board), or want to use of the more advanced WA functionality,\nfurther configuration will be required.\n\nAndroid\n-------\n\n.. _android-general-device-setup:\n\nGeneral Device Setup\n^^^^^^^^^^^^^^^^^^^^\n\nYou can specify the device interface by setting ``device`` setting in a\n``config`` file or section. Available interfaces can be viewed by running ``wa\nlist targets`` command. If you don't see your specific platform listed (which is\nlikely unless you're using one of the Arm-supplied platforms), then you should\nuse ``generic_android`` interface (this is what is used by the default config).\n\n.. code-block:: yaml\n\n        device: generic_android\n\nThe device interface may be configured through ``device_config`` setting, who's\nvalue is a ``dict`` mapping setting names to their values. Some of the most\ncommon parameters you might want to change are outlined below.\n\n:device: If you have multiple Android devices connected to the host machine, you will\n   need to set this to indicate to WA which device you want it to use. The will\n   be the adb name the is displayed when running ``adb devices``\n\n:working_directory: WA needs a \"working\" directory on the device which it will use for collecting\n   traces, caching assets it pushes to the device, etc. 
By default, it will\n   create one under ``/sdcard`` which should be mapped and writable on standard\n   Android builds. If this is not the case for your device, you will need to\n   specify an alternative working directory (e.g. under ``/data/local``).\n\n:load_default_modules: A number of \"default\" modules (e.g. for cpufreq\n  subsystem) are loaded automatically, unless explicitly disabled. If you\n  encounter an issue with one of the modules then this setting can be set to\n  ``False`` and any specific modules that you require can be request via the\n  ``modules`` entry.\n\n:modules: A list of additional modules to be installed for the target. Devlib\n  implements functionality for particular subsystems as modules. If additional\n  modules need to be loaded, they may be specified using this parameter.\n\n  Please see the `devlib documentation <http://devlib.readthedocs.io/en/latest/modules.html>`_\n  for information on the available modules.\n\n.. _core-names:\n\n:core_names: ``core_names`` should be a list of core names matching the order in which\n   they are exposed in sysfs. For example, Arm TC2 SoC is a 2x3 big.LITTLE\n   system; its core_names would be ``['a7', 'a7', 'a7', 'a15', 'a15']``,\n   indicating that cpu0-cpu2 in cpufreq sysfs structure are A7's and cpu3 and\n   cpu4 are A15's.\n\n   .. note:: This should not usually need to be provided as it will be\n             automatically extracted from the target.\n\n\nA typical ``device_config`` inside ``config.yaml`` may look something like\n\n\n.. code-block:: yaml\n\n        device_config:\n                device: 0123456789ABCDEF\n        # ...\n\n\nor a more specific config could be:\n\n.. 
code-block:: yaml\n\n        device_config:\n                device: 0123456789ABCDEF\n                working_direcory: '/sdcard/wa-working'\n                load_default_modules: True\n                modules: ['hotplug', 'cpufreq']\n                core_names : ['a7', 'a7', 'a7', 'a15', 'a15']\n                # ...\n\n.. _configuring-android:\n\nConfiguring Android\n^^^^^^^^^^^^^^^^^^^\n\nThere are a few additional tasks you may need to perform once you have a device\nbooted into Android (especially if this is an initial boot of a fresh OS\ndeployment):\n\n        - You have gone through FTU (first time usage) on the home screen and\n          in the apps menu.\n        - You have disabled the screen lock.\n        - You have set sleep timeout to the highest possible value (30 mins on\n          most devices).\n        - You have set the locale language to \"English\" (this is important for\n          some workloads in which UI automation looks for specific text in UI\n          elements).\n\n\nJuno Setup\n----------\n\n.. note:: At the time of writing, the Android software stack on Juno was still\n          very immature. Some workloads may not run, and there maybe stability\n          issues with the device.\n\n\nThe full software stack can be obtained from Linaro:\n\nhttps://releases.linaro.org/android/images/lcr-reference-juno/latest/\n\nPlease follow the instructions on the \"Binary Image Installation\" tab on that\npage. More up-to-date firmware and kernel may also be obtained by registered\nmembers from ARM Connected Community: http://www.arm.com/community/ (though this\nis not guaranteed to work with the Linaro file system).\n\nUEFI\n^^^^\n\nJuno uses UEFI_ to boot the kernel image.  UEFI supports multiple boot\nconfigurations, and presents a menu on boot to select (in default configuration\nit will automatically boot the first entry in the menu if not interrupted before\na timeout). 
WA will look for a specific entry in the UEFI menu\n(``'WA'`` by default, but that may be changed by setting ``uefi_entry`` in the\n``device_config``). When following the UEFI instructions on the above Linaro\npage, please make sure to name the entry appropriately (or to correctly set the\n``uefi_entry``).\n\n.. _UEFI: http://en.wikipedia.org/wiki/UEFI\n\nThere are two supported ways for Juno to discover kernel images through UEFI. It\ncan either load them from NOR flash on the board, or from the boot partition on\nthe file system. The setup described on the Linaro page uses the boot partition\nmethod.\n\nIf WA does not find the UEFI entry it expects, it will create one. However, it\nwill assume that the kernel image resides in NOR flash, which means it will not\nwork with Linaro file system. So if you're replicating the Linaro setup exactly,\nyou will need to create the entry manually, as outline on the above-linked page.\n\nRebooting\n^^^^^^^^^\n\nAt the time of writing, normal Android reboot did not work properly on Juno\nAndroid, causing the device to crash into an irrecoverable state. Therefore, WA\nwill perform a hard reset to reboot the device. It will attempt to do this by\ntoggling the DTR line on the serial connection to the device. In order for this\nto work, you need to make sure that SW1 configuration switch on the back panel of\nthe board (the right-most DIP switch) is toggled *down*.\n\n\nLinux\n-----\n\nGeneral Device Setup\n^^^^^^^^^^^^^^^^^^^^\n\nYou can specify the device interface by setting ``device`` setting in a\n``config`` file or section. Available interfaces can be viewed by running\n``wa list targets`` command. If you don't see your specific platform listed\n(which is likely unless you're using one of the Arm-supplied platforms), then\nyou should use ``generic_linux`` interface.\n\n.. 
code-block:: yaml\n\n        device: generic_linux\n\nThe device interface may be configured through ``device_config`` setting, whose\nvalue is a ``dict`` mapping setting names to their values. Some of the most\ncommon parameters you might want to change are outlined below.\n\n\n:host: This should be either the DNS name or IP address of the device.\n\n:username: The login name of the user on the device that WA will use. This user should\n   have a home directory (unless an alternative working directory is specified\n   using ``working_directory`` config -- see below), and, for full\n   functionality, the user should have sudo rights (WA will be able to use\n   sudo-less accounts but some instruments or workloads may not work).\n\n:password: Password for the account on the device. Either this or a ``keyfile`` (see\n   below) must be specified.\n\n:keyfile: If key-based authentication is used, this may be used to specify the SSH identity\n   file instead of the password.\n\n:property_files: This is a list of paths that will be pulled for each WA run into the __meta\n   subdirectory in the results. The intention is to collect meta-data about the\n   device that may aid in reproducing the results later. The paths specified do\n   not have to exist on the device (they will be ignored if they do not). The\n   default list is ``['/proc/version', '/etc/debian_version', '/etc/lsb-release', '/etc/arch-release']``\n\n\nIn addition, ``working_directory``, ``core_names``, ``modules`` etc. can also\nbe specified and have the same meaning as for Android devices (see above).\n\nA typical ``device_config`` inside ``config.yaml`` may look something like\n\n\n.. 
code-block:: yaml\n\n        device_config:\n                host: 192.168.0.7\n                username: guest\n                password: guest\n                # ...\n\nChrome OS\n---------\n\nGeneral Device Setup\n^^^^^^^^^^^^^^^^^^^^\n\nYou can specify the device interface by setting ``device`` setting in a\n``config`` file or section. Available interfaces can be viewed by\nrunning ``wa list targets`` command. If you don't see your specific platform\nlisted (which is likely unless you're using one of the Arm-supplied platforms), then\nyou should use ``generic_chromeos`` interface.\n\n.. code-block:: yaml\n\n        device: generic_chromeos\n\nThe device interface may be configured through ``device_config`` setting, who's\nvalue is a ``dict`` mapping setting names to their values. The ChromeOS target\nis essentially the same as a linux device and requires a similar setup, however\nit also optionally supports connecting to an android container running on the\ndevice which will be automatically detected if present. If the device supports\nandroid applications then the android configuration is also supported. In order\nto support this WA will open 2 connections to the device, one via SSH to\nthe main OS and another via ADB to the android container where a limited\nsubset of functionality can be performed.\n\nIn order to distinguish between the two connections some of the android specific\nconfiguration has been renamed to reflect the destination.\n\n:android_working_directory: WA needs a \"working\" directory on the device which it will use for collecting\n   traces, caching assets it pushes to the device, etc. By default, it will\n   create one under ``/sdcard`` which should be mapped and writable on standard\n   Android builds. If this is not the case for your device, you will need to\n   specify an alternative working directory (e.g. 
under ``/data/local``).\n\n\nA typical ``device_config`` inside ``config.yaml`` for a ChromeOS device may\nlook something like\n\n.. code-block:: yaml\n\n        device_config:\n                host: 192.168.0.7\n                username: root\n                android_working_direcory: '/sdcard/wa-working'\n                # ...\n\n.. note:: This assumes that your Chromebook is in developer mode and is\n          configured to run an SSH server with the appropriate ssh keys added to the\n          authorized_keys file on the device.\n\n\nRelated Settings\n----------------\n\nReboot Policy\n^^^^^^^^^^^^^\n\nThis indicates when during WA execution the device will be rebooted. By default\nthis is set to ``as_needed``, indicating that WA will only reboot the device if\nit becomes unresponsive. Please see ``reboot_policy`` documentation in\n:ref:`configuration-specification` for more details.\n\nExecution Order\n^^^^^^^^^^^^^^^\n\n``execution_order`` defines the order in which WA will execute workloads.\n``by_iteration`` (set by default) will execute the first iteration of each spec\nfirst, followed by the second iteration of each spec (that defines more than one\niteration) and so forth. The alternative  will loop through all iterations for\nthe first first spec first, then move on to second spec, etc. Again, please see\n:ref:`configuration-specification` for more details.\n\n\nAdding a new target interface\n-----------------------------\n\nIf you are working with a particularly unusual device (e.g. a early stage\ndevelopment board) or need to be able to handle some quirk of your Android\nbuild, configuration available in ``generic_android`` interface may not be\nenough for you. In that case, you may need to write a custom interface for your\ndevice. 
A device interface is an ``Extension`` (a plug-in) type in WA and is\nimplemented similarly to other extensions (such as workloads or instruments).\nPlease refer to the\n:ref:`adding a custom target <adding-custom-target-example>` section for\ninformation on how this may be done.\n"
  },
  {
    "path": "doc/source/user_information/how_tos/revent.rst",
    "content": ".. _revent_files_creation:\n\nAutomating GUI Interactions With Revent\n=======================================\n\nOverview and Usage\n------------------\n\nThe revent utility can be used to record and later play back a sequence of user\ninput events, such as key presses and touch screen taps. This is an alternative\nto Android UI Automator for providing automation for workloads.\n\nUsing revent with workloads\n^^^^^^^^^^^^^^^^^^^^^^^^^^^\n\nSome workloads (pretty much all games) rely on recorded revents for their\nexecution. ReventWorkloads require between 1 and 4 revent files to be run.\nThere is one mandatory recording, ``run``, for performing the actual execution of\nthe workload and the remaining stages are optional. ``setup`` can be used to perform\nthe initial setup (navigating menus, selecting game modes, etc).\n``extract_results`` can be used to perform any actions after the main stage of\nthe workload, for example to navigate a results or summary screen of the app. And\nfinally ``teardown`` can be used to perform any final actions, for example\nexiting the app.\n\nBecause revents are very device-specific\\ [*]_, these files would need to\nbe recorded for each device.\n\nThe files must be called ``<device name>.(setup|run|extract_results|teardown).revent``,\nwhere ``<device name>`` is the name of your device (as defined by the model\nname of your device which can be retrieved with\n``adb shell getprop ro.product.model`` or by the ``name`` attribute of your\ncustomized device class).\n\nWA will look for these files in two places:\n``<installdir>/wa/workloads/<workload name>/revent_files`` and\n``$WA_USER_DIRECTORY/dependencies/<workload name>``. The\nfirst location is primarily intended for revent files that come with WA (and if\nyou did a system-wide install, you'll need sudo to add files there), so it's\nprobably easier to use the second location for the files you record. 
Also, if\nrevent files for a workload exist in both locations, the files under\n``$WA_USER_DIRECTORY/dependencies`` will be used in favour\nof those installed with WA.\n\n.. [*] It's not just about screen resolution -- the event codes may be different\n       even if devices use the same screen.\n\n.. _revent-recording:\n\nRecording\n^^^^^^^^^\n\nWA features a ``record`` command that will automatically deploy and start revent\non the target device.\n\nIf you want to simply record a single recording on the device then the following\ncommand can be used which will save the recording in the current directory::\n\n    wa record\n\nThere is one mandatory stage called 'run' and 3 optional stages: 'setup',\n'extract_results' and 'teardown' which are used for playback of a workload.\nThe different stages are distinguished by the suffix in the recording file path.\nIn order to facilitate creating these recordings you can specify ``--setup``,\n``--extract-results``, ``--teardown`` or ``--all`` to indicate which stages you\nwould like to create recordings for and the appropriate file name will be generated.\n\nYou can also directly specify a workload to create recordings for and WA will\nwalk you through the relevant steps. For example if we wanted to create\nrecordings for the Angrybirds Rio workload we can specify the ``workload`` flag\nwith ``-w``. And in this case WA can be used to automatically deploy and launch\nthe workload and record ``setup`` (``-s``), ``run`` (``-r``) and ``teardown``\n(``-t``) stages for the workload. 
In order to do this we would use the following\ncommand with an example output shown below::\n\n    wa record -srt -w angrybirds_rio\n\n::\n\n    INFO     Setting up target\n    INFO     Deploying angrybirds_rio\n    INFO     Press Enter when you are ready to record SETUP...\n    [Pressed Enter]\n    INFO     Press Enter when you have finished recording SETUP...\n    [Pressed Enter]\n    INFO     Pulling '<device_model>setup.revent' from device\n    INFO     Press Enter when you are ready to record RUN...\n    [Pressed Enter]\n    INFO     Press Enter when you have finished recording RUN...\n    [Pressed Enter]\n    INFO     Pulling '<device_model>.run.revent' from device\n    INFO     Press Enter when you are ready to record TEARDOWN...\n    [Pressed Enter]\n    INFO     Press Enter when you have finished recording TEARDOWN...\n    [Pressed Enter]\n    INFO     Pulling '<device_model>.teardown.revent' from device\n    INFO     Tearing down angrybirds_rio\n    INFO     Recording(s) are available at: '$WA_USER_DIRECTORY/dependencies/angrybirds_rio/revent_files'\n\nOnce you have made your desired recordings, you can either manually playback\nindividual recordings using the :ref:`replay <replay-command>` command or, with\nthe recordings in the appropriate dependencies location, simply run the workload\nusing the :ref:`run <run-command>` command and then all the available recordings will be\nplayed back automatically.\n\nFor more information on available arguments please see the :ref:`Record <record_command>`\ncommand.\n\n    .. note:: By default revent recordings are not portable across devices and\n              therefore will require recording for each new device you wish to use the\n              workload on. Alternatively a \"gamepad\" recording mode is also supported.\n              This mode requires a gamepad to be connected to the device when recording\n              but the recordings produced in this mode should be portable across devices.\n\n.. 
_revent_replaying:\n\nReplaying\n^^^^^^^^^\n\nIf you want to replay a single recorded file, you can use ``wa replay``\nproviding it with the file you want to replay. An example of the command output\nis shown below::\n\n        wa replay my_recording.revent\n        INFO     Setting up target\n        INFO     Pushing file to target\n        INFO     Starting replay\n        INFO     Finished replay\n\nIf you are using a device that supports android you can optionally specify a\npackage name to launch before replaying the recording.\n\nIf you have recorded the required files for your workload and have placed them in\nthe appropriate location (or specified the workload during recording) then you\ncan simply run the relevant workload and your recordings will be replayed at the\nappropriate times automatically.\n\nFor more information please read :ref:`replay-command`\n\nRevent vs UiAutomator\n----------------------\n\nIn general, Android UI Automator is the preferred way of automating user input\nfor Android workloads because, unlike revent, UI Automator does not depend on a\nparticular screen resolution, and so is more portable across different devices.\nIt also gives better control and can potentially be faster for doing UI\nmanipulations, as input events are scripted based on the available UI elements,\nrather than generated by human input.\n\nOn the other hand, revent can be used to manipulate pretty much any workload,\nwhereas UI Automator only works for Android UI elements (such as text boxes or\nradio buttons), which makes the latter useless for things like games. Recording a\nrevent sequence is also faster than writing automation code (on the other hand,\none would need to maintain a different revent log for each screen resolution).\n\n.. note:: For ChromeOS targets, UI Automator can only be used with android\n          applications and not the ChromeOS host applications themselves.\n\n\n"
  },
  {
    "path": "doc/source/user_information/installation.rst",
    "content": ".. _installation:\n\n************\nInstallation\n************\n\n.. contents:: Contents\n   :depth: 2\n   :local:\n\n\n.. module:: wa\n\nThis page describes the 3 methods of installing Workload Automation 3. The first\noption is to use :ref:`pip` which will install the latest release of WA, the\nlatest development version from :ref:`github <github>` or via a\n:ref:`dockerfile`.\n\n\nPrerequisites\n=============\n\nOperating System\n----------------\n\nWA runs on a native Linux install. It has been tested on recent Ubuntu releases,\nbut other recent Linux distributions should work as well. It should run on\neither 32-bit or 64-bit OS, provided the correct version of dependencies (see\nbelow) are installed. Officially, **other environments are not supported**.\nWA has been known to run on Linux Virtual machines and in Cygwin environments,\nthough additional configuration may be required in both cases (known issues\ninclude makings sure USB/serial connections are passed to the VM, and wrong\npython/pip binaries being picked up in Cygwin). WA *should* work on other\nUnix-based systems such as BSD or Mac OS X, but it has not been tested\nin those environments. WA *does not* run on Windows (though it should be\npossible to get limited functionality with minimal porting effort).\n\n.. Note:: If you plan to run Workload Automation on Linux devices only,\n          SSH is required, and Android SDK is optional if you wish\n          to run WA on Android devices at a later time. Then follow the\n          steps to install the necessary python packages to set up WA.\n\n          However, you would be starting off with a limited number of\n          workloads that will run on Linux devices.\n\nAndroid SDK\n-----------\n\nTo interact with Android devices you will need to have the Android SDK\nwith at least one platform installed.\nTo install it, download the ADT Bundle from here_.  
Extract it\nand add ``<path_to_android_sdk>/sdk/platform-tools`` and ``<path_to_android_sdk>/sdk/tools``\nto your ``PATH``.  To test that you've installed it properly, run ``adb\nversion``. The output should be similar to this::\n\n        adb version\n        Android Debug Bridge version 1.0.39\n\n.. _here: https://developer.android.com/sdk/index.html\n\nOnce that is working, run ::\n\n        android update sdk\n\nThis will open up a dialog box listing available android platforms and\ncorresponding API levels, e.g. ``Android 4.3 (API 18)``. For WA, you will need\nat least API level 18 (i.e. Android 4.3), though installing the latest is\nusually the best bet.\n\nOptionally (but recommended), you should also set ``ANDROID_HOME`` to point to\nthe install location of the SDK (i.e. ``<path_to_android_sdk>/sdk``).\n\n\nPython\n------\n\nWorkload Automation 3 currently supports Python 3.5+\n\n.. note:: If your system's default python version is still Python 2, please\n          replace the commands listed here with their Python3 equivalent\n          (e.g. python3, pip3 etc.)\n\n.. _pip:\n\npip\n---\n\npip is the recommended package manager for Python. It is not part of standard\nPython distribution and would need to be installed separately. On Ubuntu and\nsimilar distributions, this may be done with APT::\n\n        sudo apt-get install python-pip\n\n.. note:: Some versions of pip (in particluar v1.5.4 which comes with Ubuntu\n          14.04) are know to set the wrong permissions when installing\n          packages, resulting in WA failing to import them. 
To avoid this it\n          is recommended that you update pip and setuptools before proceeding\n          with installation::\n\n                  sudo -H pip install --upgrade pip\n                  sudo -H pip install --upgrade setuptools\n\n          If you do run into this issue after already installing some packages,\n          you can resolve it by running ::\n\n                  sudo chmod -R a+r /usr/local/lib/python3.X/dist-packages\n                  sudo find /usr/local/lib/python3.X/dist-packages -type d -exec chmod a+x {} \\;\n\n          (The paths above will work for Ubuntu; they may need to be adjusted\n          for other distros).\n\n\nPython Packages\n---------------\n\n.. note:: pip should automatically download and install missing dependencies,\n          so if you're using pip, you can skip this section. However some\n          packages that will be installed have C plugins and will require Python\n          development headers to install. You can get those by installing\n          ``python-dev`` package in apt on Ubuntu (or the equivalent for your\n          distribution).\n\nWorkload Automation 3 depends on the following additional libraries:\n\n  * pexpect\n  * docutils\n  * pySerial\n  * pyYAML\n  * python-dateutil\n  * louie\n  * pandas\n  * devlib\n  * wrapt\n  * requests\n  * colorama\n  * future\n\nYou can install these with pip::\n\n        sudo -H pip install pexpect\n        sudo -H pip install pyserial\n        sudo -H pip install pyyaml\n        sudo -H pip install docutils\n        sudo -H pip install python-dateutil\n        sudo -H pip install devlib\n        sudo -H pip install pandas\n        sudo -H pip install louie\n        sudo -H pip install wrapt\n        sudo -H pip install requests\n        sudo -H pip install colorama\n        sudo -H pip install future\n\nSome of these may also be available in your distro's repositories, e.g. 
::\n\n        sudo apt-get install python-serial\n\nDistro package versions tend to be older, so pip installation is recommended.\nHowever, pip will always download and try to build the source, so in some\nsituations distro binaries may provide an easier fall back. Please also note that\ndistro package names may differ from pip packages.\n\n\nOptional Python Packages\n------------------------\n\n.. note:: Unlike the mandatory dependencies in the previous section,\n          pip will *not* install these automatically, so you will have\n          to explicitly install them if/when you need them.\n\nIn addition to the mandatory packages listed in the previous sections, some WA\nfunctionality (e.g. certain plugins) may have additional dependencies. Since\nthey are not necessary to be able to use most of WA, they are not made mandatory\nto simplify initial WA installation. If you try to use a plugin that has\nadditional, unmet dependencies, WA will tell you before starting the run, and\nyou can install it then. They are listed here for those that would rather\ninstall them upfront (e.g. if you're planning to use WA in an environment that\nmay not always have Internet access).\n\n  * nose\n  * mock\n  * daqpower\n  * sphinx\n  * sphinx_rtd_theme\n  * psycopg2-binary\n\n\n\n.. _github:\n\nInstalling\n==========\n\nInstalling the latest released version from PyPI (Python Package Index)::\n\n       sudo -H pip install wlauto\n\nThis will install WA along with its mandatory dependencies. If you would like to\ninstall all optional dependencies at the same time, do the following instead::\n\n       sudo -H pip install wlauto[all]\n\n\nAlternatively, you can also install the latest development version from GitHub\n(you will need git installed for this to work)::\n\n       git clone git@github.com:ARM-software/workload-automation.git workload-automation\n       cd workload-automation\n       sudo -H python setup.py install\n\n.. 
note:: Please note that if using pip to install from github this will most\n          likely result in an older and incompatible version of devlib being\n          installed alongside WA. If you wish to use pip please also manually\n          install the latest version of\n          `devlib <https://github.com/ARM-software/devlib>`_.\n\n.. note:: Please note that while a `requirements.txt` is included, this is\n          designed to be a reference of known working packages rather than to\n          be used as part of a standard installation. The version restrictions\n          in place as part of `setup.py` should automatically ensure the correct\n          packages are installed however if encountering issues please try\n          updating/downgrading to the package versions listed within.\n\n\nIf the above succeeds, try ::\n\n        wa --version\n\nHopefully, this should output something along the lines of ::\n\n        \"Workload Automation version $version\".\n\n.. _dockerfile:\n\nDockerfile\n============\n\nAs an alternative we also provide a Dockerfile that will create an image called\nwadocker, and is preconfigured to run WA and devlib. Please note that the build\nprocess automatically accepts the licenses for the Android SDK, so please be\nsure that you are willing to accept these prior to building and running the\nimage in a container.\n\nThe Dockerfile can be found in the \"extras\" directory or online at\n`<https://github.com/ARM-software/workload-automation/blob/next/extras/Dockerfile>`_\nwhich contains additional information about how to build and to use the file.\n\n\n(Optional) Post Installation\n============================\n\nSome WA plugins have additional dependencies that need to be\nsatisfied before they can be used. Not all of these can be provided with WA and\nso will need to be supplied by the user. 
They should be placed into\n``~/.workload_automation/dependencies/<extension name>`` so that WA can find\nthem (you may need to create the directory if it doesn't already exist). You\nonly need to provide the dependencies for workloads you want to use.\n\n.. _apk_files:\n\nAPK Files\n---------\n\nAPKs are application packages used by Android. These are necessary to install on\na device when running an :ref:`ApkWorkload <apk-workload>` or derivative. Please\nsee the workload description using the :ref:`show <show-command>` command to see\nwhich version of the apk the UI automation has been tested with and place the\napk in the corresponding workloads dependency directory. Automation may also work\nwith other versions (especially if it's only a minor or revision difference --\nmajor version differences are more likely to contain incompatible UI changes)\nbut this has not been tested. As a general rule we do not guarantee support for\nthe latest version of an app and they are updated on an as needed basis. We do\nhowever attempt to support backwards compatibility with previous major releases\nhowever beyond this support will likely be dropped.\n\n\nGaming Workloads\n----------------\n\nSome workloads (games, demos, etc) cannot be automated using Android's\nUIAutomator framework because they render the entire UI inside a single OpenGL\nsurface. For these, an interaction session needs to be recorded so that it can\nbe played back by WA. These recordings are device-specific, so they would need\nto be done for each device you're planning to use. The tool for doing this is\n``revent`` and it is packaged with WA. You can find instructions on how to use\nit in the :ref:`How To <revent_files_creation>` section.\n\nThis is the list of workloads that rely on such recordings:\n\n+------------------+\n| angrybirds_rio   |\n+------------------+\n| templerun2       |\n+------------------+\n\n.. 
_assets_repository:\n\nMaintaining Centralized Assets Repository\n-----------------------------------------\n\nIf there are multiple users within an organization that may need to deploy\nassets for WA plugins, that organization may wish to maintain a centralized\nrepository of assets that individual WA installs will be able to automatically\nretrieve asset files from as they are needed. This repository can be any\ndirectory on a network filer that mirrors the structure of\n``~/.workload_automation/dependencies``, i.e. has subdirectories named after\nthe plugins whose assets they contain. Individual WA installs can then set\n``remote_assets_path`` setting in their config to point to the local mount of\nthat location.\n\n\n(Optional) Uninstalling\n=======================\n\nIf you have installed Workload Automation via ``pip`` and wish to remove it, run this command to\nuninstall it::\n\n    sudo -H pip uninstall wa\n\n.. Note:: This will *not* remove any user configuration (e.g. the ~/.workload_automation directory)\n\n\n(Optional) Upgrading\n====================\n\nTo upgrade Workload Automation to the latest version via ``pip``, run::\n\n    sudo -H pip install --upgrade --no-deps wa\n"
  },
  {
    "path": "doc/source/user_information/user_guide.rst",
    "content": ".. _user-guide:\n\n**********\nUser Guide\n**********\n\nThis guide will show you how to quickly start running workloads using\nWorkload Automation 3.\n\n.. contents:: Contents\n   :depth: 2\n   :local:\n\n---------------------------------------------------------------\n\n\nInstall\n=======\n\n.. note:: This is a quick summary. For more detailed instructions, please see\n          the :ref:`installation` section.\n\nMake sure you have Python 3.5+ and a recent Android SDK with API\nlevel 18 or above installed on your system. A complete install of the Android\nSDK is required, as WA uses a number of its utilities, not just adb. For the\nSDK, make sure that either ``ANDROID_HOME`` environment variable is set, or that\n``adb`` is in your ``PATH``.\n\n.. Note:: If you plan to run Workload Automation on Linux devices only, SSH is required,\n          and Android SDK is optional if you wish to run WA on Android devices at a\n          later time.\n\n          However, you would be starting off with a limited number of workloads that\n          will run on Linux devices.\n\nIn addition to the base Python install, you will also need to have ``pip``\n(Python's package manager) installed as well. This is usually a separate package.\n\nOnce you have those, you can install WA with::\n\n        sudo -H pip install wlauto\n\nThis will install Workload Automation on your system, along with its mandatory\ndependencies.\n\nAlternatively we provide a Dockerfile that which can be used to create a Docker\nimage for running WA along with its dependencies. More information can be found\nin the :ref:`Installation <dockerfile>` section.\n\n(Optional) Verify installation\n-------------------------------\n\nOnce the tarball has been installed, try executing ::\n\n        wa -h\n\nYou should see a help message outlining available subcommands.\n\n\n(Optional) APK files\n--------------------\n\nA large number of WA workloads are installed as APK files. 
These cannot be\ndistributed with WA and so you will need to obtain those separately.\n\nFor more details, please see the :ref:`installation <apk_files>` section.\n\n\nList Command\n============\n\nIn order to get started with using WA we first need to find\nout what is available to use. In order to do this we can use the :ref:`list <list-command>`\ncommand followed by the type of plugin that you wish to see.\n\nFor example to see what workloads are available along with a short description\nof each you run::\n\n    wa list workloads\n\nWhich will give an output in the format of:\n\n.. code-block:: none\n\n             adobereader:    The Adobe Reader workflow carries out the following typical\n                             productivity tasks.\n              androbench:    Executes storage performance benchmarks\n          angrybirds_rio:    Angry Birds Rio game.\n                  antutu:    Executes Antutu 3D, UX, CPU and Memory tests\n               applaunch:    This workload launches and measures the launch time of applications\n                             for supporting workloads.\n             benchmarkpi:    Measures the time the target device takes to run and complete the\n                             Pi calculation algorithm.\n               dhrystone:    Runs the Dhrystone benchmark.\n               exoplayer:    Android ExoPlayer\n               geekbench:    Geekbench provides a comprehensive set of benchmarks engineered to\n                             quickly and accurately measure\n                             processor and memory performance.\n            #..\n\nThe same syntax can be used to display ``commands``,\n``energy_instrument_backends``, ``instruments``, ``output_processors``,\n``resource_getters``, ``targets``. Once you have found the plugin you are\nlooking for you can use the :ref:`show <show-command>` command to display more\ndetailed information.  
Alternatively please see the\n:ref:`Plugin Reference <plugin-reference>` for an online version.\n\nShow Command\n============\n\nIf you want to learn more information about a particular plugin, such as the\nparameters it supports, you can use the \"show\" command::\n\n    wa show dhrystone\n\nIf you have ``pandoc`` installed on your system, this will display man\npage-like description of the plugin, and the parameters it supports. If you do\nnot have ``pandoc``, you will instead see the same information as raw\nrestructured text.\n\nConfigure Your Device\n=====================\n\nThere are multiple options for configuring your device depending on your\nparticular use case.\n\nYou can either add your configuration to the default configuration file\n``config.yaml``, under the ``$WA_USER_DIRECTORY/`` directory or you can specify it in\nthe ``config`` section of your agenda directly.\n\nAlternatively if you are using multiple devices, you may want to create separate\nconfig files for each of your devices you will be using. This allows you to\nspecify which device you would like to use for a particular run and pass it as\nan argument when invoking with the ``-c`` flag.\n::\n\n    wa run dhrystone -c my_device.yaml\n\nBy default WA will use the “most specific” configuration available for example\nany configuration specified inside an agenda will override a passed\nconfiguration file which will in turn overwrite the default configuration file.\n\n.. note:: For a more information about configuring your\n          device please see :ref:`Setting Up A Device <setting-up-a-device>`.\n\nAndroid\n-------\n\nBy default, the device WA will use is set to 'generic_android'. WA is configured\nto work with a generic Android device through ``adb``. 
If you only have one\ndevice listed when you execute ``adb devices``, and your device has a standard\nAndroid configuration, then no extra configuration is required.\n\nHowever, if your device is connected via network, you will have to manually\nexecute ``adb connect <device ip>`` (or specify this in your\n:ref:`agenda <agenda>`) so that it appears in the device listing.\n\nIf you have multiple devices connected, you will need to tell WA which one you\nwant it to use. You can do that by setting ``device`` in the device_config section.\n\n.. code-block:: yaml\n\n        # ...\n\n        device_config:\n                device: 'abcdef0123456789'\n                # ...\n        # ...\n\nLinux\n-----\n\nFirst, set the device to 'generic_linux'\n\n.. code-block:: yaml\n\n        # ...\n          device: 'generic_linux'\n        # ...\n\nFind the device_config section and add these parameters\n\n.. code-block:: yaml\n\n        # ...\n\n        device_config:\n                host: '192.168.0.100'\n                username: 'root'\n                password: 'password'\n                # ...\n        # ...\n\nParameters:\n\n- Host is the IP of your target Linux device\n- Username is the user for the device\n- Password is the password for the device\n\nEnabling and Disabling Augmentations\n---------------------------------------\n\nAugmentations are the collective name for \"instruments\" and \"output\nprocessors\" in WA3.\n\nSome augmentations are enabled by default after your initial install of WA,\nwhich are specified in the ``config.yaml`` file located in your\n``WA_USER_DIRECTORY``, typically ``~/.workload_automation``.\n\n.. note:: Some Linux devices may not be able to run certain augmentations\n          provided by WA (e.g. cpufreq is disabled or unsupported by the\n          device).\n\n.. 
code-block:: yaml\n\n        # ...\n\n        augmentations:\n            # Records the time it took to run the workload\n            - execution_time\n\n            # Collects /proc/interrupts before and after execution and does a diff.\n            - interrupts\n\n            # Collects the contents of/sys/devices/system/cpu before and after\n            # execution and does a diff.\n            - cpufreq\n\n            # Generate a txt file containing general status information about\n            # which runs failed and which were successful.\n            - status\n\n            # ...\n\nIf you only wanted to keep the 'execution_time' instrument enabled, you can comment out\nthe rest of the list augmentations to disable them.\n\nThis should give you basic functionality. If you are working with a development\nboard or you need some advanced functionality additional configuration may be required.\nPlease see the :ref:`device setup <setting-up-a-device>` section for more details.\n\n.. note:: In WA2 'Instrumentation' and 'Result Processors' were divided up into their\n          own sections in the agenda. In WA3 they now fall under the same category of\n          'augmentations'. For compatibility the old naming structure is still valid\n          however using the new entry names is recommended.\n\n\n\nRunning Your First Workload\n===========================\n\nThe simplest way to run a workload is to specify it as a parameter to WA ``run``\n:ref:`run <run-command>` sub-command::\n\n        wa run dhrystone\n\nYou will see INFO output from WA as it executes each stage of the run. 
A\ncompleted run output should look something like this::\n\n        INFO     Creating output directory.\n        INFO     Initializing run\n        INFO     Connecting to target\n        INFO     Setting up target\n        INFO     Initializing execution context\n        INFO     Generating jobs\n        INFO         Loading job wk1 (dhrystone) [1]\n        INFO     Installing instruments\n        INFO     Installing output processors\n        INFO     Starting run\n        INFO     Initializing run\n        INFO         Initializing job wk1 (dhrystone) [1]\n        INFO     Running job wk1\n        INFO         Configuring augmentations\n        INFO         Configuring target for job wk1 (dhrystone) [1]\n        INFO         Setting up job wk1 (dhrystone) [1]\n        INFO         Running job wk1 (dhrystone) [1]\n        INFO         Tearing down job wk1 (dhrystone) [1]\n        INFO         Completing job wk1\n        INFO     Job completed with status OK\n        INFO     Finalizing run\n        INFO         Finalizing job wk1 (dhrystone) [1]\n        INFO     Done.\n        INFO     Run duration: 9 seconds\n        INFO     Ran a total of 1 iterations: 1 OK\n        INFO     Results can be found in wa_output\n\n\nOnce the run has completed, you will find a directory called ``wa_output``\nin the location where you have invoked ``wa run``. Within this directory,\nyou will find a \"results.csv\" file which will contain results obtained for\ndhrystone, as well as a \"run.log\" file containing detailed log output for\nthe run. You will also find a sub-directory called 'wk1-dhrystone-1' that\ncontains the results for that iteration. Finally, you will find various additional\ninformation in the ``wa_output/__meta`` subdirectory for example information\nextracted from the target and a copy of the agenda file. 
The contents of\niteration-specific subdirectories will vary from workload to workload, and,\nalong with the contents of the main output directory, will depend on the\naugmentations that were enabled for that run.\n\nThe ``run`` sub-command takes a number of options that control its behaviour,\nyou can view those by executing ``wa run -h``. Please see the :ref:`invocation`\nsection for details.\n\n\nCreate an Agenda\n================\n\nSimply running a single workload is normally of little use. Typically, you would\nwant to specify several workloads, setup the device state and, possibly, enable\nadditional augmentations. To do this, you would need to create an \"agenda\" for\nthe run that outlines everything you want WA to do.\n\nAgendas are written using YAML_ markup language. A simple agenda might look\nlike this:\n\n.. code-block:: yaml\n\n        config:\n                augmentations:\n                    - ~execution_time\n                    - targz\n                iterations: 2\n        workloads:\n                - memcpy\n                - name: dhrystone\n                  params:\n                        mloops: 5\n                        threads: 1\n\nThis agenda:\n\n- Specifies two workloads: memcpy and dhrystone.\n- Specifies that dhrystone should run in one thread and execute five million loops.\n- Specifies that each of the two workloads should be run twice.\n- Enables the targz output processor, in addition to the output processors enabled in\n  the config.yaml.\n- Disables execution_time instrument, if it is enabled in the config.yaml\n\nAn agenda can be created using WA's ``create`` :ref:`command <using-the-create-command>`\nor in a text editor and saved as a YAML file.\n\nFor more options please see the :ref:`agenda` documentation.\n\n.. _YAML: http://en.wikipedia.org/wiki/YAML\n\n.. _using-the-create-command:\n\nUsing the Create Command\n-------------------------\nThe easiest way to create an agenda is to use the 'create' command. 
For more\nin-depth information please see the :ref:`Create Command <create-command>` documentation.\n\nIn order to populate the agenda with relevant information you can supply all of\nthe plugins you wish to use as arguments to the command, for example if we want\nto create an agenda file for running ``dhrystone`` on a `generic_android` device and we\nwant to enable the ``execution_time`` and ``trace-cmd`` instruments and display the\nmetrics using the ``csv`` output processor. We would use the following command::\n\n    wa create agenda generic_android dhrystone execution_time trace-cmd csv -o my_agenda.yaml\n\nThis will produce a ``my_agenda.yaml`` file containing all the relevant\nconfiguration for the specified plugins along with their default values as shown\nbelow:\n\n.. code-block:: yaml\n\n        config:\n            augmentations:\n            - execution_time\n            - trace-cmd\n            - csv\n            iterations: 1\n            device: generic_android\n            device_config:\n                adb_server: null\n                adb_port: null\n                big_core: null\n                core_clusters: null\n                core_names: null\n                device: null\n                disable_selinux: true\n                executables_directory: null\n                load_default_modules: true\n                logcat_poll_period: null\n                model: null\n                modules: null\n                package_data_directory: /data/data\n                shell_prompt: !<tag:wa:regex> '8:^.*(shell|root)@.*:/\\S* [#$] '\n                working_directory: null\n            execution_time: {}\n            trace-cmd:\n                buffer_size: null\n                buffer_size_step: 1000\n                events:\n                - sched*\n                - irq*\n                - power*\n                - thermal*\n                functions: null\n                no_install: false\n                report: true\n                
report_on_target: false\n                mode: write-to-memory\n            csv:\n                extra_columns: null\n                use_all_classifiers: false\n        workloads:\n        -   name: dhrystone\n            params:\n                cleanup_assets: true\n                delay: 0\n                duration: 0\n                mloops: 0\n                taskset_mask: 0\n                threads: 4\n\n\nRun Command\n============\nThese examples show some useful options that can be used with WA's ``run`` command.\n\nOnce we have created an agenda to use it with WA we can pass it as an argument to\nthe run command e.g.::\n\n    wa run <path/to/agenda> (e.g. wa run ~/myagenda.yaml)\n\nBy default WA will use the \"wa_output\" directory to store its output however to\nredirect the output to a different directory we can use::\n\n    wa run dhrystone -d my_output_directory\n\nWe can also tell WA to use additional config files by supplying it with\nthe ``-c`` argument. One use case for passing additional config files is if you\nhave multiple devices you wish to test with WA, you can store the relevant device\nconfiguration in individual config files and then pass the file corresponding to\nthe device you wish to use for that particular test.\n\n.. note:: As previously mentioned, any more specific configuration present in\n          the agenda file will overwrite the corresponding config parameters\n          specified in the config file(s).\n\n\n::\n\n    wa run -c myconfig.yaml ~/myagenda.yaml\n\nTo use the same output directory but override the existing contents to\nstore new dhrystone results we can specify the ``-f`` argument::\n\n    wa run -f dhrystone\n\nTo display verbose output while running memcpy::\n\n    wa run --verbose memcpy\n\n\n.. 
_output_directory:\n\nOutput\n======\n\nThe output directory will contain subdirectories for each job that was run,\nwhich will in turn contain the generated metrics and artifacts for each job.\nThe directory will also contain a ``run.log`` file containing the complete log\noutput for the run, and a ``__meta`` directory with the configuration and\nmetadata for the run. Metrics are serialized inside ``result.json`` files inside\neach job's subdirectory. There may also be a ``__failed`` directory containing\nfailed attempts for jobs that have been re-run.\n\nAugmentations may add additional files at the run or job directory level. The\ndefault configuration has ``status`` and ``csv`` augmentations enabled which\ngenerate a ``status.txt`` containing status summary for the run and individual\njobs, and a ``results.csv`` containing metrics from all jobs in a CSV table,\nrespectively.\n\nSee :ref:`output_directory_structure` for more information.\n\nIn order to make it easier to access WA results from scripts, WA provides an API\nthat parses the contents of the output directory:\n\n\n.. code-block:: pycon\n\n    >>> from wa import RunOutput\n    >>> ro = RunOutput('./wa_output')\n    >>> for job in ro.jobs:\n    ...     if job.status != 'OK':\n    ...         print('Job \"{}\" did not complete successfully: {}'.format(job, job.status))\n    ...         continue\n    ...     print('Job \"{}\":'.format(job))\n    ...     for metric in job.metrics:\n    ...         if metric.units:\n    ...             print('\\t{}: {} {}'.format(metric.name, metric.value, metric.units))\n    ...         else:\n    ...             
print('\\t{}: {}'.format(metric.name, metric.value))\n    ...\n    Job \"wk1-dhrystone-1\":\n            thread 0 score: 20833333\n            thread 0 DMIPS: 11857\n            thread 1 score: 24509804\n            thread 1 DMIPS: 13950\n            thread 2 score: 18011527\n            thread 2 DMIPS: 10251\n            thread 3 score: 26371308\n            thread 3 DMIPS: 15009\n            time: 1.001251 seconds\n            total DMIPS: 51067\n            total score: 89725972\n            execution_time: 1.4834280014 seconds\n\nSee  :ref:`output_processing_api` for details.\n\nUninstall\n=========\n\nIf you have installed Workload Automation via ``pip``, then run this command to\nuninstall it::\n\n    sudo pip uninstall wa\n\n\n.. Note:: It will *not* remove any user configuration (e.g. the ~/.workload_automation\n          directory).\n\nUpgrade\n=======\n\nTo upgrade Workload Automation to the latest version via ``pip``, run::\n\n    sudo pip install --upgrade --no-deps wa\n\n"
  },
  {
    "path": "doc/source/user_information/user_reference/agenda.rst",
    "content": ".. _agenda-reference:\n\nAgenda\n------\n\n\nAn agenda can be thought of as a way to define an experiment as it specifies\nwhat is to be done during a Workload Automation run. This includes which\nworkloads will be run, with what configuration and which augmentations will be\nenabled, etc. Agenda syntax is designed to be both succinct and expressive and\nis written using YAML notation.\n\nThere are three valid top level entries which are:\n:ref:`config <config-agenda-entry>`, :ref:`workloads <workloads-agenda-entry>`,\n:ref:`sections <sections-agenda-entry>`.\n\nAn example agenda can be seen here:\n\n.. code-block:: yaml\n\n    config:                     # General configuration for the run\n        user_directory: ~/.workload_automation/\n        default_output_directory: 'wa_output'\n        augmentations:          # A list of all augmentations to be enabled and disabled.\n        - trace-cmd\n        - csv\n        - ~dmesg                # Disable the dmseg augmentation\n\n        iterations: 1           # How many iterations to run each workload by default\n\n        device: generic_android\n        device_config:\n            device: R32C801B8XY # The adb name of our device we want to run on\n            disable_selinux: true\n            load_default_modules: true\n            package_data_directory: /data/data\n\n        trace-cmd:              # Provide config for the trace-cmd augmentation.\n            buffer_size_step: 1000\n            events:\n            - sched*\n            - irq*\n            - power*\n            - thermal*\n            no_install: false\n            report: true\n            report_on_target: false\n            mode: write-to-disk\n        csv:                    # Provide config for the csv augmentation\n            use_all_classifiers: true\n\n    sections:                   # Configure what sections we want and their settings\n        - id: LITTLES           # Run workloads just on the LITTLE cores\n          
runtime_parameters:   # Supply RT parameters to be used for this section\n                num_little_cores: 4\n                num_big_cores: 0\n\n        - id: BIGS               # Run workloads just on the big cores\n          runtime_parameters:    # Supply RT parameters to be used for this section\n                num_big_cores: 4\n                num_little_cores: 0\n\n    workloads:                  # List which workloads should be run\n    -   name: benchmarkpi\n        augmentations:\n            - ~trace-cmd        # Disable the trace-cmd instrument for this workload\n        iterations: 2           # Override the global number of iteration for this workload\n        params:                 # Specify workload parameters for this workload\n            cleanup_assets: true\n            exact_abi: false\n            force_install: false\n            install_timeout: 300\n            markers_enabled: false\n            prefer_host_package: true\n            strict: false\n            uninstall: false\n    -   dhrystone               # Run the dhrystone workload with all default config\n\nThis agenda will result in a total of 6 jobs being executed on our Android\ndevice, 4 of which running the BenchmarkPi workload with its customized workload\nparameters and 2 running dhrystone with its default configuration. The first 3\nwill be running on only the little cores and the latter running on the big\ncores. For all of the jobs executed the output will be processed by the ``csv``\nprocessor,(plus any additional processors enabled in the default config file),\nhowever trace data will only be collected for the dhrystone jobs.\n\n.. _config-agenda-entry:\n\nconfig\n^^^^^^^\n\nThis section is used to provide overall configuration for WA and its run. 
The\n``config`` section of an agenda will be merged with any other configuration\nfiles provided (including the default config file) and merged with the most\nspecific configuration taking precedence (see\n:ref:`Config Merging <config-merging>` for more information). The only\nrestriction is that ``run_name`` can only be specified in the config section\nof an agenda as this would not make sense to set as a default.\n\nWithin this section there are multiple distinct types of configuration that can\nbe provided. However in addition to the options listed here all configuration\nthat is available for :ref:`sections <sections-agenda-entry>` can also be entered\nhere and will be globally applied.\n\nConfiguration\n\"\"\"\"\"\"\"\"\"\"\"\"\"\n\nThe first is to configure the behaviour of WA and how a run as a\nwhole will behave. The most common options that you may want to specify are:\n\n  :device: The name of the 'device' that you wish to perform the run\n           on. This name is a combination of a devlib\n           `Platform <http://devlib.readthedocs.io/en/latest/platform.html>`_ and\n           `Target <http://devlib.readthedocs.io/en/latest/target.html>`_. To\n           see the available options please use ``wa list targets``.\n  :device_config: This is a dict mapping allowing you to configure which target\n                  to connect to  (e.g. ``host`` for an SSH connection or\n                  ``device`` to specify an ADB name) as well as configure other\n                  options for the device for example the ``working_directory``\n                  or the list of ``modules`` to be loaded onto the device. 
(For\n                  more information please see\n                  :ref:`here <android-general-device-setup>`)\n  :execution_order: Defines the order in which the agenda spec will be executed.\n  :reboot_policy: Defines when during execution of a run a Device will be rebooted.\n  :max_retries: The maximum number of times failed jobs will be retried before giving up.\n  :allow_phone_home: Prevent running any workloads that are marked with ‘phones_home’.\n\nFor more information and a full list of these configuration options please see\n:ref:`Run Configuration <run-configuration>` and\n:ref:`Meta Configuration <meta-configuration>`.\n\n\nPlugins\n\"\"\"\"\"\"\"\n  :augmentations: Specify a list of which augmentations should be enabled (or if\n      prefixed with a ``~``, disabled).\n\n      .. note:: While augmentations can be enabled and disabled on a per workload\n                basis, they cannot yet be re-configured part way through a run and the\n                configuration provided as part of an agenda config section or separate\n                config file will be used for all jobs in a WA run.\n\n  :<plugin_name>: You can also use this section to supply configuration for\n      specific plugins, such as augmentations, workloads, resource getters etc.\n      To do this the plugin name you wish to configure should be provided as an\n      entry in this section and should contain a mapping of configuration\n      options to their desired settings. If configuration is supplied for a\n      plugin that is not currently enabled then it will simply be ignored. This\n      allows for plugins to be temporarily removed without also having to remove\n      their configuration, or to provide a set of defaults for a plugin which\n      can then be overridden.\n\n  :<global_alias>: Some plugins provide global aliases which can set one or more\n      configuration options at once, and these can also be specified here. 
For\n      example if you specify a value for the entry ``remote_assets_url`` this\n      will set the URL the http resource getter will use when searching for any\n      missing assets.\n\n---------------------------\n\n.. _workloads-agenda-entry:\n\nworkloads\n^^^^^^^^^\n\nHere you can specify a list of workloads to be run. If you wish to run a\nworkload with all default values then you can specify the workload name directly\nas an entry, otherwise a dict mapping should be provided. Any settings provided\nhere will be the most specific and therefore override any other more generalised\nconfiguration for that particular workload spec. The valid entries are as\nfollows:\n\n:workload_name: **(Mandatory)** The name of the workload to be run\n:iterations: Specify how many iterations the workload should be run\n:label: Similar to IDs but do not have the uniqueness restriction.\n    If specified, labels will be used by some output processors instead of (or in\n    addition to) the workload name. For example, the csv output processor will put\n    the label in the \"workload\" column of the CSV file.\n:augmentations: The instruments and output processors to enable (or\n    disabled using a ~) during this workload.\n:classifiers: Classifiers allow you to tag metrics from this workload\n    spec which are often used to help identify what runtime parameters were used\n    when post processing results.\n:workload_parameters: Any parameters to\n    configure that particular workload in a dict form.\n\n    Alias: ``workload_params``\n\n      .. note:: You can see available parameters for a given workload with the\n                :ref:`show command <show-command>` or look it up in the\n                :ref:`Plugin Reference <plugin-reference>`.\n\n:runtime_parameters: A dict mapping of any runtime parameters that should be set\n     for the device for that particular workload. 
For available\n     parameters please see\n     :ref:`runtime parameters <runtime-parameters>`.\n\n     Alias: ``runtime_parms``\n\n     .. note:: Unless specified elsewhere these configurations will not be\n               undone once the workload has finished. I.e. if the frequency of a\n               core is changed it will remain at that frequency until otherwise\n               changed.\n\n.. note:: There is also a shorter ``params`` alias available, however this alias will be\n          interpreted differently depending on whether it is used in a workload\n          entry, in which case it will be interpreted as ``workload_params``, or\n          at global config or section (see below) level, in which case it will\n          be interpreted as ``runtime_params``.\n\n\n---------------------------\n\n.. _sections-agenda-entry:\n\nsections\n^^^^^^^^\n\nSections are used for grouping sets of configuration together in order to\nreduce the need for duplicated configuration (for more information please see\n:ref:`Sections <sections>`). Each section specified will be applied for each\nentry in the ``workloads`` section. The valid configuration entries are the\nsame as the ``\"workloads\"`` section as mentioned above, except you can\nadditionally specify:\n\n:workloads: An entry which can be provided with the same configuration entries\n    as the :ref:`workloads <workloads-agenda-entry>` top level entry.\n"
  },
  {
    "path": "doc/source/user_information/user_reference/configuration.rst",
    "content": ".. _configuration-specification:\n\n\nConfiguration\n=============\n\n.. include:: user_information/user_reference/agenda.rst\n\n---------------------\n\n.. _run-configuration:\n\nRun Configuration\n------------------\nIn addition to specifying run execution parameters through an agenda, the\nbehaviour of WA can be modified through configuration file(s). The default\nconfiguration file is ``~/.workload_automation/config.yaml``  (the location can\nbe changed by setting ``WA_USER_DIRECTORY`` environment variable, see\n:ref:`envvars` section below). This file will be created when you first run WA\nif it does not already exist. This file must always exist and will always be\nloaded. You can add to or override the contents of that file on invocation of\nWorkload Automation by specifying an additional configuration file using\n``--config`` option. Variables with specific names  will be picked up by the\nframework and used to modify the behaviour of Workload automation e.g.\nthe ``iterations`` variable might be specified to tell WA how many times to run\neach workload.\n\n---------------------\n\n.. _available_settings:\n\n.. include:: run_config/Run_Configuration.rst\n\n---------------------\n\n.. _meta-configuration:\n\nMeta Configuration\n------------------\n\nThere are also a couple of settings are used to provide additional metadata\nfor a run. These may get picked up by instruments or output processors to\nattach context to results.\n\n.. include:: run_config/Meta_Configuration.rst\n\n---------------------\n\n.. _envvars:\n\nEnvironment Variables\n---------------------\n\nIn addition to standard configuration described above, WA behaviour can be\naltered through environment variables. These can determine where WA looks for\nvarious assets when it starts.\n\n:WA_USER_DIRECTORY: This is the location WA will look for config.yaml, plugins,\n   dependencies, and it will also be used for local caches, etc. 
If this\n   variable is not set, the default location is ``~/.workload_automation`` (this\n   is created when WA is installed).\n\n   .. note:: This location **must** be writable by the user who runs WA.\n\n\n:WA_LOG_BUFFER_CAPACITY: Specifies the capacity (in log records) for the early\n    log handler which is used to buffer log records until a log file becomes\n    available. If this is not set, the default value of ``1000`` will be used.\n    This should be sufficient for most scenarios, however this may need to be\n    increased, e.g. if the plugin loader scans a very large number of locations;\n    this may also be set to a lower value to reduce WA's memory footprint on\n    memory-constrained hosts.\n\n---------------------\n\n.. include:: user_information/user_reference/runtime_parameters.rst\n\n---------------------\n\n.. _config-merging:\n\nConfiguration Merging\n---------------------\nWA configuration can come from various sources of increasing priority, as well\nas being specified in a generic and specific manner. For example WA's global\nconfig file would be considered the least specific vs the parameters of a\nworkload in an agenda which would be the most specific. WA has two rules for the\npriority of configuration:\n\n    - Configuration from higher priority sources overrides configuration from\n      lower priority sources.\n    - More specific configuration overrides less specific configuration.\n\nThere is a situation where these two rules come into conflict: when a generic\nconfiguration is given in a config source of high priority and a specific\nconfiguration is given in a config source of lower priority. In this situation\nit is not possible to know the end user's intention and WA will error.\n\nThis functionality allows for defaults for plugins, targets etc. 
to be\nconfigured at a global level and then seamlessly overridden without the need to\nremove the high level configuration.\n\nDependent on specificity, configuration parameters from different sources will\nhave different inherent priorities. Within an agenda, the configuration in\n\"workload\" entries will be more specific than \"sections\" entries, which in turn\nare more specific than parameters in the \"config\" entry.\n\n.. _config-include:\n\nConfiguration Includes\n----------------------\n\nIt is possible to include other files in your config files and agendas. This is\ndone by specifying ``include#`` (note the trailing hash) as a key in one of the\nmappings, with the value being the path to the file to be included. The path\nmust be either absolute, or relative to the location of the file it is being\nincluded from (*not* to the current working directory). The path may also\ninclude ``~`` to indicate current user's home directory.\n\nThe include is performed by removing the ``include#`` entry and loading the contents of\nthe specified file into the mapping that contained it. In cases where the mapping\nalready contains the key to be loaded, values will be merged using the usual\nmerge method (for overwrites, values in the mapping take precedence over those\nfrom the included files).\n\nBelow is an example of an agenda that includes other files. The assumption is\nthat all of those files are in one directory.\n\n.. code-block:: yaml\n\n    # agenda.yaml\n    config:\n       augmentations: [trace-cmd]\n       include#: ./my-config.yaml\n    sections:\n       - include#: ./section1.yaml\n       - include#: ./section2.yaml\n    include#: ./workloads.yaml\n\n.. code-block:: yaml\n\n   # my-config.yaml\n   augmentations: [cpufreq]\n\n\n.. code-block:: yaml\n\n   # section1.yaml\n   runtime_parameters:\n      frequency: max\n\n.. code-block:: yaml\n\n   # section2.yaml\n   runtime_parameters:\n      frequency: min\n\n.. 
code-block:: yaml\n\n   # workloads.yaml\n   workloads:\n      - dhrystone\n      - memcpy\n\nThe above is equivalent to having a single file like this:\n\n.. code-block:: yaml\n\n    # agenda.yaml\n    config:\n       augmentations: [cpufreq, trace-cmd]\n    sections:\n       - runtime_parameters:\n            frequency: max\n       - runtime_parameters:\n            frequency: min\n    workloads:\n       - dhrystone\n       - memcpy\n\nSome additional details about the implementation and its limitations:\n\n- The ``include#`` *must* be a key in a mapping, and the contents of the\n  included file *must* be a mapping as well; it is not possible to include a\n  list (e.g. in the examples above the ``workloads:`` part *must* be in the included\n  file).\n- Being a key in a mapping, there can only be one ``include#`` entry per block.\n- The included file *must* have a ``.yaml`` extension.\n- Nested inclusions *are* allowed. I.e. included files may themselves include\n  files; in such cases the included paths must be relative to *that* file, and\n  not the \"main\" file.\n\n"
  },
  {
    "path": "doc/source/user_information/user_reference/invocation.rst",
    "content": ".. _invocation:\n\nCommands\n========\n\nInstalling the wa package will add ``wa`` command to your system,\nwhich you can run from anywhere. This has a number of sub-commands, which can\nbe viewed by executing ::\n\n        wa -h\n\nIndividual sub-commands are discussed in detail below.\n\n.. _run-command:\n\nRun\n---\n\nThe most common sub-command you will use is ``run``. This will run the specified\nworkload(s) and process its resulting output. This takes a single mandatory\nargument which specifies what you want WA to run. This could be either a workload\nname, or a path to an agenda\" file that allows to specify multiple workloads as\nwell as a lot additional configuration (see :ref:`agenda` section for details).\nExecuting ::\n\n        wa run -h\n\nWill display help for this subcommand that will look something like this:\n\n.. code-block:: none\n\n        usage: wa run [-h] [-c CONFIG] [-v] [--version] [-d DIR] [-f] [-i ID]\n              [--disable INSTRUMENT]\n              AGENDA\n\n        Execute automated workloads on a remote device and process the resulting\n        output.\n\n        positional arguments:\n          AGENDA                Agenda for this workload automation run. This defines\n                                which workloads will be executed, how many times, with\n                                which tunables, etc. 
See example agendas in\n                                /usr/local/lib/python3.X/dist-packages/wa for an\n                                example of how this file should be structured.\n\n        optional arguments:\n          -h, --help            show this help message and exit\n          -c CONFIG, --config CONFIG\n                                specify an additional config.yaml\n          -v, --verbose         The scripts will produce verbose output.\n          --version             show program's version number and exit\n          -d DIR, --output-directory DIR\n                                Specify a directory where the output will be\n                                generated. If the directory already exists, the script\n                                will abort unless -f option (see below) is used, in\n                                which case the contents of the directory will be\n                                overwritten. If this option is not specified, then\n                                wa_output will be used instead.\n          -f, --force           Overwrite output directory if it exists. By default,\n                                the script will abort in this situation to prevent\n                                accidental data loss.\n          -i ID, --id ID        Specify a workload spec ID from an agenda to run. If\n                                this is specified, only that particular spec will be\n                                run, and other workloads in the agenda will be\n                                ignored. This option may be used to specify multiple\n                                IDs.\n          --disable INSTRUMENT  Specify an instrument or output processor to disable\n                                from the command line. This equivalent to adding\n                                \"~{metavar}\" to the instruments list in the\n                                agenda. 
This can be used to temporarily disable a\n                                troublesome instrument for a particular run without\n                                introducing permanent change to the config (which one\n                                might then forget to revert). This option may be\n                                specified multiple times.\n\n.. _list-command:\n\nList\n----\n\nThis lists all plugins of a particular type. For example ::\n\n        wa list instruments\n\nwill list all instruments currently included in WA. The list will consist of\nplugin names and short descriptions of the functionality they offer e.g.\n\n.. code-block:: none\n\n    #..\n               cpufreq:    Collects dynamic frequency (DVFS) settings before and after\n                           workload execution.\n                 dmesg:    Collected dmesg output before and during the run.\n    energy_measurement:    This instrument is designed to be used as an interface to\n                           the various energy measurement instruments located\n                           in devlib.\n        execution_time:    Measure how long it took to execute the run() methods of\n                           a Workload.\n           file_poller:    Polls the given files at a set sample interval. The values\n                           are output in CSV format.\n                   fps:    Measures Frames Per Second (FPS) and associated metrics for\n                           a workload.\n    #..\n\n\nYou can use the same syntax to quickly display information about ``commands``,\n``energy_instrument_backends``, ``instruments``, ``output_processors``, ``resource_getters``,\n``targets`` and ``workloads``.\n\n.. _show-command:\n\nShow\n----\n\nThis will show detailed information about a plugin (workloads, targets,\ninstruments etc.), including a full description and any relevant\nparameters/configuration that are available. 
For example executing ::\n\n        wa show benchmarkpi\n\nwill produce something like: ::\n\n\n        benchmarkpi\n        -----------\n\n        Measures the time the target device takes to run and complete the Pi\n        calculation algorithm.\n\n        http://androidbenchmark.com/howitworks.php\n\n        from the website:\n\n        The whole idea behind this application is to use the same Pi calculation\n        algorithm on every Android Device and check how fast that process is.\n        Better calculation times, conclude to faster Android devices. This way you\n        can also check how lightweight your custom made Android build is. Or not.\n\n        As Pi is an irrational number, Benchmark Pi does not calculate the actual Pi\n        number, but an approximation near the first digits of Pi over the same\n        calculation circles the algorithms needs.\n\n        So, the number you are getting in milliseconds is the time your mobile device\n        takes to run and complete the Pi calculation algorithm resulting in a\n        approximation of the first Pi digits.\n\n        parameters\n        ~~~~~~~~~~\n\n        cleanup_assets : boolean\n            If ``True``, if assets are deployed as part of the workload they\n            will be removed again from the device as part of finalize.\n\n            default: ``True``\n\n        package_name : str\n            The package name that can be used to specify\n            the workload apk to use.\n\n        install_timeout : integer\n            Timeout for the installation of the apk.\n\n            constraint: ``value > 0``\n\n            default: ``300``\n\n        version : str\n            The version of the package to be used.\n\n        variant : str\n            The variant of the package to be used.\n\n        strict : boolean\n            Whether to throw an error if the specified package cannot be found\n            on host.\n\n        force_install : boolean\n            Always re-install 
the APK, even if matching version is found already installed\n            on the device.\n\n        uninstall : boolean\n            If ``True``, will uninstall workload's APK as part of teardown.'\n\n        exact_abi : boolean\n            If ``True``, workload will check that the APK matches the target\n            device ABI, otherwise any suitable APK found will be used.\n\n        markers_enabled : boolean\n            If set to ``True``, workloads will insert markers into logs\n            at various points during execution. These markers may be used\n            by other plugins or post-processing scripts to provide\n            measurements or statistics for specific parts of the workload\n            execution.\n\n.. note:: You can also use this command to view global settings by using ``wa show settings``\n\n\n.. _create-command:\n\nCreate\n------\n\nThis aids in the creation of new WA-related objects for example agendas and workloads.\nFor more detailed information on creating workloads please see the\n:ref:`adding a workload <adding-a-workload-example>` section for more details.\n\nAs an example to create an agenda that will run the dhrystone and memcpy workloads\nthat will use the status and hwmon augmentations, run each test 3 times and save\ninto the file ``my_agenda.yaml`` the following command can be used::\n\n        wa create agenda dhrystone memcpy status hwmon -i 3 -o my_agenda.yaml\n\nWhich will produce something like::\n\n        config:\n            augmentations:\n            - status\n            - hwmon\n            status: {}\n            hwmon: {}\n            iterations: 3\n        workloads:\n        -   name: dhrystone\n            params:\n                cleanup_assets: true\n                delay: 0\n                duration: 0\n                mloops: 0\n                taskset_mask: 0\n                threads: 4\n        -   name: memcpy\n            params:\n                buffer_size: 5242880\n                cleanup_assets: 
true\n                cpus: null\n                iterations: 1000\n\nThis will be populated with default values which can then be customised for the\nparticular use case.\n\nAdditionally the create command can be used to initialize (and update) a\nPostgres database which can be used by the ``postgres`` output processor.\n\nMost of the database connection parameters have a default value, however they can\nbe overridden via command line arguments. When initializing the database WA will\nalso save the supplied parameters into the default user config file so that they\ndo not need to be specified each time the output processor is used.\n\nAs an example if we had a database server running at 10.0.0.2 using the\nstandard port we could use the following command to initialize a database for\nuse with WA::\n\n        wa create database -a 10.0.0.2 -u my_username -p Pa55w0rd\n\nThis will log into the database server with the supplied credentials and create\na database (defaulting to 'wa') and will save the configuration to the\n``~/.workload_automation/config.yaml`` file.\n\nWith updates to WA there may be changes to the database schema used. In this\ncase the create command can also be used with the ``-U`` flag to update the\ndatabase to use the new schema as follows::\n\n        wa create database -a 10.0.0.2 -u my_username -p Pa55w0rd -U\n\nThis will upgrade the database sequentially until the database schema is using\nthe latest version.\n\n.. _process-command:\n\nProcess\n--------\n\nThis command allows for output processors to be run on data that was produced by\na previous run.\n\nThere are 2 ways of specifying which processors you wish to use, either passing\nthem directly as arguments to the process command with the ``--processor``\nargument or by providing an additional config file with the ``--config``\nargument. 
Please note that by default the process command will not rerun\nprocessors that have already been run during the run; in order to force a rerun\nof the processors you can specify the ``--force`` argument.\n\nAdditionally if you have a directory containing multiple run directories you can\nspecify the ``--recursive`` argument which will cause WA to walk the specified\ndirectory processing all the WA output sub-directories individually.\n\n\nAs an example if we had performed multiple experiments and have the various WA\noutput directories in our ``my_experiments`` directory, and we now want to process\nthe outputs with a tool that only supports CSV files. We can easily generate CSV\nfiles for all the runs contained in our directory using the CSV processor by\nusing the following command::\n\n      wa process -r -p csv my_experiments\n\n\n.. _record_command:\n\nRecord\n------\n\nThis command simplifies the process of recording revent files. It will\nautomatically deploy revent and has options to automatically open apps and\nrecord specified stages of a workload. Revent allows you to record raw inputs\nsuch as screen swipes or button presses. This can be useful for recording inputs\nfor workloads such as games that don't have XML UI layouts that can be used with\nUIAutomator. As a drawback from this, revent recordings are specific to the\ndevice type they were recorded on. WA uses two parts to the names of revent\nrecordings in the format, ``{device_name}.{suffix}.revent``. - device_name can\neither be specified manually with the ``-d`` argument or it can be automatically\ndetermined. On Android devices it will be obtained from ``build.prop``, on Linux\ndevices it is obtained from ``/proc/device-tree/model``. - suffix is used by WA\nto determine which part of the app execution the recording is for, currently\nthese are either ``setup``, ``run``, ``extract_results`` or ``teardown``. 
All\nstages except ``run`` are optional for playback and to specify which stages\nshould be recorded the ``-s``, ``-r``, ``-e`` or ``-t`` arguments respectively,\nor optionally ``-a`` to indicate all stages should be recorded.\n\n\nThe full set of options for this command are::\n\n        usage: wa record [-h] [-c CONFIG] [-v] [--version] [-d DEVICE] [-o FILE] [-s]\n                         [-r] [-e] [-t] [-a] [-C] [-p PACKAGE | -w WORKLOAD]\n\n        optional arguments:\n          -h, --help            show this help message and exit\n          -c CONFIG, --config CONFIG\n                                specify an additional config.yaml\n          -v, --verbose         The scripts will produce verbose output.\n          --version             show program's version number and exit\n          -d DEVICE, --device DEVICE\n                                Specify the device on which to run. This will take\n                                precedence over the device (if any) specified in\n                                configuration.\n          -o FILE, --output FILE\n                                Specify the output file\n          -s, --setup           Record a recording for setup stage\n          -r, --run             Record a recording for run stage\n          -e, --extract_results Record a recording for extract_results stage\n          -t, --teardown        Record a recording for teardown stage\n          -a, --all             Record recordings for available stages\n          -C, --clear           Clear app cache before launching it\n          -p PACKAGE, --package PACKAGE\n                                Android package to launch before recording\n          -w WORKLOAD, --workload WORKLOAD\n                                Name of a revent workload (mostly games)\n\nFor more information please see :ref:`Revent Recording <revent-recording>`.\n\n.. 
_replay-command:\n\nReplay\n------\n\nAlongside ``record`` wa also has a command to playback a single recorded revent\nfile. It behaves similar to the ``record`` command taking a subset of the same\noptions allowing you to automatically launch a package on the device ::\n\n    usage: wa replay [-h] [-c CONFIG] [-v] [--debug] [--version] [-p PACKAGE] [-C]\n                 revent\n\n    positional arguments:\n      revent                The name of the file to replay\n\n    optional arguments:\n      -h, --help            show this help message and exit\n      -c CONFIG, --config CONFIG\n                            specify an additional config.py\n      -v, --verbose         The scripts will produce verbose output.\n      --debug               Enable debug mode. Note: this implies --verbose.\n      --version             show program's version number and exit\n      -p PACKAGE, --package PACKAGE\n                            Package to launch before recording\n      -C, --clear           Clear app cache before launching it\n\nFor more information please see :ref:`Revent Replaying  <revent_replaying>`.\n"
  },
  {
    "path": "doc/source/user_information/user_reference/output_directory.rst",
    "content": ".. _output_directory_structure:\n\nOutput Directory Structure\n==========================\n\nThis is an overview of WA output directory structure.\n\n.. note:: In addition to files and subdirectories described here,\n          other content may present in the output directory for\n          a run, depending on the enabled augmentations.\n\nOverview\n--------\n\nThe output directory will contain a subdirectory for every job iteration that\nwas run, as well as some additional entries.  The following diagram illustrates\nthe typical structure of WA output directory::\n\n        wa_output/\n        ├── __meta/\n        │   ├── config.json\n        │   ├── jobs.json\n        │   ├── raw_config\n        │   │   ├── cfg0-config.yaml\n        │   │   └── agenda.yaml\n        │   ├── run_info.json\n        │   └── target_info.json\n        ├── __failed/\n        │   └── wk1-dhrystone-1-attempt1\n        ├── wk1-dhrystone-1/\n        │   └── result.json\n        ├── wk1-dhrystone-2/\n        │   └── result.json\n        ├── wk2-memcpy-1/\n        │   └── result.json\n        ├── wk2-memcpy-2/\n        │   └── result.json\n        ├── result.json\n        └── run.log\n\nThis is the directory structure that would be generated after running two\niterations each of ``dhrystone`` and ``memcpy`` workloads with no augmentations\nenabled, and with the first attempt at the first iteration of dhrystone having\nfailed.\n\nYou may notice that a number of directories named ``wk*-x-x`` were generated in the\noutput directory structure. Each of these directories represents a\n:term:`job`. The name of the output directory is as stated :ref:`here <job_execution_subd>`.\n\n\nOutput Directory Entries\n------------------------\n\nresult.json\n        Contains a JSON structure describing the result of the execution,\n        including collected metrics and artifacts. There will be one for each\n        job execution, and one for the overall run. 
The run ``result.json`` will\n        only contain metrics/artifacts for the run as a whole, and will not\n        contain results for individual jobs.\n\n        You typically would not access ``result.json`` files directly. Instead\n        you would either enable augmentations to format the results in an easier to\n        manage form (such as a CSV table), or use :ref:`output_processing_api` to\n        access the results from scripts.\n\n\nrun.log\n        This is a log of everything that happened during the run, including all\n        interactions with the target, and all the decisions made by the\n        framework. The output is equivalent to what you would see on the console\n        when running with ``--verbose`` option.\n\n        .. note:: WA source contains a syntax file for Vim that will color the\n                  initial part of each log line, in a similar way to what you\n                  see on the console. This may be useful for quickly spotting\n                  error and warning messages when scrolling through the log.\n\n                  https://github.com/ARM-software/workload-automation/blob/next/extras/walog.vim\n\n__meta\n        This directory contains configuration and run metadata. See\n        :ref:`config_and_meta` below for details.\n\n__failed\n        This directory will only be present if one or more job executions have\n        failed and were re-run. This directory contains output directories for\n        the failed attempts.\n\n.. _job_execution_subd:\n\njob execution output subdirectory\n        Each subdirectory will be named ``<job id>_<workload label>_<iteration\n        number>``, and will, at minimum, contain a ``result.json`` (see above).\n        Additionally, it may contain raw output from the workload, and any\n        additional artifacts (e.g. traces) generated by augmentations. 
Finally,\n        if workload execution has failed, WA may gather some additional logging\n        (such as the UI state at the time of failure) and place it here.\n\n\n.. _config_and_meta:\n\nConfiguration and Metadata\n--------------------------\n\nAs stated above, the ``__meta`` directory contains run configuration and\nmetadata.  Typically, you would not access these files directly, but would use\nthe :ref:`output_processing_api` to query the metadata.\n\nFor more details about WA configuration see :ref:`configuration-specification`.\n\nconfig.json\n        Contains the overall run configuration, such as target interface\n        configuration, and job execution order, and various \"meta-configuration\"\n        settings, such as default output path, verbosity level, and logging\n        formatting.\n\njobs.json\n        Final configuration for all jobs, including enabled augmentations,\n        workload and runtime parameters, etc.\n\nraw_config\n        This directory contains copies of config file(s) and the agenda that\n        were parsed in order to generate configuration for this run. Each config\n        file is prefixed with ``cfg<N>-``, where ``<N>`` is the number\n        indicating the order (with respect to the other other config files) in\n        which it was parsed, e.g. ``cfg0-config.yaml`` is always a copy of\n        ``$WA_USER_DIRECTORY/config.yaml``. The one file without a prefix is the\n        agenda.\n\nrun_info.json\n        Run metadata, e.g. duration, start/end timestamps and duration.\n\ntarget_info.json\n        Extensive information about the target. This includes information about\n        the target's CPUS configuration, kernel and userspace versions, etc. The\n        exact content will vary depending on the target type (Android vs Linux)\n        and what could accessed on a particular device (e.g. if\n        ``/proc/config.gz`` exists on the target, the kernel config will be\n        included).\n"
  },
  {
    "path": "doc/source/user_information/user_reference/runtime_parameters.rst",
    "content": ".. _runtime-parameters:\n\nRuntime Parameters\n------------------\n\n.. contents:: Contents\n   :local:\n\nRuntime parameters are options that can be specified to automatically configure\ndevice at runtime. They can be specified at the global level in the agenda or\nfor individual workloads.\n\nExample\n^^^^^^^\nSay we want to perform an experiment on an Android big.LITTLE devices to compare\nthe power consumption between the big and LITTLE clusters running the dhrystone\nand benchmarkpi workloads. Assuming we have additional instrumentation active\nfor this device that can measure the power the device is consuming, to reduce\nexternal factors we want to ensure that the device is in airplane mode turned on\nfor all our tests and the screen is off only for our dhrystone run. We will then\nrun 2 :ref:`sections <sections>` will each enable a single cluster on the\ndevice, set the cores to their maximum frequency and disable all available idle\nstates.\n\n.. code-block:: yaml\n\n        config:\n            runtime_parameters:\n                  airplane_mode: true\n        #..\n        workloads:\n                - name: dhrystone\n                  iterations: 1\n                  runtime_parameters:\n                        screen_on: false\n                        unlock_screen: 'vertical'\n                - name: benchmarkpi\n                  iterations: 1\n        sections:\n                - id: LITTLES\n                  runtime_parameters:\n                        num_little_cores: 4\n                        little_governor: userspace\n                        little_frequency: max\n                        little_idle_states: none\n                        num_big_cores: 0\n\n                - id: BIGS\n                  runtime_parameters:\n                        num_big_cores: 4\n                        big_governor: userspace\n                        big_frequency: max\n                        big_idle_states: none\n                        
num_little_cores: 0\n\n\nHotPlug\n^^^^^^^\n\nParameters:\n\n:num_cores: An ``int`` that specifies the total number of cpu cores to be online.\n\n:num_<core_name>_cores: An ``int`` that specifies the total number of that particular core\n                              to be online, the target will be queried and if the core_names can\n                              be determined, a parameter for each of the unique core names will be\n                              available.\n\n:cpu<core_no>_online: A ``boolean`` that specifies whether that particular cpu, e.g. cpu0 will\n                            be online.\n\nIf big.LITTLE is detected for the device an additional 2 parameters are available:\n\n:num_big_cores: An ``int`` that specifies the total number of `big` cpu cores to be online.\n\n:num_little_cores: An ``int`` that specifies the total number of `little` cpu cores to be online.\n\n\n\n.. Note:: Please note that if the device in question is operating its own dynamic\n          hotplugging then WA may be unable to set the CPU state or will be overridden.\n          Unfortunately the method of disabling dynamic hot plugging will vary from\n          device to device.\n\n\nCPUFreq\n^^^^^^^\n\n:frequency: An ``int`` that can be used to specify a frequency for all cores if there are common frequencies available.\n\n.. 
Note:: When setting the frequency, if the governor is not set to userspace then WA will attempt to set the maximum\n          and minimum frequencies to mimic the desired behaviour.\n\n:max_frequency: An ``int`` that can be used to specify a maximum frequency for all cores if there are common frequencies available.\n\n:min_frequency: An ``int`` that can be used to specify a minimum frequency for all cores if there are common frequencies available.\n\n:governor: A ``string`` that can be used to specify the governor for all cores if there are common governors available.\n\n:gov_tunables: A ``dict`` that can be used to specify governor\n                   tunables for all cores, unlike the other common parameters these are not\n                   validated at the beginning of the run therefore incorrect values will cause\n                   an error during runtime.\n\n:<core_name>_frequency: An ``int`` that can be used to specify a frequency for cores of a particular type e.g. 'A72'.\n\n:<core_name>_max_frequency: An ``int`` that can be used to specify a maximum frequency for cores of a particular type e.g. 'A72'.\n\n:<core_name>_min_frequency: An ``int`` that can be used to specify a minimum frequency for cores of a particular type e.g. 'A72'.\n\n:<core_name>_governor: A ``string`` that can be used to specify the governor for cores of a particular type e.g. 'A72'.\n\n:<core_name>_gov_tunables: A ``dict`` that can be used to specify governor\n                         tunables for cores of a particular type e.g. 
'A72', these are not\n                         validated at the beginning of the run therefore incorrect values will cause\n                         an error during runtime.\n\n\n:cpu<no>_frequency: An ``int`` that can be used to specify a frequency for a particular core e.g. 'cpu0'.\n\n:cpu<no>_max_frequency: An ``int`` that can be used to specify a maximum frequency for a particular core e.g. 'cpu0'.\n\n:cpu<no>_min_frequency: An ``int`` that can be used to specify a minimum frequency for a particular core e.g. 'cpu0'.\n\n:cpu<no>_governor: A ``string`` that can be used to specify the governor for a particular core e.g. 'cpu0'.\n\n:cpu<no>_gov_tunables: A ``dict`` that can be used to specify governor\n                         tunables for a particular core e.g. 'cpu0', these are not\n                         validated at the beginning of the run therefore incorrect values will cause\n                         an error during runtime.\n\n\nIf big.LITTLE is detected for the device an additional set of parameters is available:\n\n:big_frequency: An ``int`` that can be used to specify a frequency for the big cores.\n\n:big_max_frequency: An ``int`` that can be used to specify a maximum frequency for the big cores.\n\n:big_min_frequency: An ``int`` that can be used to specify a minimum frequency for the big cores.\n\n:big_governor: A ``string`` that can be used to specify the governor for the big cores.\n\n:big_gov_tunables: A ``dict`` that can be used to specify governor\n                         tunables for the big cores, these are not\n                         validated at the beginning of the run therefore incorrect values will cause\n                         an error during runtime.\n\n:little_frequency: An ``int`` that can be used to specify a frequency for the 
little cores.\n\n:little_max_frequency: An ``int`` that can be used to specify a maximum frequency for the little cores.\n\n:little_min_frequency: An ``int`` that can be used to specify a minimum frequency for the little cores.\n\n:little_governor: A ``string`` that can be used to specify the governor for the little cores.\n\n:little_gov_tunables: A ``dict`` that can be used to specify governor\n                         tunables for the little cores, these are not\n                         validated at the beginning of the run therefore incorrect values will cause\n                         an error during runtime.\n\n\nCPUIdle\n^^^^^^^\n\n:idle_states: A ``string`` or list of strings which can be used to specify what\n            idle states should be enabled for all cores if there are common\n            idle states available. 'all' and 'none' are also valid entries as a\n            shorthand\n\n:<core_name>_idle_states: A ``string`` or list of strings which can be used to\n                          specify what idle states should be enabled for cores of a particular type\n                          e.g. 'A72'. 'all' and 'none' are also valid entries as a shorthand\n:cpu<no>_idle_states: A ``string`` or list of strings which can be used to\n                      specify what idle states should be enabled for a particular core e.g.\n                      'cpu0'. 'all' and 'none' are also valid entries as a shorthand\n\nIf big.LITTLE is detected for the device an additional set of parameters is available:\n\n:big_idle_states: A ``string`` or list of strings which can be used to specify\n    what idle states should be enabled for the big cores. 'all' and 'none' are\n    also valid entries as a shorthand\n:little_idle_states: A ``string`` or list of strings which can be used to\n    specify what idle states should be enabled for the little cores. 
'all' and\n    'none' are also valid entries as a shorthand.\n\n\nAndroid Specific Runtime Parameters\n^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n\n:brightness: An ``int`` between 0 and 255 (inclusive) to specify the brightness\n    the screen should be set to. Defaults to ``127``.\n\n:airplane_mode: A ``boolean`` to specify whether airplane mode should be\n    enabled for the device.\n\n:rotation: A ``String`` to specify the screen orientation for the device. Valid\n    entries are ``NATURAL``, ``LEFT``, ``INVERTED``, ``RIGHT``.\n\n:screen_on: A ``boolean`` to specify whether the device's screen should be\n    turned on. Defaults to ``True``.\n\n:unlock_screen: A ``String`` to specify how the device's screen should be\n    unlocked. Unlocking screen is disabled by default. ``vertical``, ``diagonal``\n    and ``horizontal`` are the supported values (see :meth:`devlib.AndroidTarget.swipe_to_unlock`).\n    Note that unlocking succeeds when no passcode is set. Since unlocking screen\n    requires turning on the screen, this option overrides value of ``screen_on``\n    option.\n\n.. _setting-sysfiles:\n\nSetting Sysfiles\n^^^^^^^^^^^^^^^^\nIn order to perform additional configuration of a target the ``sysfile_values``\nruntime parameter can be used. The value for this parameter is a mapping (an\nassociative array, in YAML) of file paths onto values that should be written\ninto those files. ``sysfile_values`` is the only runtime parameter that is\navailable for any (Linux) device. Other runtime parameters will depend on the\nspecifics of the device used (e.g. its CPU cores configuration) as detailed\nabove.\n\n.. note:: By default WA will attempt to verify that any sysfile values were\n   written correctly by reading the node back and comparing the two values. 
If\n   you do not wish this check to happen, for example the node you are writing to\n   is write only, you can append an ``!`` to the file path to disable this\n   verification.\n\nFor example the following configuration could be used to enable and verify that cpu0\nis online, however it will not attempt to check that its governor has been set to\nuserspace::\n\n                - name: dhrystone\n                runtime_params:\n                      sysfile_values:\n                            /sys/devices/system/cpu/cpu0/online: 1\n                            /sys/devices/system/cpu/cpu0/cpufreq/scaling_governor!: userspace\n"
  },
  {
    "path": "doc/source/user_information/user_reference.rst",
    "content": ".. _user_reference:\n\n***************\nUser Reference\n***************\n\n\n.. contents:: Contents\n   :depth: 2\n   :local:\n\n.. include:: user_information/user_reference/configuration.rst\n\n-------------------\n\n.. include:: user_information/user_reference/invocation.rst\n\n-------------------\n\n.. include:: user_information/user_reference/output_directory.rst\n"
  },
  {
    "path": "doc/source/user_information.rst",
    "content": "================\nUser Information\n================\n\n.. contents:: Contents\n   :depth: 4\n   :local:\n\n.. include:: user_information/installation.rst\n.. include:: user_information/user_guide.rst\n.. include:: user_information/how_to.rst\n.. include:: user_information/user_reference.rst\n"
  },
  {
    "path": "extras/Dockerfile",
    "content": "# This Dockerfile creates an image for use with Workload Automation\n# and/or devlib.\n#\n# To build this Docker image, please run the following command from\n# this directory:\n#\n#   docker build -t wa .\n#\n# This will create an image called wa, which is preconfigured to\n# run WA and devlib. Please note that the build process automatically\n# accepts the licenses for the Android SDK, so please be sure that you\n# are willing to accept these prior to building and running the image\n# in a container.\n#\n# To run the container, please run the following command from the\n# directory you wish to work from:\n#\n#   docker run -it --privileged -v /dev/bus/usb:/dev/bus/usb --volume ${PWD}:/workspace --workdir /workspace wa\n#\n# If using selinux you may need to add the `z` option when mounting\n# volumes e.g.:\n#   --volume ${PWD}:/workspace:z\n# Warning: Please ensure you do not use this option when mounting\n# system directores. For more information please see:\n# https://docs.docker.com/storage/bind-mounts/#configure-the-selinux-label\n#\n# The above command starts the container in privileged mode, with\n# access to USB devices. The current directory is mounted into the\n# image, allowing you to work from there. Any files written to this\n# directory are directly written to the host. Additional \"volumes\",\n# such as required assets, can be mounted into the container using a\n# second --volume command.\n#\n# If you require access to a TTY from the Docker container, please\n# also mount this into the container in the same style as is used to\n# mount USB devices. 
For example:\n#\n#   docker run -it --privileged -v /dev/ttyUSB0:/dev/ttyUSB0 -v /dev/bus/usb:/dev/bus/usb --volume ${PWD}:/workspace --workdir /workspace wa\n#\n# When you are finished, please run `exit` to leave the container.\n#\n# The relevant environment variables are stored in a separate\n# file which is automatically sourced in an interactive shell.\n# If running from a non-interactive environment this can\n# be manually sourced with `source /home/wa/.wa_environment`\n#\n# NOTE: Please make sure that the ADB server is NOT running on the\n# host. If in doubt, run `adb kill-server` before running the docker\n# container.\n#\n\n# We want to make sure to base this on a recent ubuntu release\nFROM ubuntu:20.04\n\n# Please update the references below to use different versions of\n# devlib, WA or the Android SDK\nARG DEVLIB_REF=v1.3.4\nARG WA_REF=v3.3.1\nARG ANDROID_SDK_URL=https://dl.google.com/android/repository/sdk-tools-linux-3859397.zip\n\n# Set a default timezone to use\nENV TZ=Europe/London\n\nARG DEBIAN_FRONTEND=noninteractive\nRUN apt-get update && apt-get install -y \\\napache2-utils \\\nbison \\\ncmake \\\ncurl \\\nemacs \\\nflex \\\ngit \\\nlibcdk5-dev \\\nlibiio-dev \\\nlibxml2 \\\nlibxml2-dev \\\nlocales \\\nnano \\\nopenjdk-8-jre-headless \\\npython3 \\\npython3-pip \\\nssh \\\nsshpass \\\nsudo \\\ntrace-cmd \\\nusbutils \\\nvim \\\nwget \\\nzip\n\n# Clone and download iio-capture\nRUN git clone -v https://github.com/BayLibre/iio-capture.git /tmp/iio-capture && \\\n    cd /tmp/iio-capture && \\\n    make && \\\n    make install\n\nRUN pip3 install pandas\n\n# Ensure we're using utf-8 as our default encoding\nRUN locale-gen en_US.UTF-8\nENV LANG en_US.UTF-8\nENV LANGUAGE en_US:en\nENV LC_ALL en_US.UTF-8\n\n# Let's get the two repos we need, and install them\nRUN git clone -v https://github.com/ARM-software/devlib.git /tmp/devlib && \\\n    cd /tmp/devlib && \\\n    git checkout $DEVLIB_REF && \\\n    python3 setup.py install && \\\n    pip3 install 
.[full]\nRUN git clone -v https://github.com/ARM-software/workload-automation.git /tmp/wa && \\\n    cd /tmp/wa && \\\n    git checkout $WA_REF && \\\n    python3 setup.py install && \\\n    pip3 install .[all]\n\n# Clean-up\nRUN rm -R /tmp/devlib /tmp/wa\n\n# Create and switch to the wa user\nRUN useradd -m -G plugdev,dialout wa\nUSER wa\n\n# Let's set up the Android SDK for the user\nRUN mkdir -p /home/wa/.android\nRUN mkdir -p /home/wa/AndroidSDK && cd /home/wa/AndroidSDK && wget $ANDROID_SDK_URL -O sdk.zip && unzip sdk.zip\nRUN cd /home/wa/AndroidSDK/tools/bin && yes | ./sdkmanager --licenses && ./sdkmanager platform-tools && ./sdkmanager 'build-tools;27.0.3'\n\n# Download Monsoon\nRUN mkdir -p /home/wa/monsoon\nRUN curl https://android.googlesource.com/platform/cts/+/master/tools/utils/monsoon.py\\?format\\=TEXT | base64 --decode > /home/wa/monsoon/monsoon.py\nRUN chmod +x /home/wa/monsoon/monsoon.py\n\n# Update WA's required environment variables.\nRUN echo 'export PATH=/home/wa/monsoon:${PATH}' >> /home/wa/.wa_environment\nRUN echo 'export PATH=/home/wa/AndroidSDK/platform-tools:${PATH}' >> /home/wa/.wa_environment\nRUN echo 'export PATH=/home/wa/AndroidSDK/build-tools:${PATH}' >> /home/wa/.wa_environment\nRUN echo 'export ANDROID_HOME=/home/wa/AndroidSDK' >> /home/wa/.wa_environment\n\n# Source WA environment variables in an interactive environment\nRUN echo 'source /home/wa/.wa_environment' >> /home/wa/.bashrc\n\n# Generate some ADB keys. These will change each time the image is build but will otherwise persist.\nRUN /home/wa/AndroidSDK/platform-tools/adb keygen /home/wa/.android/adbkey\n\n# We need to make sure to add the remote assets too\nRUN wa --version && echo 'remote_assets_url: https://raw.githubusercontent.com/ARM-software/workload-automation-assets/master/dependencies' >> /home/wa/.workload_automation/config.yaml\n\n"
  },
  {
    "path": "extras/README",
    "content": "This directory is intended for miscellaneous extra stuff that may be\nuseful while developing Workload Automation. It should *NOT* contain\nanything necessary for *using* workload automation.  Whenever you add\nsomething to this directory, please also add a short description of\nwhat it is in this file.\n\nDockerfile\n        Docker file for generating a Docker image containing WA,\n        devlib, and the required parts of the Android SDK. This can be\n        run in a container to avoid configuring WA on the host. Should\n        work \"out of the box\".\n\npylintrc \n        pylint configuration file set up for WA development (see\n        comment at the top of the file for how to use).\n\nwalog.vim\n        Vim syntax file for WA logs; adds highlighting similar to what\n        comes out in the console. See comment in the file for how to\n        enable it.\n"
  },
  {
    "path": "extras/pylintrc",
    "content": "#\n# pylint configuration for Workload Automation.\n#\n# To install pylint run\n#\n#      sudo apt-get install pylint\n#\n# copy this file to ~/.pylintrc in order for pylint to pick it up.\n# (Or alternatively, specify it with --rcfile option on invocation.)\n#\n# Note: If you're adding something to disable setting, please also add the\n#       explanation of the code in the comment above it. Messages should only\n#       be added here we really don't *ever* care about them. For ignoring\n#       messages on specific lines or in specific files, add the appropriate\n#       pylint disable clause in the source.\n#\n[MASTER]\n\n#profile=no\n\nignore=external\n\n[MESSAGES CONTROL]\n# Disable the following messags:\n# C0301: Line too long (%s/%s)\n# C0103: Invalid name \"%s\" (should match %s)\n# C0111: Missing docstring\n# W0142 - Used * or ** magic\n# R0903: Too few public methods\n# R0904: Too many public methods\n# R0922: Abstract class is only referenced 1 times\n# W0511: TODO Note: this is disabled for a cleaner output, but should be reenabled\n#                   occasionally (through command line argument) to make sure all\n#                   TODO's are addressed, e.g. 
before a release.\n# W0141: Used builtin function (map|filter)\n# I0011: Locally disabling %s\n# R0921: %s: Abstract class not referenced\n#        Note: this needs to be in the rc file due to a known bug in pylint:\n#              http://www.logilab.org/ticket/111138\n# W1401: nomalous-backslash-in-string, due to:\n#        https://bitbucket.org/logilab/pylint/issue/272/anomalous-backslash-in-string-for-raw\n# C0330: bad continuation, due to:\n#        https://bitbucket.org/logilab/pylint/issue/232/wrong-hanging-indentation-false-positive\n# TODO:  disabling no-value-for-parameter and logging-format-interpolation, as they appear to be broken\n#        in version 1.4.1 and return a lot of false postives; should be re-enabled once fixed.\ndisable=C0301,C0103,C0111,W0142,R0903,R0904,R0922,W0511,W0141,I0011,R0921,W1401,C0330,no-value-for-parameter,logging-format-interpolation,no-else-return,inconsistent-return-statements,keyword-arg-before-vararg,consider-using-enumerate,no-member,super-with-arguments,useless-object-inheritance,raise-missing-from,no-else-raise,no-else-break,no-else-continue\n\n[FORMAT]\nmax-module-lines=4000\n\n[DESIGN]\n\n# We have DeviceConfig classes that are basically just repositories of confuration\n# settings.\nmax-args=30\nmax-attributes=30\n\n\n[SIMILARITIES]\n\nmin-similarity-lines=10\n\n[REPORTS]\n\noutput-format=colorized\n\nreports=no\n\n[IMPORTS]\n\n# Parts of string are not deprecated. Throws too many false positives.\ndeprecated-modules=\n"
  },
  {
    "path": "extras/walog.vim",
    "content": "\" Copy this into ~/.vim/syntax/ and add the following to your ~/.vimrc:\n\"     au BufRead,BufNewFile run.log set filetype=walog\n\"\nif exists(\"b:current_syntax\")\n  finish\nendif\n\nsyn region debugPreamble start='\\d\\d\\d\\d-\\d\\d-\\d\\d \\d\\d:\\d\\d:\\d\\d,\\d\\d\\d DEBUG' end=':' \nsyn region infoPreamble start='\\d\\d\\d\\d-\\d\\d-\\d\\d \\d\\d:\\d\\d:\\d\\d,\\d\\d\\d INFO' end=':' \nsyn region warningPreamble start='\\d\\d\\d\\d-\\d\\d-\\d\\d \\d\\d:\\d\\d:\\d\\d,\\d\\d\\d WARNING' end=':' \nsyn region errorPreamble start='\\d\\d\\d\\d-\\d\\d-\\d\\d \\d\\d:\\d\\d:\\d\\d,\\d\\d\\d ERROR' end=':' \nsyn region critPreamble start='\\d\\d\\d\\d-\\d\\d-\\d\\d \\d\\d:\\d\\d:\\d\\d,\\d\\d\\d CRITICAL' end=':' \n\nhi debugPreamble guifg=Blue  ctermfg=DarkBlue\nhi infoPreamble guifg=Green  ctermfg=DarkGreen\nhi warningPreamble guifg=Yellow  ctermfg=178\nhi errorPreamble guifg=Red  ctermfg=DarkRed\nhi critPreamble guifg=Red  ctermfg=DarkRed cterm=bold gui=bold\n\nlet b:current_syntax='walog'\n\n"
  },
  {
    "path": "pytest.ini",
    "content": "[pytest]\nfilterwarnings=\n    ignore::DeprecationWarning:past[.*]\n"
  },
  {
    "path": "requirements.txt",
    "content": "bcrypt==4.0.1\ncertifi==2024.7.4\ncffi==1.15.1\ncharset-normalizer==3.1.0\ncolorama==0.4.6\ncryptography==44.0.1\ndevlib==1.3.4\nfuture==0.18.3\nidna==3.7\nLouie-latest==1.3.1\nlxml==4.9.2\nnose==1.3.7\nnumpy==1.24.3\npandas==2.0.1\nparamiko==3.4.0\npexpect==4.8.0\nptyprocess==0.7.0\npycparser==2.21\nPyNaCl==1.5.0\npyserial==3.5\npython-dateutil==2.8.2\npytz==2023.3\nPyYAML==6.0\nrequests==2.32.4\nscp==0.14.5\nsix==1.16.0\ntzdata==2023.3\nurllib3==2.5.0\nwlauto==3.3.1\nwrapt==1.15.0\n"
  },
  {
    "path": "scripts/cpustates",
    "content": "#!/usr/bin/env python\n#    Copyright 2015 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\nfrom wlauto.utils.power import main\nmain()\n"
  },
  {
    "path": "scripts/wa",
    "content": "#!/usr/bin/env python\n#    Copyright 2013-2015 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\nfrom wa.framework.entrypoint import main\nmain()\n"
  },
  {
    "path": "setup.py",
    "content": "#    Copyright 2013-2015 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\nimport os\nimport sys\nimport warnings\nfrom itertools import chain\n\ntry:\n    from setuptools import setup\n    from setuptools.command.sdist import sdist as orig_sdist\nexcept ImportError:\n    from distutils.core import setup\n    from distutils.command.sdist import sdist as orig_sdist\n\n\nwa_dir = os.path.join(os.path.dirname(__file__), 'wa')\n\nsys.path.insert(0, os.path.join(wa_dir, 'framework'))\nfrom version import (get_wa_version, get_wa_version_with_commit,\n                     format_version, required_devlib_version)\n\n# happens if falling back to distutils\nwarnings.filterwarnings('ignore', \"Unknown distribution option: 'install_requires'\")\nwarnings.filterwarnings('ignore', \"Unknown distribution option: 'extras_require'\")\n\ntry:\n    os.remove('MANIFEST')\nexcept OSError:\n    pass\n\npackages = []\ndata_files = {'': [os.path.join(wa_dir, 'commands', 'postgres_schema.sql')]}\nsource_dir = os.path.dirname(__file__)\nfor root, dirs, files in os.walk(wa_dir):\n    rel_dir = os.path.relpath(root, source_dir)\n    data = []\n    if '__init__.py' in files:\n        for f in files:\n            if os.path.splitext(f)[1] not in ['.py', '.pyc', '.pyo']:\n                data.append(f)\n        package_name = rel_dir.replace(os.sep, '.')\n        package_dir = root\n        packages.append(package_name)\n        data_files[package_name] = 
data\n    else:\n        # use previous package name\n        filepaths = [os.path.join(root, f) for f in files]\n        data_files[package_name].extend([os.path.relpath(f, package_dir) for f in filepaths])\n\nscripts = [os.path.join('scripts', s) for s in os.listdir('scripts')]\n\nwith open(\"README.rst\", \"r\") as fh:\n    long_description = fh.read()\n\ndevlib_version = format_version(required_devlib_version)\nparams = dict(\n    name='wlauto',\n    description='A framework for automating workload execution and measurement collection on ARM devices.',\n    long_description=long_description,\n    version=get_wa_version_with_commit(),\n    packages=packages,\n    package_data=data_files,\n    include_package_data=True,\n    scripts=scripts,\n    url='https://github.com/ARM-software/workload-automation',\n    license='Apache v2',\n    maintainer='ARM Architecture & Technology Device Lab',\n    maintainer_email='workload-automation@arm.com',\n    python_requires='>= 3.7',\n    setup_requires=[\n        'numpy<=1.16.4; python_version<\"3\"',\n        'numpy; python_version>=\"3\"',\n    ],\n    install_requires=[\n        'python-dateutil',  # converting between UTC and local time.\n        'pexpect>=3.3',  # Send/receive to/from device\n        'pyserial',  # Serial port interface\n        'colorama',  # Printing with colors\n        'pyYAML>=5.1b3',  # YAML-formatted agenda parsing\n        'requests',  # Fetch assets over HTTP\n        'devlib>={}'.format(devlib_version),  # Interacting with devices\n        'louie-latest',  # callbacks dispatch\n        'wrapt',  # better decorators\n        'pandas>=0.23.0,<=0.24.2; python_version<\"3.5.3\"',  # Data analysis and manipulation\n        'pandas>=0.23.0; python_version>=\"3.5.3\"',  # Data analysis and manipulation\n        'future',  # Python 2-3 compatiblity\n    ],\n    dependency_links=['https://github.com/ARM-software/devlib/tarball/master#egg=devlib-{}'.format(devlib_version)],\n    extras_require={\n       
 'test': ['nose', 'mock'],\n        'notify': ['notify2'],\n        'doc': ['sphinx', 'sphinx_rtd_theme'],\n        'postgres': ['psycopg2-binary'],\n        'daq': ['daqpower'],\n    },\n    # https://pypi.python.org/pypi?%3Aaction=list_classifiers\n    classifiers=[\n        'Development Status :: 5 - Production/Stable',\n        'Environment :: Console',\n        'License :: OSI Approved :: Apache Software License',\n        'Operating System :: POSIX :: Linux',\n        'Programming Language :: Python :: 3',\n    ],\n)\n\nall_extras = list(chain(iter(params['extras_require'].values())))\nparams['extras_require']['all'] = all_extras\n\n\nclass sdist(orig_sdist):\n\n    user_options = orig_sdist.user_options + [\n        ('strip-commit', 's',\n         \"Strip git commit hash from package version \")\n    ]\n\n    def initialize_options(self):\n        orig_sdist.initialize_options(self)\n        self.strip_commit = False\n\n    def run(self):\n        if self.strip_commit:\n            self.distribution.get_version = get_wa_version\n        orig_sdist.run(self)\n\n\nparams['cmdclass'] = {'sdist': sdist}\n\nsetup(**params)\n"
  },
  {
    "path": "tests/__init__.py",
    "content": ""
  },
  {
    "path": "tests/ci/idle_agenda.yaml",
    "content": "config:\n    iterations: 1\n    augmentations:\n        - ~~\n        - status\n    device: generic_local\n    device_config:\n        big_core: null\n        core_clusters: null\n        core_names: null\n        executables_directory: null\n        keep_password: true\n        load_default_modules: false\n        model: null\n        modules: null\n        password: null\n        shell_prompt: !<tag:wa:regex> '40:^.*(shell|root|juno)@?.*:[/~]\\S* *[#$] '\n        unrooted: True\n        working_directory: null\nworkloads:\n-   name: idle\n    params:\n        duration: 1\n"
  },
  {
    "path": "tests/data/bad-syntax-agenda.yaml",
    "content": "config:\n      # tab on the following line\n          reboot_policy: never\n  workloads:\n      - antutu\n\n"
  },
  {
    "path": "tests/data/extensions/devices/test_device.py",
    "content": "#    Copyright 2013-2017 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\n\nfrom wa import Plugin\n\n\nclass MockDevice(Plugin):\n\n    name = 'test-device'\n    kind = 'device'\n\n    def __init__(self, *args, **kwargs):\n        self.boot_called = 0\n        self.push_file_called = 0\n        self.pull_file_called = 0\n        self.execute_called = 0\n        self.set_sysfile_int_called = 0\n        self.close_called = 0\n\n    def boot(self):\n        self.boot_called += 1\n\n    def push_file(self, source, dest):\n        self.push_file_called += 1\n\n    def pull_file(self, source, dest):\n        self.pull_file_called += 1\n\n    def execute(self, command):\n        self.execute_called += 1\n\n    def set_sysfile_int(self, file, value):\n        self.set_sysfile_int_called += 1\n\n    def close(self, command):\n        self.close_called += 1\n"
  },
  {
    "path": "tests/data/includes/agenda.yaml",
    "content": "config:\n    augmentations: [~execution_time]\n    include#: configs/test.yaml\nsections:\n    - include#: sections/section1.yaml\n    - include#: sections/section2.yaml\ninclude#: workloads.yaml\n"
  },
  {
    "path": "tests/data/includes/configs/test.yaml",
    "content": "augmentations: [cpufreq, trace-cmd]\n"
  },
  {
    "path": "tests/data/includes/section-include.yaml",
    "content": "classifiers:\n    included: true\n"
  },
  {
    "path": "tests/data/includes/sections/section1.yaml",
    "content": "classifiers: {'section': 'one'}\n"
  },
  {
    "path": "tests/data/includes/sections/section2.yaml",
    "content": "classifiers: {'section': 'two'}\ninclude#: ../section-include.yaml\n"
  },
  {
    "path": "tests/data/includes/user/config.yaml",
    "content": "augmentations: [execution_time]\n\n"
  },
  {
    "path": "tests/data/includes/workloads.yaml",
    "content": "workloads:\n    - dhrystone\n    - name: memcpy\n      classifiers:\n          memcpy: True\n"
  },
  {
    "path": "tests/data/interrupts/after",
    "content": "           CPU0       CPU1       CPU2       CPU3       CPU4       CPU5       CPU6       CPU7       \n 65:          0          0          0          0          0          0          0          0       GIC  dma-pl330.2\n 66:          0          0          0          0          0          0          0          0       GIC  dma-pl330.0\n 67:          0          0          0          0          0          0          0          0       GIC  dma-pl330.1\n 74:          0          0          0          0          0          0          0          0       GIC  s3c2410-wdt\n 85:          2          0          0          0          0          0          0          0       GIC  exynos4210-uart\n 89:        368          0          0          0          0          0          0          0       GIC  s3c2440-i2c.1\n 90:          0          0          0          0          0          0          0          0       GIC  s3c2440-i2c.2\n 92:       1294          0          0          0          0          0          0          0       GIC  exynos5-hs-i2c.0\n 95:        831          0          0          0          0          0          0          0       GIC  exynos5-hs-i2c.3\n103:          1          0          0          0          0          0          0          0       GIC  ehci_hcd:usb1, ohci_hcd:usb2\n104:       7304          0          0          0          0          0          0          0       GIC  xhci_hcd:usb3, exynos-ss-udc.0\n105:          0          0          0          0          0          0          0          0       GIC  xhci_hcd:usb5\n106:          0          0          0          0          0          0          0          0       GIC  mali.0\n107:      16429          0          0          0          0          0          0          0       GIC  dw-mci\n108:          1          0          0          0          0          0          0          0       GIC  dw-mci\n109:          0          0          0          0          0          0          0      
    0       GIC  dw-mci\n114:      28074          0          0          0          0          0          0          0       GIC  mipi-dsi\n117:          0          0          0          0          0          0          0          0       GIC  exynos-gsc\n118:          0          0          0          0          0          0          0          0       GIC  exynos-gsc\n121:          0          0          0          0          0          0          0          0       GIC  exynos5-jpeg-hx\n123:          7          0          0          0          0          0          0          0       GIC  s5p-fimg2d\n126:          0          0          0          0          0          0          0          0       GIC  s5p-mixer\n127:          0          0          0          0          0          0          0          0       GIC  hdmi-int\n128:          0          0          0          0          0          0          0          0       GIC  s5p-mfc-v6\n142:          0          0          0          0          0          0          0          0       GIC  dma-pl330.3\n146:          0          0          0          0          0          0          0          0       GIC  s5p-tvout-cec\n149:       1035          0          0          0          0          0          0          0       GIC  mali.0\n152:      26439          0          0          0          0          0          0          0       GIC  mct_tick0\n153:          0       2891          0          0          0          0          0          0       GIC  mct_tick1\n154:          0          0       3969          0          0          0          0          0       GIC  mct_tick2\n155:          0          0          0       2385          0          0          0          0       GIC  mct_tick3\n160:          0          0          0          0       8038          0          0          0       GIC  mct_tick4\n161:          0          0          0          0          0       8474          0          0       GIC  mct_tick5\n162:     
     0          0          0          0          0          0       7842          0       GIC  mct_tick6\n163:          0          0          0          0          0          0          0       7827       GIC  mct_tick7\n200:          0          0          0          0          0          0          0          0       GIC  exynos5-jpeg-hx\n201:          0          0          0          0          0          0          0          0       GIC  exynos-sysmmu.29\n218:          0          0          0          0          0          0          0          0       GIC  exynos-sysmmu.25\n220:          0          0          0          0          0          0          0          0       GIC  exynos-sysmmu.27\n224:          0          0          0          0          0          0          0          0       GIC  exynos-sysmmu.19\n251:        320          0          0          0          0          0          0          0       GIC  mali.0\n252:          0          0          0          0          0          0          0          0       GIC  exynos5-scaler\n253:          0          0          0          0          0          0          0          0       GIC  exynos5-scaler\n254:          0          0          0          0          0          0          0          0       GIC  exynos5-scaler\n272:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.5\n274:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.6\n280:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.11\n282:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.30\n284:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.12\n286:          0          0          0          0          0          0          0         
 0  combiner  exynos-sysmmu.17\n288:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.4\n290:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.20\n294:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.9\n296:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.9\n298:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.9\n300:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.9\n302:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.16\n306:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.0\n316:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.2\n325:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.0\n332:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.16\n340:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.16\n342:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.9\n344:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.16\n405:        327          0          0          0          0          0          0          0  combiner  s3c_fb\n409:          0          0          0          0          0          0          0          0  combiner  mcuctl\n414:          0          0          0        
  0          0          0          0          0  combiner  exynos-sysmmu.28\n434:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.22\n436:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.23\n438:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.26\n443:         12          0          0          0          0          0          0          0  combiner  mct_comp_irq\n446:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.21\n449:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.13\n453:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.15\n474:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.24\n512:          0          0          0          0          0          0          0          0  exynos-eint  gpio-keys: KEY_POWER\n518:          0          0          0          0          0          0          0          0  exynos-eint  drd_switch_vbus\n524:          0          0          0          0          0          0          0          0  exynos-eint  gpio-keys: KEY_HOMEPAGE\n526:          1          0          0          0          0          0          0          0  exynos-eint  HOST_DETECT\n527:          1          0          0          0          0          0          0          0  exynos-eint  drd_switch_id\n531:          1          0          0          0          0          0          0          0  exynos-eint  drd_switch_vbus\n532:          1          0          0          0          0          0          0          0  exynos-eint  drd_switch_id\n537:          3          0          0          0          0          0        
  0          0  exynos-eint  mxt540e_ts\n538:          0          0          0          0          0          0          0          0  exynos-eint  sec-pmic-irq\n543:          1          0          0          0          0          0          0          0  exynos-eint  hdmi-ext\n544:          0          0          0          0          0          0          0          0  s5p_gpioint  gpio-keys: KEY_VOLUMEDOWN\n545:          0          0          0          0          0          0          0          0  s5p_gpioint  gpio-keys: KEY_VOLUMEUP\n546:          0          0          0          0          0          0          0          0  s5p_gpioint  gpio-keys: KEY_MENU\n547:          0          0          0          0          0          0          0          0  s5p_gpioint  gpio-keys: KEY_BACK\n655:          0          0          0          0          0          0          0          0  sec-pmic  rtc-alarm0\nIPI0:          0          0          0          0          0          0          0          0  Timer broadcast interrupts\nIPI1:       8823       7185       4642       5652       2370       2069       1452       1351  Rescheduling interrupts\nIPI2:          4          7          8          6          8          7          8          8  Function call interrupts\nIPI3:          1          0          0          0          0          0          0          0  Single function call interrupts\nIPI4:          0          0          0          0          0          0          0          0  CPU stop interrupts\nIPI5:          0          0          0          0          0          0          0          0  CPU backtrace\nErr:          0\n"
  },
  {
    "path": "tests/data/interrupts/before",
    "content": "           CPU0       CPU1       CPU2       CPU3       CPU4       CPU5       CPU6       CPU7       \n 65:          0          0          0          0          0          0          0          0       GIC  dma-pl330.2\n 66:          0          0          0          0          0          0          0          0       GIC  dma-pl330.0\n 67:          0          0          0          0          0          0          0          0       GIC  dma-pl330.1\n 74:          0          0          0          0          0          0          0          0       GIC  s3c2410-wdt\n 85:          2          0          0          0          0          0          0          0       GIC  exynos4210-uart\n 89:        368          0          0          0          0          0          0          0       GIC  s3c2440-i2c.1\n 90:          0          0          0          0          0          0          0          0       GIC  s3c2440-i2c.2\n 92:       1204          0          0          0          0          0          0          0       GIC  exynos5-hs-i2c.0\n 95:        831          0          0          0          0          0          0          0       GIC  exynos5-hs-i2c.3\n103:          1          0          0          0          0          0          0          0       GIC  ehci_hcd:usb1, ohci_hcd:usb2\n104:       7199          0          0          0          0          0          0          0       GIC  xhci_hcd:usb3, exynos-ss-udc.0\n105:          0          0          0          0          0          0          0          0       GIC  xhci_hcd:usb5\n106:          0          0          0          0          0          0          0          0       GIC  mali.0\n107:      16429          0          0          0          0          0          0          0       GIC  dw-mci\n108:          1          0          0          0          0          0          0          0       GIC  dw-mci\n109:          0          0          0          0          0          0          0      
    0       GIC  dw-mci\n114:      26209          0          0          0          0          0          0          0       GIC  mipi-dsi\n117:          0          0          0          0          0          0          0          0       GIC  exynos-gsc\n118:          0          0          0          0          0          0          0          0       GIC  exynos-gsc\n121:          0          0          0          0          0          0          0          0       GIC  exynos5-jpeg-hx\n123:          7          0          0          0          0          0          0          0       GIC  s5p-fimg2d\n126:          0          0          0          0          0          0          0          0       GIC  s5p-mixer\n127:          0          0          0          0          0          0          0          0       GIC  hdmi-int\n128:          0          0          0          0          0          0          0          0       GIC  s5p-mfc-v6\n142:          0          0          0          0          0          0          0          0       GIC  dma-pl330.3\n146:          0          0          0          0          0          0          0          0       GIC  s5p-tvout-cec\n149:       1004          0          0          0          0          0          0          0       GIC  mali.0\n152:      26235          0          0          0          0          0          0          0       GIC  mct_tick0\n153:          0       2579          0          0          0          0          0          0       GIC  mct_tick1\n154:          0          0       3726          0          0          0          0          0       GIC  mct_tick2\n155:          0          0          0       2262          0          0          0          0       GIC  mct_tick3\n161:          0          0          0          0          0       2554          0          0       GIC  mct_tick5\n162:          0          0          0          0          0          0       1911          0       GIC  mct_tick6\n163:     
     0          0          0          0          0          0          0       1928       GIC  mct_tick7\n200:          0          0          0          0          0          0          0          0       GIC  exynos5-jpeg-hx\n201:          0          0          0          0          0          0          0          0       GIC  exynos-sysmmu.29\n218:          0          0          0          0          0          0          0          0       GIC  exynos-sysmmu.25\n220:          0          0          0          0          0          0          0          0       GIC  exynos-sysmmu.27\n224:          0          0          0          0          0          0          0          0       GIC  exynos-sysmmu.19\n251:        312          0          0          0          0          0          0          0       GIC  mali.0\n252:          0          0          0          0          0          0          0          0       GIC  exynos5-scaler\n253:          0          0          0          0          0          0          0          0       GIC  exynos5-scaler\n254:          0          0          0          0          0          0          0          0       GIC  exynos5-scaler\n272:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.5\n274:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.6\n280:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.11\n282:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.30\n284:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.12\n286:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.17\n288:          0          0          0          0          0          0          0  
        0  combiner  exynos-sysmmu.4\n290:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.20\n294:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.9\n296:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.9\n298:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.9\n300:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.9\n302:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.16\n306:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.0\n316:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.2\n325:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.0\n332:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.16\n340:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.16\n342:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.9\n344:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.16\n405:        322          0          0          0          0          0          0          0  combiner  s3c_fb\n409:          0          0          0          0          0          0          0          0  combiner  mcuctl\n414:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.28\n434:          0          0          0 
         0          0          0          0          0  combiner  exynos-sysmmu.22\n436:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.23\n438:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.26\n443:         12          0          0          0          0          0          0          0  combiner  mct_comp_irq\n446:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.21\n449:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.13\n453:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.15\n474:          0          0          0          0          0          0          0          0  combiner  exynos-sysmmu.24\n512:          0          0          0          0          0          0          0          0  exynos-eint  gpio-keys: KEY_POWER\n518:          0          0          0          0          0          0          0          0  exynos-eint  drd_switch_vbus\n524:          0          0          0          0          0          0          0          0  exynos-eint  gpio-keys: KEY_HOMEPAGE\n526:          1          0          0          0          0          0          0          0  exynos-eint  HOST_DETECT\n527:          1          0          0          0          0          0          0          0  exynos-eint  drd_switch_id\n531:          1          0          0          0          0          0          0          0  exynos-eint  drd_switch_vbus\n532:          1          0          0          0          0          0          0          0  exynos-eint  drd_switch_id\n537:          3          0          0          0          0          0          0          0  exynos-eint  mxt540e_ts\n538:          0          0          0          0          0          0    
      0          0  exynos-eint  sec-pmic-irq\n543:          1          0          0          0          0          0          0          0  exynos-eint  hdmi-ext\n544:          0          0          0          0          0          0          0          0  s5p_gpioint  gpio-keys: KEY_VOLUMEDOWN\n545:          0          0          0          0          0          0          0          0  s5p_gpioint  gpio-keys: KEY_VOLUMEUP\n546:          0          0          0          0          0          0          0          0  s5p_gpioint  gpio-keys: KEY_MENU\n547:          0          0          0          0          0          0          0          0  s5p_gpioint  gpio-keys: KEY_BACK\n655:          0          0          0          0          0          0          0          0  sec-pmic  rtc-alarm0\nIPI0:          0          0          0          0          0          0          0          0  Timer broadcast interrupts\nIPI1:       8751       7147       4615       5623       2334       2066       1449       1348  Rescheduling interrupts\nIPI2:          3          6          7          6          7          6          7          7  Function call interrupts\nIPI3:          1          0          0          0          0          0          0          0  Single function call interrupts\nIPI4:          0          0          0          0          0          0          0          0  CPU stop interrupts\nIPI5:          0          0          0          0          0          0          0          0  CPU backtrace\nErr:          0\n"
  },
  {
    "path": "tests/data/interrupts/result",
    "content": "        CPU0 CPU1 CPU2 CPU3 CPU4 CPU5 CPU6 CPU7                                      \n    65:    0    0    0    0    0    0    0    0                       GIC dma-pl330.2\n    66:    0    0    0    0    0    0    0    0                       GIC dma-pl330.0\n    67:    0    0    0    0    0    0    0    0                       GIC dma-pl330.1\n    74:    0    0    0    0    0    0    0    0                       GIC s3c2410-wdt\n    85:    0    0    0    0    0    0    0    0                   GIC exynos4210-uart\n    89:    0    0    0    0    0    0    0    0                     GIC s3c2440-i2c.1\n    90:    0    0    0    0    0    0    0    0                     GIC s3c2440-i2c.2\n    92:   90    0    0    0    0    0    0    0                  GIC exynos5-hs-i2c.0\n    95:    0    0    0    0    0    0    0    0                  GIC exynos5-hs-i2c.3\n   103:    0    0    0    0    0    0    0    0      GIC ehci_hcd:usb1, ohci_hcd:usb2\n   104:  105    0    0    0    0    0    0    0    GIC xhci_hcd:usb3, exynos-ss-udc.0\n   105:    0    0    0    0    0    0    0    0                     GIC xhci_hcd:usb5\n   106:    0    0    0    0    0    0    0    0                            GIC mali.0\n   107:    0    0    0    0    0    0    0    0                            GIC dw-mci\n   108:    0    0    0    0    0    0    0    0                            GIC dw-mci\n   109:    0    0    0    0    0    0    0    0                            GIC dw-mci\n   114: 1865    0    0    0    0    0    0    0                          GIC mipi-dsi\n   117:    0    0    0    0    0    0    0    0                        GIC exynos-gsc\n   118:    0    0    0    0    0    0    0    0                        GIC exynos-gsc\n   121:    0    0    0    0    0    0    0    0                   GIC exynos5-jpeg-hx\n   123:    0    0    0    0    0    0    0    0                        GIC s5p-fimg2d\n   126:    0    0    0    0    0    0    0    0                       
  GIC s5p-mixer\n   127:    0    0    0    0    0    0    0    0                          GIC hdmi-int\n   128:    0    0    0    0    0    0    0    0                        GIC s5p-mfc-v6\n   142:    0    0    0    0    0    0    0    0                       GIC dma-pl330.3\n   146:    0    0    0    0    0    0    0    0                     GIC s5p-tvout-cec\n   149:   31    0    0    0    0    0    0    0                            GIC mali.0\n   152:  204    0    0    0    0    0    0    0                         GIC mct_tick0\n   153:    0  312    0    0    0    0    0    0                         GIC mct_tick1\n   154:    0    0  243    0    0    0    0    0                         GIC mct_tick2\n   155:    0    0    0  123    0    0    0    0                         GIC mct_tick3\n>  160:    0    0    0    0 8038    0    0    0                         GIC mct_tick4\n   161:    0    0    0    0    0 5920    0    0                         GIC mct_tick5\n   162:    0    0    0    0    0    0 5931    0                         GIC mct_tick6\n   163:    0    0    0    0    0    0    0 5899                         GIC mct_tick7\n   200:    0    0    0    0    0    0    0    0                   GIC exynos5-jpeg-hx\n   201:    0    0    0    0    0    0    0    0                  GIC exynos-sysmmu.29\n   218:    0    0    0    0    0    0    0    0                  GIC exynos-sysmmu.25\n   220:    0    0    0    0    0    0    0    0                  GIC exynos-sysmmu.27\n   224:    0    0    0    0    0    0    0    0                  GIC exynos-sysmmu.19\n   251:    8    0    0    0    0    0    0    0                            GIC mali.0\n   252:    0    0    0    0    0    0    0    0                    GIC exynos5-scaler\n   253:    0    0    0    0    0    0    0    0                    GIC exynos5-scaler\n   254:    0    0    0    0    0    0    0    0                    GIC exynos5-scaler\n   272:    0    0    0    0    0    0    0    0              
combiner exynos-sysmmu.5\n   274:    0    0    0    0    0    0    0    0              combiner exynos-sysmmu.6\n   280:    0    0    0    0    0    0    0    0             combiner exynos-sysmmu.11\n   282:    0    0    0    0    0    0    0    0             combiner exynos-sysmmu.30\n   284:    0    0    0    0    0    0    0    0             combiner exynos-sysmmu.12\n   286:    0    0    0    0    0    0    0    0             combiner exynos-sysmmu.17\n   288:    0    0    0    0    0    0    0    0              combiner exynos-sysmmu.4\n   290:    0    0    0    0    0    0    0    0             combiner exynos-sysmmu.20\n   294:    0    0    0    0    0    0    0    0              combiner exynos-sysmmu.9\n   296:    0    0    0    0    0    0    0    0              combiner exynos-sysmmu.9\n   298:    0    0    0    0    0    0    0    0              combiner exynos-sysmmu.9\n   300:    0    0    0    0    0    0    0    0              combiner exynos-sysmmu.9\n   302:    0    0    0    0    0    0    0    0             combiner exynos-sysmmu.16\n   306:    0    0    0    0    0    0    0    0              combiner exynos-sysmmu.0\n   316:    0    0    0    0    0    0    0    0              combiner exynos-sysmmu.2\n   325:    0    0    0    0    0    0    0    0              combiner exynos-sysmmu.0\n   332:    0    0    0    0    0    0    0    0             combiner exynos-sysmmu.16\n   340:    0    0    0    0    0    0    0    0             combiner exynos-sysmmu.16\n   342:    0    0    0    0    0    0    0    0              combiner exynos-sysmmu.9\n   344:    0    0    0    0    0    0    0    0             combiner exynos-sysmmu.16\n   405:    5    0    0    0    0    0    0    0                       combiner s3c_fb\n   409:    0    0    0    0    0    0    0    0                       combiner mcuctl\n   414:    0    0    0    0    0    0    0    0             combiner exynos-sysmmu.28\n   434:    0    0    0    0    0    0    0    0             
combiner exynos-sysmmu.22\n   436:    0    0    0    0    0    0    0    0             combiner exynos-sysmmu.23\n   438:    0    0    0    0    0    0    0    0             combiner exynos-sysmmu.26\n   443:    0    0    0    0    0    0    0    0                 combiner mct_comp_irq\n   446:    0    0    0    0    0    0    0    0             combiner exynos-sysmmu.21\n   449:    0    0    0    0    0    0    0    0             combiner exynos-sysmmu.13\n   453:    0    0    0    0    0    0    0    0             combiner exynos-sysmmu.15\n   474:    0    0    0    0    0    0    0    0             combiner exynos-sysmmu.24\n   512:    0    0    0    0    0    0    0    0      exynos-eint gpio-keys: KEY_POWER\n   518:    0    0    0    0    0    0    0    0           exynos-eint drd_switch_vbus\n   524:    0    0    0    0    0    0    0    0   exynos-eint gpio-keys: KEY_HOMEPAGE\n   526:    0    0    0    0    0    0    0    0               exynos-eint HOST_DETECT\n   527:    0    0    0    0    0    0    0    0             exynos-eint drd_switch_id\n   531:    0    0    0    0    0    0    0    0           exynos-eint drd_switch_vbus\n   532:    0    0    0    0    0    0    0    0             exynos-eint drd_switch_id\n   537:    0    0    0    0    0    0    0    0                exynos-eint mxt540e_ts\n   538:    0    0    0    0    0    0    0    0              exynos-eint sec-pmic-irq\n   543:    0    0    0    0    0    0    0    0                  exynos-eint hdmi-ext\n   544:    0    0    0    0    0    0    0    0 s5p_gpioint gpio-keys: KEY_VOLUMEDOWN\n   545:    0    0    0    0    0    0    0    0   s5p_gpioint gpio-keys: KEY_VOLUMEUP\n   546:    0    0    0    0    0    0    0    0       s5p_gpioint gpio-keys: KEY_MENU\n   547:    0    0    0    0    0    0    0    0       s5p_gpioint gpio-keys: KEY_BACK\n   655:    0    0    0    0    0    0    0    0                   sec-pmic rtc-alarm0\n  IPI0:    0    0    0    0    0    0    0    0            
Timer broadcast interrupts\n  IPI1:   72   38   27   29   36    3    3    3               Rescheduling interrupts\n  IPI2:    1    1    1    0    1    1    1    1              Function call interrupts\n  IPI3:    0    0    0    0    0    0    0    0       Single function call interrupts\n  IPI4:    0    0    0    0    0    0    0    0                   CPU stop interrupts\n  IPI5:    0    0    0    0    0    0    0    0                         CPU backtrace\n   Err:    0                                                                         \n"
  },
  {
    "path": "tests/data/logcat.2.log",
    "content": "--------- beginning of /dev/log/main\r\nD/TextView( 2468): 7:07\r\nD/TextView( 2468): 7:07\r\nD/TextView( 2468): Thu, June 27\r\n--------- beginning of /dev/log/system\r\nD/TextView( 3099): CaffeineMark results\r\nD/TextView( 3099): Overall score:\r\nD/TextView( 3099): Rating\r\nD/TextView( 3099): Rank\r\nD/TextView( 3099): 0\r\nD/TextView( 3099): Details\r\nD/TextView( 3099): Publish\r\nD/TextView( 3099): Top 10\r\nD/TextView( 3099): 3672\r\n"
  },
  {
    "path": "tests/data/logcat.log",
    "content": "--------- beginning of /dev/log/main\n--------- beginning of /dev/log/system\nD/TextView( 2462): 5:05\nD/TextView( 2462): 5:05\nD/TextView( 2462): Mon, June 24\nD/TextView( 3072): Stop Test\nD/TextView( 3072): Testing CPU and memory…\nD/TextView( 3072): 0%\nD/TextView( 3072): Testing CPU and memory…\n\n"
  },
  {
    "path": "tests/data/test-agenda.yaml",
    "content": "global: \n        iterations: 8\n        boot_parameters:\n                os_mode: mp_a15_bootcluster\n        runtime_parameters:\n                a7_governor: Interactive\n                a15_governor: Interactive2\n                a7_cores: 3\n                a15_cores: 2\nworkloads:\n        - id: 1c\n          workload_name: exoplayer\n        - id: 1d\n          workload_name: exoplayer\n          runtime_parameters:\n                os_mode: mp_a7_only\n                a7_cores: 0\n          iterations: 4\n        - id: 1e\n          workload_name: benchmarkpi\n        - id: 1f\n          workload_name: antutu\n          runtime_parameters:\n                a7_cores: 1\n                a15_cores: 1\n"
  },
  {
    "path": "tests/data/test-config.py",
    "content": "#    Copyright 2013-2017 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\n\ndevice = 'TEST'\n"
  },
  {
    "path": "tests/test_agenda_parser.py",
    "content": "#    Copyright 2013-2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\n\n# pylint: disable=E0611\n# pylint: disable=R0201\nimport os\nimport sys\nfrom collections import defaultdict\nfrom unittest import TestCase\n\nfrom nose.tools import assert_equal, assert_in, raises, assert_true\n\n\nDATA_DIR = os.path.join(os.path.dirname(__file__), 'data')\nos.environ['WA_USER_DIRECTORY'] = os.path.join(DATA_DIR, 'includes')\n\nfrom wa.framework.configuration.execution import ConfigManager\nfrom wa.framework.configuration.parsers import AgendaParser\nfrom wa.framework.exception import ConfigError\nfrom wa.utils.serializer import yaml\nfrom wa.utils.types import reset_all_counters\n\n\nYAML_TEST_FILE = os.path.join(DATA_DIR, 'test-agenda.yaml')\nYAML_BAD_SYNTAX_FILE = os.path.join(DATA_DIR, 'bad-syntax-agenda.yaml')\nINCLUDES_TEST_FILE = os.path.join(DATA_DIR, 'includes', 'agenda.yaml')\n\ninvalid_agenda_text = \"\"\"\nworkloads:\n    - id: 1\n      workload_parameters:\n          test: 1\n\"\"\"\n\nduplicate_agenda_text = \"\"\"\nglobal:\n    iterations: 1\nworkloads:\n    - id: 1\n      workload_name: antutu\n      workload_parameters:\n          test: 1\n    - id: \"1\"\n      workload_name: benchmarkpi\n\"\"\"\n\nshort_agenda_text = \"\"\"\nworkloads: [antutu, dhrystone, benchmarkpi]\n\"\"\"\n\ndefault_ids_agenda_text = \"\"\"\nworkloads:\n    - antutu\n    - id: wk1\n      name: benchmarkpi\n    - id: test\n      name: dhrystone\n    
  params:\n          cpus: 1\n    - vellamo\n\"\"\"\n\nsectioned_agenda_text = \"\"\"\nsections:\n    - id: sec1\n      runtime_params:\n        dp: one\n      workloads:\n        - name: antutu\n          workload_parameters:\n            markers_enabled: True\n        - benchmarkpi\n        - name: dhrystone\n          runtime_params:\n            dp: two\n    - id: sec2\n      runtime_params:\n        dp: three\n      workloads:\n        - antutu\nworkloads:\n    - memcpy\n\"\"\"\n\ndup_sectioned_agenda_text = \"\"\"\nsections:\n    - id: sec1\n      workloads:\n        - antutu\n    - id: sec1\n      workloads:\n        - benchmarkpi\nworkloads:\n    - memcpy\n\"\"\"\n\nyaml_anchors_agenda_text = \"\"\"\nworkloads:\n-   name: dhrystone\n    params: &dhrystone_single_params\n        cleanup_assets: true\n        cpus: 0\n        delay: 3\n        duration: 0\n        mloops: 10\n        threads: 1\n-   name: dhrystone\n    params:\n        <<: *dhrystone_single_params\n        threads: 4\n\"\"\"\n\n\nclass AgendaTest(TestCase):\n\n    def setUp(self):\n        reset_all_counters()\n        self.config = ConfigManager()\n        self.parser = AgendaParser()\n\n    def test_yaml_load(self):\n        self.parser.load_from_path(self.config, YAML_TEST_FILE)\n        assert_equal(len(self.config.jobs_config.root_node.workload_entries), 4)\n\n    def test_duplicate_id(self):\n        duplicate_agenda = yaml.load(duplicate_agenda_text)\n\n        try:\n            self.parser.load(self.config, duplicate_agenda, 'test')\n        except ConfigError as e:\n            assert_in('duplicate', e.message.lower())  # pylint: disable=E1101\n        else:\n            raise Exception('ConfigError was not raised for an agenda with duplicate ids.')\n\n    def test_yaml_missing_field(self):\n        invalid_agenda = yaml.load(invalid_agenda_text)\n\n        try:\n            self.parser.load(self.config, invalid_agenda, 'test')\n        except ConfigError as e:\n            
assert_in('workload name', e.message)\n        else:\n            raise Exception('ConfigError was not raised for an invalid agenda.')\n\n    def test_defaults(self):\n        short_agenda = yaml.load(short_agenda_text)\n        self.parser.load(self.config, short_agenda, 'test')\n\n        workload_entries = self.config.jobs_config.root_node.workload_entries\n        assert_equal(len(workload_entries), 3)\n        assert_equal(workload_entries[0].config['workload_name'], 'antutu')\n        assert_equal(workload_entries[0].id, 'wk1')\n\n    def test_default_id_assignment(self):\n        default_ids_agenda = yaml.load(default_ids_agenda_text)\n\n        self.parser.load(self.config, default_ids_agenda, 'test2')\n        workload_entries = self.config.jobs_config.root_node.workload_entries\n        assert_equal(workload_entries[0].id, 'wk2')\n        assert_equal(workload_entries[3].id, 'wk3')\n\n    def test_sections(self):\n        sectioned_agenda = yaml.load(sectioned_agenda_text)\n        self.parser.load(self.config, sectioned_agenda, 'test')\n\n        root_node_workload_entries = self.config.jobs_config.root_node.workload_entries\n        leaves = list(self.config.jobs_config.root_node.leaves())\n        section1_workload_entries = leaves[0].workload_entries\n        section2_workload_entries = leaves[0].workload_entries\n\n        assert_equal(root_node_workload_entries[0].config['workload_name'], 'memcpy')\n        assert_true(section1_workload_entries[0].config['workload_parameters']['markers_enabled'])\n        assert_equal(section2_workload_entries[0].config['workload_name'], 'antutu')\n\n    def test_yaml_anchors(self):\n        yaml_anchors_agenda = yaml.load(yaml_anchors_agenda_text)\n        self.parser.load(self.config, yaml_anchors_agenda, 'test')\n\n        workload_entries = self.config.jobs_config.root_node.workload_entries\n        assert_equal(len(workload_entries), 2)\n        assert_equal(workload_entries[0].config['workload_name'], 
'dhrystone')\n        assert_equal(workload_entries[0].config['workload_parameters']['threads'], 1)\n        assert_equal(workload_entries[0].config['workload_parameters']['delay'], 3)\n        assert_equal(workload_entries[1].config['workload_name'], 'dhrystone')\n        assert_equal(workload_entries[1].config['workload_parameters']['threads'], 4)\n        assert_equal(workload_entries[1].config['workload_parameters']['delay'], 3)\n\n    @raises(ConfigError)\n    def test_dup_sections(self):\n        dup_sectioned_agenda = yaml.load(dup_sectioned_agenda_text)\n        self.parser.load(self.config, dup_sectioned_agenda, 'test')\n\n    @raises(ConfigError)\n    def test_bad_syntax(self):\n        self.parser.load_from_path(self.config, YAML_BAD_SYNTAX_FILE)\n\n\nclass FakeTargetManager:\n\n    def merge_runtime_parameters(self, params):\n        return params\n\n    def validate_runtime_parameters(self, params):\n        pass\n\n\nclass IncludesTest(TestCase):\n\n    def test_includes(self):\n        from pprint import pprint\n        parser = AgendaParser()\n        cm = ConfigManager()\n        tm = FakeTargetManager()\n\n        includes = parser.load_from_path(cm, INCLUDES_TEST_FILE)\n        include_set = set([os.path.basename(i) for i in includes])\n        assert_equal(include_set,\n            set(['test.yaml', 'section1.yaml', 'section2.yaml',\n                 'section-include.yaml', 'workloads.yaml']))\n\n        job_classifiers = {j.id: j.classifiers\n                           for j in cm.jobs_config.generate_job_specs(tm)}\n        assert_equal(job_classifiers,\n                {\n                    's1-wk1': {'section': 'one'},\n                    's2-wk1': {'section': 'two', 'included': True},\n                    's1-wk2': {'section': 'one', 'memcpy': True},\n                    's2-wk2': {'section': 'two', 'included': True, 'memcpy': True},\n                })\n"
  },
  {
    "path": "tests/test_config.py",
    "content": "#    Copyright 2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\nimport unittest\nfrom nose.tools import assert_equal\n\nfrom wa.framework.configuration.execution import ConfigManager\nfrom wa.utils.misc import merge_config_values\n\n\nclass TestConfigUtils(unittest.TestCase):\n\n    def test_merge_values(self):\n        test_cases = [\n            # base, other, expected_result\n            ('a', 3, 3),\n            ('a', [1, 2], ['a', 1, 2]),\n            ({1: 2}, [3, 4], [{1: 2}, 3, 4]),\n            (set([2]), [1, 2, 3], [2, 1, 3]),\n            ([1, 2, 3], set([2]), set([1, 2, 3])),\n            ([1, 2], None, [1, 2]),\n            (None, 'a', 'a'),\n        ]\n        for v1, v2, expected in test_cases:\n            result = merge_config_values(v1, v2)\n            assert_equal(result, expected)\n            if v2 is not None:\n                assert_equal(type(result), type(v2))\n\n\n\nclass TestConfigParser(unittest.TestCase):\n\n    def test_param_merge(self):\n        config = ConfigManager()\n\n        config.load_config({'workload_params': {'one': 1, 'three': {'ex': 'x'}}, 'runtime_params': {'aye': 'a'}}, 'file_one')\n        config.load_config({'workload_params': {'two': 2, 'three': {'why': 'y'}}, 'runtime_params': {'bee': 'b'}}, 'file_two')\n\n        assert_equal(\n            config.jobs_config.job_spec_template['workload_parameters'],\n            {'one': 1, 'two': 2, 'three': {'why': 'y'}},\n        )\n     
   assert_equal(\n            config.jobs_config.job_spec_template['runtime_parameters'],\n            {'aye': 'a', 'bee': 'b'},\n        )\n"
  },
  {
    "path": "tests/test_diff.py",
    "content": "#    Copyright 2013-2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\n\n# pylint: disable=E0611\n# pylint: disable=R0201\nimport os\nimport tempfile\nfrom unittest import TestCase\n\nfrom nose.tools import assert_equal\n\nfrom wa.utils.diff import diff_interrupt_files\n\n\nclass InterruptDiffTest(TestCase):\n\n    def test_interrupt_diff(self):\n        file_dir = os.path.join(os.path.dirname(__file__), 'data', 'interrupts')\n        before_file = os.path.join(file_dir, 'before')\n        after_file = os.path.join(file_dir, 'after')\n        expected_result_file = os.path.join(file_dir, 'result')\n        output_file = tempfile.mktemp()\n\n        diff_interrupt_files(before_file, after_file, output_file)\n        with open(output_file) as fh:\n            output_diff = fh.read()\n        with open(expected_result_file) as fh:\n            expected_diff = fh.read()\n        assert_equal(output_diff, expected_diff)\n\n\n"
  },
  {
    "path": "tests/test_exec_control.py",
    "content": "#    Copyright 2013-2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\n\n# pylint: disable=W0231,W0613,E0611,W0603,R0201\nfrom unittest import TestCase\n\nfrom nose.tools import assert_equal, assert_raises\n\nfrom wa.utils.exec_control import (init_environment, reset_environment,\n                                   activate_environment, once,\n                                   once_per_class, once_per_instance,\n                                   once_per_attribute_value)\n\nclass MockClass(object):\n\n    called = 0\n\n    def __init__(self):\n        self.count = 0\n\n    @once\n    def called_once(self):\n        MockClass.called += 1\n\n    @once\n    def initilize_once(self):\n        self.count += 1\n\n    @once_per_class\n    def initilize_once_per_class(self):\n        self.count += 1\n\n    @once_per_instance\n    def initilize_once_per_instance(self):\n        self.count += 1\n\n    def __repr__(self):\n        return '{}: Called={}'.format(self.__class__.__name__, self.called)\n\n\nclass SubClass(MockClass):\n\n    def __init__(self):\n        super(SubClass, self).__init__()\n\n    @once\n    def initilize_once(self):\n        super(SubClass, self).initilize_once()\n        self.count += 1\n\n    @once_per_class\n    def initilize_once_per_class(self):\n        super(SubClass, self).initilize_once_per_class()\n        self.count += 1\n\n    @once_per_instance\n    def initilize_once_per_instance(self):\n        
super(SubClass, self).initilize_once_per_instance()\n        self.count += 1\n\n\nclass SubSubClass(SubClass):\n\n    def __init__(self):\n        super(SubSubClass, self).__init__()\n\n    @once\n    def initilize_once(self):\n        super(SubSubClass, self).initilize_once()\n        self.count += 1\n\n    @once_per_class\n    def initilize_once_per_class(self):\n        super(SubSubClass, self).initilize_once_per_class()\n        self.count += 1\n\n    @once_per_instance\n    def initilize_once_per_instance(self):\n        super(SubSubClass, self).initilize_once_per_instance()\n        self.count += 1\n\n\nclass AnotherClass(object):\n\n    def __init__(self):\n        self.count = 0\n\n    @once\n    def initilize_once(self):\n        self.count += 1\n\n    @once_per_class\n    def initilize_once_per_class(self):\n        self.count += 1\n\n    @once_per_instance\n    def initilize_once_per_instance(self):\n        self.count += 1\n\n\nclass NamedClass:\n\n    count = 0\n\n    def __init__(self, name):\n        self.name = name\n\n    @once_per_attribute_value('name')\n    def initilize(self):\n        NamedClass.count += 1\n\n\nclass AnotherSubClass(MockClass):\n\n    def __init__(self):\n        super(AnotherSubClass, self).__init__()\n\n    @once\n    def initilize_once(self):\n        super(AnotherSubClass, self).initilize_once()\n        self.count += 1\n\n    @once_per_class\n    def initilize_once_per_class(self):\n        super(AnotherSubClass, self).initilize_once_per_class()\n        self.count += 1\n\n    @once_per_instance\n    def initilize_once_per_instance(self):\n        super(AnotherSubClass, self).initilize_once_per_instance()\n        self.count += 1\n\n\nclass EnvironmentManagementTest(TestCase):\n\n    def test_duplicate_environment(self):\n        init_environment('ENVIRONMENT')\n        assert_raises(ValueError, init_environment, 'ENVIRONMENT')\n\n    def test_reset_missing_environment(self):\n        assert_raises(ValueError, 
reset_environment, 'MISSING')\n\n    def test_reset_current_environment(self):\n        activate_environment('CURRENT_ENVIRONMENT')\n        t1 = MockClass()\n        t1.initilize_once()\n        assert_equal(t1.count, 1)\n\n        reset_environment()\n        t1.initilize_once()\n        assert_equal(t1.count, 2)\n\n    def test_switch_environment(self):\n        activate_environment('ENVIRONMENT1')\n        t1 = MockClass()\n        t1.initilize_once()\n        assert_equal(t1.count, 1)\n\n        activate_environment('ENVIRONMENT2')\n        t1.initilize_once()\n        assert_equal(t1.count, 2)\n\n        activate_environment('ENVIRONMENT1')\n        t1.initilize_once()\n        assert_equal(t1.count, 2)\n\n    def test_reset_environment_name(self):\n        activate_environment('ENVIRONMENT')\n        t1 = MockClass()\n        t1.initilize_once()\n        assert_equal(t1.count, 1)\n\n        reset_environment('ENVIRONMENT')\n        t1.initilize_once()\n        assert_equal(t1.count, 2)\n\n\nclass ParentOnlyOnceEvironmentTest(TestCase):\n    def test_sub_classes(self):\n        sc = SubClass()\n        asc = AnotherSubClass()\n\n        sc.called_once()\n        assert_equal(sc.called, 1)\n        asc.called_once()\n        assert_equal(asc.called, 1)\n\n\nclass OnlyOnceEnvironmentTest(TestCase):\n\n    def setUp(self):\n        activate_environment('TEST_ENVIRONMENT')\n\n    def tearDown(self):\n        reset_environment('TEST_ENVIRONMENT')\n\n    def test_single_instance(self):\n        t1 = MockClass()\n        ac = AnotherClass()\n\n        t1.initilize_once()\n        assert_equal(t1.count, 1)\n\n        t1.initilize_once()\n        assert_equal(t1.count, 1)\n\n        ac.initilize_once()\n        assert_equal(ac.count, 1)\n\n\n    def test_mulitple_instances(self):\n        t1 = MockClass()\n        t2 = MockClass()\n\n        t1.initilize_once()\n        assert_equal(t1.count, 1)\n\n        t2.initilize_once()\n        assert_equal(t2.count, 0)\n\n\n   
 def test_sub_classes(self):\n        t1 = MockClass()\n        sc = SubClass()\n        ss = SubSubClass()\n        asc = AnotherSubClass()\n\n        t1.initilize_once()\n        assert_equal(t1.count, 1)\n\n        sc.initilize_once()\n        sc.initilize_once()\n        assert_equal(sc.count, 1)\n\n        ss.initilize_once()\n        ss.initilize_once()\n        assert_equal(ss.count, 1)\n\n        asc.initilize_once()\n        asc.initilize_once()\n        assert_equal(asc.count, 1)\n\n\nclass OncePerClassEnvironmentTest(TestCase):\n\n    def setUp(self):\n        activate_environment('TEST_ENVIRONMENT')\n\n    def tearDown(self):\n        reset_environment('TEST_ENVIRONMENT')\n\n    def test_single_instance(self):\n        t1 = MockClass()\n        ac = AnotherClass()\n\n        t1.initilize_once_per_class()\n        assert_equal(t1.count, 1)\n\n        t1.initilize_once_per_class()\n        assert_equal(t1.count, 1)\n\n        ac.initilize_once_per_class()\n        assert_equal(ac.count, 1)\n\n\n    def test_mulitple_instances(self):\n        t1 = MockClass()\n        t2 = MockClass()\n\n        t1.initilize_once_per_class()\n        assert_equal(t1.count, 1)\n\n        t2.initilize_once_per_class()\n        assert_equal(t2.count, 0)\n\n\n    def test_sub_classes(self):\n        t1 = MockClass()\n        sc1 = SubClass()\n        sc2 = SubClass()\n        ss1 = SubSubClass()\n        ss2 = SubSubClass()\n        asc = AnotherSubClass()\n\n        t1.initilize_once_per_class()\n        assert_equal(t1.count, 1)\n\n        sc1.initilize_once_per_class()\n        sc2.initilize_once_per_class()\n        assert_equal(sc1.count, 1)\n        assert_equal(sc2.count, 0)\n\n        ss1.initilize_once_per_class()\n        ss2.initilize_once_per_class()\n        assert_equal(ss1.count, 1)\n        assert_equal(ss2.count, 0)\n\n        asc.initilize_once_per_class()\n        assert_equal(asc.count, 1)\n\n\nclass OncePerInstanceEnvironmentTest(TestCase):\n\n    def 
setUp(self):\n        activate_environment('TEST_ENVIRONMENT')\n\n    def tearDown(self):\n        reset_environment('TEST_ENVIRONMENT')\n\n    def test_single_instance(self):\n        t1 = MockClass()\n        ac = AnotherClass()\n\n        t1.initilize_once_per_instance()\n        assert_equal(t1.count, 1)\n\n        t1.initilize_once_per_instance()\n        assert_equal(t1.count, 1)\n\n        ac.initilize_once_per_instance()\n        assert_equal(ac.count, 1)\n\n\n    def test_mulitple_instances(self):\n        t1 = MockClass()\n        t2 = MockClass()\n\n        t1.initilize_once_per_instance()\n        assert_equal(t1.count, 1)\n\n        t2.initilize_once_per_instance()\n        assert_equal(t2.count, 1)\n\n\n    def test_sub_classes(self):\n        t1 = MockClass()\n        sc = SubClass()\n        ss = SubSubClass()\n        asc = AnotherSubClass()\n\n        t1.initilize_once_per_instance()\n        assert_equal(t1.count, 1)\n\n        sc.initilize_once_per_instance()\n        sc.initilize_once_per_instance()\n        assert_equal(sc.count, 2)\n\n        ss.initilize_once_per_instance()\n        ss.initilize_once_per_instance()\n        assert_equal(ss.count, 3)\n\n        asc.initilize_once_per_instance()\n        asc.initilize_once_per_instance()\n        assert_equal(asc.count, 2)\n\n\nclass OncePerAttributeValueTest(TestCase):\n\n    def setUp(self):\n        activate_environment('TEST_ENVIRONMENT')\n\n    def tearDown(self):\n        reset_environment('TEST_ENVIRONMENT')\n\n    def test_once_attribute_value(self):\n        classes = [\n                NamedClass('Rick'),\n                NamedClass('Morty'),\n                NamedClass('Rick'),\n                NamedClass('Morty'),\n                NamedClass('Morty'),\n                NamedClass('Summer'),\n        ]\n\n        for c in classes:\n            c.initilize()\n\n        for c in classes:\n            c.initilize()\n\n        assert_equal(NamedClass.count, 3)\n"
  },
  {
    "path": "tests/test_execution.py",
    "content": "#    Copyright 2020 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\nimport os\nimport tempfile\nfrom unittest import TestCase\n\nfrom mock.mock import Mock\nfrom nose.tools import assert_equal\nfrom datetime import datetime\n\nfrom wa.framework.configuration import RunConfiguration\nfrom wa.framework.configuration.core import JobSpec, Status\nfrom wa.framework.execution import ExecutionContext, Runner\nfrom wa.framework.job import Job\nfrom wa.framework.output import RunOutput, init_run_output\nfrom wa.framework.output_processor import ProcessorManager\nimport wa.framework.signal as signal\nfrom wa.framework.run import JobState\nfrom wa.framework.exception import ExecutionError\n\n\nclass MockConfigManager(Mock):\n\n    @property\n    def jobs(self):\n        return self._joblist\n\n    @property\n    def loaded_config_sources(self):\n        return []\n\n    @property\n    def plugin_cache(self):\n        return MockPluginCache()\n\n    def __init__(self, *args, **kwargs):\n        super(MockConfigManager, self).__init__(*args, **kwargs)\n        self._joblist = None\n        self.run_config = RunConfiguration()\n\n    def to_pod(self):\n        return {}\n\n\nclass MockPluginCache(Mock):\n\n    def list_plugins(self, kind=None):\n        return []\n\n\nclass MockProcessorManager(Mock):\n\n    def __init__(self, *args, **kwargs):\n        super(MockProcessorManager, self).__init__(*args, **kwargs)\n\n    def 
get_enabled(self):\n        return []\n\n\nclass JobState_force_retry(JobState):\n\n    @property\n    def status(self):\n        return self._status\n\n    @status.setter\n    def status(self, value):\n        if(self.retries != self.times_to_retry) and (value == Status.RUNNING):\n            self._status = Status.FAILED\n            if self.output:\n                self.output.status = Status.FAILED\n        else:\n            self._status = value\n            if self.output:\n                self.output.status = value\n\n    def __init__(self, to_retry, *args, **kwargs):\n        self.retries = 0\n        self._status = Status.NEW\n        self.times_to_retry = to_retry\n        self.output = None\n        super(JobState_force_retry, self).__init__(*args, **kwargs)\n\n\nclass Job_force_retry(Job):\n    '''This class imitates a job that retries as many times as specified by\n    ``retries`` in its constructor'''\n\n    def __init__(self, to_retry, *args, **kwargs):\n        super(Job_force_retry, self).__init__(*args, **kwargs)\n        self.state = JobState_force_retry(to_retry, self.id, self.label, self.iteration, Status.NEW)\n        self.initialized = False\n        self.finalized = False\n\n    def initialize(self, context):\n        self.initialized = True\n        return super().initialize(context)\n    \n    def finalize(self, context):\n        self.finalized = True\n        return super().finalize(context)\n\n\nclass TestRunState(TestCase):\n\n    def setUp(self):\n        self.path = tempfile.mkstemp()[1]\n        os.remove(self.path)\n        self.initialise_signals()\n        self.context = get_context(self.path)\n        self.job_spec = get_jobspec()\n\n    def tearDown(self):\n        signal.disconnect(self._verify_serialized_state, signal.RUN_INITIALIZED)\n        signal.disconnect(self._verify_serialized_state, signal.JOB_STARTED)\n        signal.disconnect(self._verify_serialized_state, signal.JOB_RESTARTED)\n        
signal.disconnect(self._verify_serialized_state, signal.JOB_COMPLETED)\n        signal.disconnect(self._verify_serialized_state, signal.JOB_FAILED)\n        signal.disconnect(self._verify_serialized_state, signal.JOB_ABORTED)\n        signal.disconnect(self._verify_serialized_state, signal.RUN_FINALIZED)\n\n    def test_job_state_transitions_pass(self):\n        '''Tests state equality when the job passes first try'''\n        job = Job(self.job_spec, 1, self.context)\n        job.workload = Mock()\n\n        self.context.cm._joblist = [job]\n        self.context.run_state.add_job(job)\n\n        runner = Runner(self.context, MockProcessorManager())\n        runner.run()\n\n    def test_job_state_transitions_fail(self):\n        '''Tests state equality when job fails completely'''\n        job = Job_force_retry(3, self.job_spec, 1, self.context)\n        job.workload = Mock()\n\n        self.context.cm._joblist = [job]\n        self.context.run_state.add_job(job)\n\n        runner = Runner(self.context, MockProcessorManager())\n        runner.run()\n\n    def test_job_state_transitions_retry(self):\n        '''Tests state equality when job fails initially'''\n        job = Job_force_retry(1, self.job_spec, 1, self.context)\n        job.workload = Mock()\n\n        self.context.cm._joblist = [job]\n        self.context.run_state.add_job(job)\n\n        runner = Runner(self.context, MockProcessorManager())\n        runner.run()\n\n    def initialise_signals(self):\n        signal.connect(self._verify_serialized_state, signal.RUN_INITIALIZED)\n        signal.connect(self._verify_serialized_state, signal.JOB_STARTED)\n        signal.connect(self._verify_serialized_state, signal.JOB_RESTARTED)\n        signal.connect(self._verify_serialized_state, signal.JOB_COMPLETED)\n        signal.connect(self._verify_serialized_state, signal.JOB_FAILED)\n        signal.connect(self._verify_serialized_state, signal.JOB_ABORTED)\n        signal.connect(self._verify_serialized_state, 
signal.RUN_FINALIZED)\n\n    def _verify_serialized_state(self, _):\n        fs_state = RunOutput(self.path).state\n        ex_state = self.context.run_output.state\n\n        assert_equal(fs_state.status, ex_state.status)\n        fs_js_zip = zip(\n            [value for key, value in fs_state.jobs.items()],\n            [value for key, value in ex_state.jobs.items()]\n        )\n        for fs_jobstate, ex_jobstate in fs_js_zip:\n            assert_equal(fs_jobstate.iteration, ex_jobstate.iteration)\n            assert_equal(fs_jobstate.retries, ex_jobstate.retries)\n            assert_equal(fs_jobstate.status, ex_jobstate.status)\n\n\nclass TestJobState(TestCase):\n\n    def test_job_retry_status(self):\n        job_spec = get_jobspec()\n        context = get_context()\n\n        job = Job_force_retry(2, job_spec, 1, context)\n        job.workload = Mock()\n\n        context.cm._joblist = [job]\n        context.run_state.add_job(job)\n\n        verifier = lambda _: assert_equal(job.status, Status.PENDING)\n        signal.connect(verifier, signal.JOB_RESTARTED)\n\n        runner = Runner(context, MockProcessorManager())\n        runner.run()\n        signal.disconnect(verifier, signal.JOB_RESTARTED)\n\n    def test_skipped_job_state(self):\n        # Test, if the first job fails and the bail parameter set,\n        # that the remaining jobs have status: SKIPPED\n        job_spec = get_jobspec()\n        context = get_context()\n\n        context.cm.run_config.bail_on_job_failure = True\n\n        job1 = Job_force_retry(3, job_spec, 1, context)\n        job2 = Job(job_spec, 1, context)\n        job1.workload = Mock()\n        job2.workload = Mock()\n\n        context.cm._joblist = [job1, job2]\n        context.run_state.add_job(job1)\n        context.run_state.add_job(job2)\n\n        runner = Runner(context, MockProcessorManager())\n        try:\n            runner.run()\n        except ExecutionError:\n            assert_equal(job2.status, Status.SKIPPED)\n      
  else:\n            assert False, \"ExecutionError not raised\"\n\n    def test_normal_job_finalized(self):\n        # Test that a job is initialized then finalized normally\n        job_spec = get_jobspec()\n        context = get_context()\n\n        job = Job_force_retry(0, job_spec, 1, context)\n        job.workload = Mock()\n\n        context.cm._joblist = [job]\n        context.run_state.add_job(job)\n\n        runner = Runner(context, MockProcessorManager())\n        runner.run()\n\n        assert_equal(job.initialized, True)\n        assert_equal(job.finalized, True)\n\n    def test_skipped_job_finalized(self):\n        # Test that a skipped job has been finalized\n        job_spec = get_jobspec()\n        context = get_context()\n\n        context.cm.run_config.bail_on_job_failure = True\n\n        job1 = Job_force_retry(3, job_spec, 1, context)\n        job2 = Job_force_retry(0, job_spec, 1, context)\n        job1.workload = Mock()\n        job2.workload = Mock()\n\n        context.cm._joblist = [job1, job2]\n        context.run_state.add_job(job1)\n        context.run_state.add_job(job2)\n\n        runner = Runner(context, MockProcessorManager())\n        try:\n            runner.run()\n        except ExecutionError:\n            assert_equal(job2.finalized, True)\n        else:\n            assert False, \"ExecutionError not raised\"\n\n    def test_failed_job_finalized(self):\n        # Test that a failed job, while the bail parameter is set,\n        # is finalized\n        job_spec = get_jobspec()\n        context = get_context()\n\n        context.cm.run_config.bail_on_job_failure = True\n\n        job1 = Job_force_retry(3, job_spec, 1, context)\n        job1.workload = Mock()\n\n        context.cm._joblist = [job1]\n        context.run_state.add_job(job1)\n\n        runner = Runner(context, MockProcessorManager())\n        try:\n            runner.run()\n        except ExecutionError:\n            assert_equal(job1.finalized, True)\n        else:\n 
           assert False, \"ExecutionError not raised\"\n\n\ndef get_context(path=None):\n    if not path:\n        path = tempfile.mkstemp()[1]\n        os.remove(path)\n\n    config = MockConfigManager()\n    output = init_run_output(path, config)\n\n    return ExecutionContext(config, Mock(), output)\n\n\ndef get_jobspec():\n    job_spec = JobSpec()\n    job_spec.augmentations = {}\n    job_spec.finalize()\n    return job_spec\n"
  },
  {
    "path": "tests/test_plugin.py",
    "content": "#    Copyright 2014-2017 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\n\n# pylint: disable=E0611,R0201,E1101\nimport os\nfrom unittest import TestCase\n\nfrom nose.tools import assert_equal, raises, assert_true\n\nfrom wa.framework.plugin import Plugin, PluginMeta, PluginLoader, Parameter\nfrom wa.utils.types import list_of_ints\nfrom wa import ConfigError\n\n\nEXTDIR = os.path.join(os.path.dirname(__file__), 'data', 'extensions')\n\n\nclass PluginLoaderTest(TestCase):\n\n    def setUp(self):\n        self.loader = PluginLoader(paths=[EXTDIR, ])\n\n    def test_load_device(self):\n        device = self.loader.get_device('test-device')\n        assert_equal(device.name, 'test-device')\n\n    def test_list_by_kind(self):\n        exts = self.loader.list_devices()\n        assert_equal(len(exts), 1)\n        assert_equal(exts[0].name, 'test-device')\n\n\n\nclass MyBasePlugin(Plugin):\n\n    name = 'base'\n    kind = 'test'\n\n    parameters = [\n        Parameter('base'),\n    ]\n\n    def __init__(self, **kwargs):\n        super(MyBasePlugin, self).__init__(**kwargs)\n        self.v1 = 0\n        self.v2 = 0\n        self.v3 = ''\n\n    def virtual1(self):\n        self.v1 += 1\n        self.v3 = 'base'\n\n    def virtual2(self):\n        self.v2 += 1\n\n\nclass MyAcidPlugin(MyBasePlugin):\n\n    name = 'acid'\n\n    parameters = [\n        Parameter('hydrochloric', kind=list_of_ints, default=[1, 2]),\n        
Parameter('citric'),\n        Parameter('carbonic', kind=int),\n    ]\n\n    def __init__(self, **kwargs):\n        super(MyAcidPlugin, self).__init__(**kwargs)\n        self.vv1 = 0\n        self.vv2 = 0\n\n    def virtual1(self):\n        self.vv1 += 1\n        self.v3 = 'acid'\n\n    def virtual2(self):\n        self.vv2 += 1\n\n\nclass MyOtherPlugin(MyBasePlugin):\n\n    name = 'other'\n\n    parameters = [\n        Parameter('mandatory', mandatory=True),\n        Parameter('optional', allowed_values=['test', 'check']),\n    ]\n\nclass MyOtherOtherPlugin(MyOtherPlugin):\n\n    name = 'otherother'\n\n    parameters = [\n        Parameter('mandatory', override=True),\n    ]\n\n\nclass MyOverridingPlugin(MyAcidPlugin):\n\n    name = 'overriding'\n\n    parameters = [\n        Parameter('hydrochloric', override=True, default=[3, 4]),\n    ]\n\n\nclass MyThirdTeerPlugin(MyOverridingPlugin):\n\n    name = 'thirdteer'\n\n\nclass MultiValueParamExt(Plugin):\n\n    name = 'multivalue'\n    kind = 'test'\n\n    parameters = [\n        Parameter('test', kind=list_of_ints, allowed_values=[42, 7, 73]),\n    ]\n\n\nclass PluginMetaTest(TestCase):\n\n    def test_propagation(self):\n        acid_params = [p.name for p in MyAcidPlugin.parameters]\n        assert_equal(acid_params, ['base', 'hydrochloric', 'citric', 'carbonic'])\n\n    @raises(ValueError)\n    def test_duplicate_param_spec(self):\n        class BadPlugin(MyBasePlugin):  # pylint: disable=W0612\n            parameters = [\n                Parameter('base'),\n            ]\n\n    def test_param_override(self):\n        class OverridingPlugin(MyBasePlugin):  # pylint: disable=W0612\n            parameters = [\n                Parameter('base', override=True, default='cheese'),\n            ]\n        assert_equal(OverridingPlugin.parameters['base'].default, 'cheese')\n\n    @raises(ValueError)\n    def test_invalid_param_spec(self):\n        class BadPlugin(MyBasePlugin):  # pylint: disable=W0612\n            
parameters = [\n                7,\n            ]\n\n\nclass ParametersTest(TestCase):\n\n    def test_setting(self):\n        myext = MyAcidPlugin(hydrochloric=[5, 6], citric=5, carbonic=42)\n        assert_equal(myext.hydrochloric, [5, 6])\n        assert_equal(myext.citric, '5')\n        assert_equal(myext.carbonic, 42)\n\n    def test_validation_ok(self):\n        myext = MyOtherPlugin(mandatory='check', optional='check')\n        myext.validate()\n\n    def test_default_override(self):\n        myext = MyOverridingPlugin()\n        assert_equal(myext.hydrochloric, [3, 4])\n        myotherext = MyThirdTeerPlugin()\n        assert_equal(myotherext.hydrochloric, [3, 4])\n\n    def test_multivalue_param(self):\n        myext = MultiValueParamExt(test=[7, 42])\n        myext.validate()\n        assert_equal(myext.test, [7, 42])\n\n    @raises(ConfigError)\n    def test_bad_multivalue_param(self):\n        myext = MultiValueParamExt(test=[5])\n        myext.validate()\n\n    @raises(ConfigError)\n    def test_validation_no_mandatory(self):\n        myext = MyOtherPlugin(optional='check')\n        myext.validate()\n\n    @raises(ConfigError)\n    def test_validation_no_mandatory_in_derived(self):\n        MyOtherOtherPlugin()\n\n    @raises(ConfigError)\n    def test_validation_bad_value(self):\n        myext = MyOtherPlugin(mandatory=1, optional='invalid')\n        myext.validate()\n\n"
  },
  {
    "path": "tests/test_runtime_param_utils.py",
    "content": "#    Copyright 2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\nimport unittest\nfrom nose.tools import assert_equal\nfrom mock.mock import Mock\n\nfrom wa.utils.misc import resolve_cpus, resolve_unique_domain_cpus\n\nclass TestRuntimeParameterUtils(unittest.TestCase):\n\n    def test_resolve_cpu(self):\n        # Set up a mock target\n        mock = Mock()\n        mock.big_core = \"A72\"\n        mock.little_core = \"A53\"\n        mock.core_names = ['A72', 'A72', 'A53', 'A53']\n        mock.number_of_cpus = 4\n        def mock_core_cpus(core):\n            return [i for i, c in enumerate(mock.core_names) if c == core]\n        def mock_online_cpus():\n            return [0, 1, 2]\n        def mock_offline_cpus():\n            return [3]\n        def mock_related_cpus(core):\n            if core in [0, 1]:\n                return [0, 1]\n            elif core in [2, 3]:\n                return [2, 3]\n\n        mock.list_online_cpus = mock_online_cpus\n        mock.list_offline_cpus = mock_offline_cpus\n        mock.core_cpus = mock_core_cpus\n        mock.core_cpus = mock_core_cpus\n        mock.cpufreq.get_related_cpus = mock_related_cpus\n\n        # Check retrieving cpus from a given prefix\n        assert_equal(resolve_cpus('A72', mock), [0, 1])\n        assert_equal(resolve_cpus('A53', mock), [2, 3])\n        assert_equal(resolve_cpus('big', mock), [0, 1])\n        assert_equal(resolve_cpus('little', mock), [2, 
3])\n        assert_equal(resolve_cpus('', mock), [0, 1, 2, 3])\n        assert_equal(resolve_cpus('cpu0', mock), [0])\n        assert_equal(resolve_cpus('cpu3', mock), [3])\n\n        # Check get unique domain cpus\n        assert_equal(resolve_unique_domain_cpus('A72', mock), [0])\n        assert_equal(resolve_unique_domain_cpus('A53', mock), [2])\n        assert_equal(resolve_unique_domain_cpus('big', mock), [0])\n        assert_equal(resolve_unique_domain_cpus('little', mock), [2])\n        assert_equal(resolve_unique_domain_cpus('', mock), [0, 2])\n        assert_equal(resolve_unique_domain_cpus('cpu0', mock), [0])\n        assert_equal(resolve_unique_domain_cpus('cpu3', mock), [2])\n"
  },
  {
    "path": "tests/test_signal.py",
    "content": "#    Copyright 2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\nimport logging\nimport unittest\n\nfrom nose.tools import assert_equal, assert_true, assert_false\n\nimport wa.framework.signal as signal\n\n\nclass Callable(object):\n\n    def __init__(self, val):\n        self.val = val\n\n    def __call__(self):\n        return self.val\n\n\nclass TestSignalDisconnect(unittest.TestCase):\n\n    def __init__(self, *args, **kwargs):\n        super().__init__(*args, **kwargs)\n        self.callback_ctr = 0\n\n    def setUp(self):\n        signal.connect(self._call_me_once, 'first')\n        signal.connect(self._call_me_once, 'second')\n\n    def test_handler_disconnected(self):\n        signal.send('first')\n        signal.send('second')\n\n    def _call_me_once(self):\n        assert_equal(self.callback_ctr, 0)\n        self.callback_ctr += 1\n        signal.disconnect(self._call_me_once, 'first')\n        signal.disconnect(self._call_me_once, 'second')\n\n\nclass TestPriorityDispatcher(unittest.TestCase):\n\n    def setUp(self):\n        # Stop logger output interfering with nose output in the console.\n        logger = logging.getLogger('signal')\n        logger.setLevel(logging.CRITICAL)\n\n    def test_ConnectNotify(self):\n        one = Callable(1)\n        two = Callable(2)\n        three = Callable(3)\n        signal.connect(\n            two,\n            'test',\n            priority=200\n        )\n        
signal.connect(\n            one,\n            'test',\n            priority=100\n        )\n        signal.connect(\n            three,\n            'test',\n            priority=300\n        )\n        result = [i[1] for i in signal.send('test')]\n        assert_equal(result, [3, 2, 1])\n\n    def test_wrap_propagate(self):\n        d = {'before': False, 'after': False, 'success': False}\n\n        def before():\n            d['before'] = True\n\n        def after():\n            d['after'] = True\n\n        def success():\n            d['success'] = True\n\n        signal.connect(before, signal.BEFORE_WORKLOAD_SETUP)\n        signal.connect(after, signal.AFTER_WORKLOAD_SETUP)\n        signal.connect(success, signal.SUCCESSFUL_WORKLOAD_SETUP)\n\n        caught = False\n        try:\n            with signal.wrap('WORKLOAD_SETUP'):\n                raise RuntimeError()\n        except RuntimeError:\n            caught = True\n\n        assert_true(d['before'])\n        assert_true(d['after'])\n        assert_true(caught)\n        assert_false(d['success'])\n"
  },
  {
    "path": "tests/test_utils.py",
    "content": "#    Copyright 2013-2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\n\n# pylint: disable=R0201\nfrom unittest import TestCase\n\nfrom nose.tools import raises, assert_equal, assert_not_equal, assert_in, assert_not_in\nfrom nose.tools import assert_true, assert_false, assert_raises, assert_is, assert_list_equal\n\nfrom wa.utils.types import (list_or_integer, list_or_bool, caseless_string,\n                            arguments, prioritylist, enum, level, toggle_set)\n\n\n\nclass TestPriorityList(TestCase):\n\n    def test_insert(self):\n        pl = prioritylist()\n        elements = {3: \"element 3\",\n                    2: \"element 2\",\n                    1: \"element 1\",\n                    5: \"element 5\",\n                    4: \"element 4\"\n                    }\n        for key in elements:\n            pl.add(elements[key], priority=key)\n\n        match = list(zip(sorted(elements.values()), pl[:]))\n        for pair in match:\n            assert(pair[0] == pair[1])\n\n    def test_delete(self):\n        pl = prioritylist()\n        elements = {2: \"element 3\",\n                    1: \"element 2\",\n                    0: \"element 1\",\n                    4: \"element 5\",\n                    3: \"element 4\"\n                    }\n        for key in elements:\n            pl.add(elements[key], priority=key)\n        del elements[2]\n        del pl[2]\n        match = 
list(zip(sorted(elements.values()), pl[:]))\n        for pair in match:\n            assert(pair[0] == pair[1])\n\n    def test_multiple(self):\n        pl = prioritylist()\n        pl.add('1', 1)\n        pl.add('2.1', 2)\n        pl.add('3', 3)\n        pl.add('2.2', 2)\n        it = iter(pl)\n        assert_equal(next(it), '3')\n        assert_equal(next(it), '2.1')\n        assert_equal(next(it), '2.2')\n        assert_equal(next(it), '1')\n\n    def test_iterator_break(self):\n        pl = prioritylist()\n        pl.add('1', 1)\n        pl.add('2.1', 2)\n        pl.add('3', 3)\n        pl.add('2.2', 2)\n        for i in pl:\n            if i == '2.1':\n                break\n        assert_equal(pl.index('3'), 3)\n\n    def test_add_before_after(self):\n        pl = prioritylist()\n        pl.add('m', 1)\n        pl.add('a', 2)\n        pl.add('n', 1)\n        pl.add('b', 2)\n        pl.add_before('x', 'm')\n        assert_equal(list(pl), ['a', 'b', 'x', 'm', 'n'])\n        pl.add_after('y', 'b')\n        assert_equal(list(pl), ['a', 'b','y', 'x', 'm', 'n'])\n        pl.add_after('z', 'm')\n        assert_equal(list(pl), ['a', 'b', 'y', 'x', 'm', 'z', 'n'])\n\n\nclass TestEnumLevel(TestCase):\n\n    def test_enum_creation(self):\n        e = enum(['one', 'two', 'three'])\n        assert_list_equal(e.values, [0, 1, 2])\n\n        e = enum(['one', 'two', 'three'], start=10)\n        assert_list_equal(e.values, [10, 11, 12])\n\n        e = enum(['one', 'two', 'three'], start=-10, step=10)\n        assert_list_equal(e.values, [-10, 0, 10])\n\n    def test_enum_name_conflicts(self):\n        assert_raises(ValueError, enum, ['names', 'one', 'two'])\n\n        e = enum(['NAMES', 'one', 'two'])\n        assert_in('names', e.levels)\n        assert_list_equal(e.names, ['names', 'one', 'two'])\n        assert_equal(e.ONE, 'one')\n        result = not (e.ONE != 'one')\n        assert_true(result)\n\n    def test_enum_behavior(self):\n        e = enum(['one', 'two', 
'three'])\n\n        # case-insensitive level name and level value may all\n        # be used for equality comparisons.\n        assert_equal(e.one, 'one')\n        assert_equal(e.one, 'ONE')\n        assert_equal(e.one, 0)\n        assert_not_equal(e.one, '0')\n\n        # ditto for enum membership tests\n        assert_in('one', e.levels)\n        assert_in(2, e.levels)\n        assert_not_in('five', e.levels)\n\n        # The same level object returned, only when\n        # passing in a valid level name/value.\n        assert_is(e('one'), e('ONE'))\n        assert_is(e('one'), e(0))\n        assert_raises(ValueError, e, 'five')\n\n    def test_serialize_level(self):\n        l = level('test', 1)\n        s = l.to_pod()\n        l2 = level.from_pod(s)\n        assert_equal(l, l2)\n\n    def test_deserialize_enum(self):\n        e = enum(['one', 'two', 'three'])\n        s = e.one.to_pod()\n        l = e.from_pod(s)\n        assert_equal(l, e.one)\n\n\nclass  TestToggleSet(TestCase):\n\n    def test_equality(self):\n        ts1 = toggle_set(['one', 'two',])\n        ts2 = toggle_set(['one', 'two', '~three'])\n\n        assert_not_equal(ts1, ts2)\n        assert_equal(ts1.values(), ts2.values())\n        assert_equal(ts2, toggle_set(['two', '~three', 'one']))\n\n    def test_merge(self):\n        ts1 = toggle_set(['one', 'two', 'three', '~four', '~five'])\n        ts2 = toggle_set(['two', '~three', 'four', '~five'])\n\n        ts3 = ts1.merge_with(ts2)\n        assert_equal(ts1, toggle_set(['one', 'two', 'three', '~four', '~five']))\n        assert_equal(ts2, toggle_set(['two', '~three', 'four', '~five']))\n        assert_equal(ts3, toggle_set(['one', 'two', '~three', 'four', '~five']))\n        assert_equal(ts3.values(), set(['one', 'two','four']))\n\n        ts4 = ts1.merge_into(ts2)\n        assert_equal(ts1, toggle_set(['one', 'two', 'three', '~four', '~five']))\n        assert_equal(ts2, toggle_set(['two', '~three', 'four', '~five']))\n        
assert_equal(ts4, toggle_set(['one', 'two', 'three', '~four', '~five']))\n        assert_equal(ts4.values(), set(['one', 'two', 'three']))\n\n    def test_drop_all_previous(self):\n        ts1 = toggle_set(['one', 'two', 'three'])\n        ts2 = toggle_set(['four', '~~', 'five'])\n        ts3 = toggle_set(['six', 'seven', '~three'])\n\n        ts4 = ts1.merge_with(ts2).merge_with(ts3)\n        assert_equal(ts4, toggle_set(['four', 'five', 'six', 'seven', '~three', '~~']))\n\n        ts5 = ts2.merge_into(ts3).merge_into(ts1)\n        assert_equal(ts5, toggle_set(['four', 'five', '~~']))\n\n        ts6 = ts2.merge_into(ts3).merge_with(ts1)\n        assert_equal(ts6, toggle_set(['one', 'two', 'three', 'four', 'five', '~~']))\n\n    def test_order_on_create(self):\n        ts1 = toggle_set(['one', 'two', 'three', '~one'])\n        assert_equal(ts1, toggle_set(['~one', 'two', 'three']))\n\n        ts1 = toggle_set(['~one', 'two', 'three', 'one'])\n        assert_equal(ts1, toggle_set(['one', 'two', 'three']))\n"
  },
  {
    "path": "wa/__init__.py",
    "content": "#    Copyright 2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\nfrom wa.framework import pluginloader, signal\nfrom wa.framework.command import Command, ComplexCommand, SubCommand\nfrom wa.framework.configuration import settings\nfrom wa.framework.configuration.core import Status\nfrom wa.framework.exception import (CommandError, ConfigError, HostError, InstrumentError,  # pylint: disable=redefined-builtin\n                                    JobError, NotFoundError, OutputProcessorError,\n                                    PluginLoaderError, ResourceError, TargetError,\n                                    TargetNotRespondingError, TimeoutError, ToolError,\n                                    ValidationError, WAError, WorkloadError, WorkerThreadError)\nfrom wa.framework.instrument import (Instrument, extremely_slow, very_slow, slow, normal, fast,\n                                     very_fast, extremely_fast, hostside)\nfrom wa.framework.output import RunOutput, discover_wa_outputs\nfrom wa.framework.output_processor import OutputProcessor\nfrom wa.framework.plugin import Plugin, Parameter, Alias\nfrom wa.framework.resource import (NO_ONE, JarFile, ApkFile, ReventFile, File,\n                                   Executable)\nfrom wa.framework.target.descriptor import (TargetDescriptor, TargetDescription,\n                                            create_target_description, add_description_for_target)\nfrom 
wa.framework.workload import (Workload, ApkWorkload, ApkUiautoWorkload,\n                                   ApkReventWorkload, UIWorkload, UiautoWorkload,\n                                   PackageHandler, ReventWorkload, TestPackageHandler)\n\n\nfrom wa.framework.version import get_wa_version, get_wa_version_with_commit\n\n__version__ = get_wa_version()\n__full_version__ = get_wa_version_with_commit()\n"
  },
  {
    "path": "wa/commands/__init__.py",
    "content": ""
  },
  {
    "path": "wa/commands/create.py",
    "content": "#    Copyright 2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\n\nimport os\nimport sys\nimport stat\nimport shutil\nimport string\nimport re\nimport uuid\nimport getpass\nfrom collections import OrderedDict\n\nfrom devlib.utils.types import identifier\ntry:\n    import psycopg2\n    from psycopg2 import connect, OperationalError, extras\n    from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT\nexcept ImportError as e:\n    psycopg2 = None\n    import_error_msg = e.args[0] if e.args else str(e)\n\nfrom wa import ComplexCommand, SubCommand, pluginloader, settings\nfrom wa.framework.target.descriptor import list_target_descriptions\nfrom wa.framework.exception import ConfigError, CommandError\nfrom wa.instruments.energy_measurement import EnergyInstrumentBackend\nfrom wa.utils.misc import (ensure_directory_exists as _d, capitalize,\n                           ensure_file_directory_exists as _f)\nfrom wa.utils.postgres import get_schema, POSTGRES_SCHEMA_DIR\nfrom wa.utils.serializer import yaml\n\nif sys.version_info >= (3, 8):\n    def copy_tree(src, dst):\n        from shutil import copy, copytree  # pylint: disable=import-outside-toplevel\n        copytree(\n            src,\n            dst,\n            # dirs_exist_ok=True only exists in Python >= 3.8\n            dirs_exist_ok=True,\n            # Align with devlib and only copy the content without metadata\n            copy_function=copy\n        )\nelse:\n    
def copy_tree(src, dst):\n        # pylint: disable=import-outside-toplevel, redefined-outer-name\n        from distutils.dir_util import copy_tree\n        # Align with devlib and only copy the content without metadata\n        copy_tree(src, dst, preserve_mode=False, preserve_times=False)\n\n\nTEMPLATES_DIR = os.path.join(os.path.dirname(__file__), 'templates')\n\n\nclass CreateDatabaseSubcommand(SubCommand):\n\n    name = 'database'\n    description = \"\"\"\n    Create a Postgresql database which is compatible with the WA Postgres\n    output processor.\n    \"\"\"\n\n    schemafilepath = os.path.join(POSTGRES_SCHEMA_DIR, 'postgres_schema.sql')\n    schemaupdatefilepath = os.path.join(POSTGRES_SCHEMA_DIR, 'postgres_schema_update_v{}.{}.sql')\n\n    def __init__(self, *args, **kwargs):\n        super(CreateDatabaseSubcommand, self).__init__(*args, **kwargs)\n        self.sql_commands = None\n        self.schema_major = None\n        self.schema_minor = None\n        self.postgres_host = None\n        self.postgres_port = None\n        self.username = None\n        self.password = None\n        self.dbname = None\n        self.config_file = None\n        self.force = None\n\n    def initialize(self, context):\n        self.parser.add_argument(\n            '-a', '--postgres-host', default='localhost',\n            help='The host on which to create the database.')\n        self.parser.add_argument(\n            '-k', '--postgres-port', default='5432',\n            help='The port on which the PostgreSQL server is running.')\n        self.parser.add_argument(\n            '-u', '--username', default='postgres',\n            help='The username with which to connect to the server.')\n        self.parser.add_argument(\n            '-p', '--password',\n            help='The password for the user account.')\n        self.parser.add_argument(\n            '-d', '--dbname', default='wa',\n            help='The name of the database to create.')\n        
self.parser.add_argument(\n            '-f', '--force', action='store_true',\n            help='Force overwrite the existing database if one exists.')\n        self.parser.add_argument(\n            '-F', '--force-update-config', action='store_true',\n            help='Force update the config file if an entry exists.')\n        self.parser.add_argument(\n            '-r', '--config-file', default=settings.user_config_file,\n            help='Path to the config file to be updated.')\n        self.parser.add_argument(\n            '-x', '--schema-version', action='store_true',\n            help='Display the current schema version.')\n        self.parser.add_argument(\n            '-U', '--upgrade', action='store_true',\n            help='Upgrade the database to use the latest schema version.')\n\n    def execute(self, state, args):  # pylint: disable=too-many-branches\n        if not psycopg2:\n            raise CommandError(\n                'The module psycopg2 is required for the wa '\n                + 'create database command.')\n\n        if args.dbname == 'postgres':\n            raise ValueError('Database name to create cannot be postgres.')\n\n        self._parse_args(args)\n        self.schema_major, self.schema_minor, self.sql_commands = get_schema(self.schemafilepath)\n\n        # Display the version if needed and exit\n        if args.schema_version:\n            self.logger.info(\n                'The current schema version is {}.{}'.format(self.schema_major,\n                                                             self.schema_minor))\n            return\n\n        if args.upgrade:\n            self.update_schema()\n            return\n\n        # Open user configuration\n        with open(self.config_file, 'r') as config_file:\n            config = yaml.load(config_file)\n            if 'postgres' in config and not args.force_update_config:\n                raise CommandError(\n                    \"The entry 'postgres' already exists in the config 
file. \"\n                    + \"Please specify the -F flag to force an update.\")\n\n        possible_connection_errors = [\n            (\n                re.compile('FATAL:  role \".*\" does not exist'),\n                'Username does not exist or password is incorrect'\n            ),\n            (\n                re.compile('FATAL:  password authentication failed for user'),\n                'Password was incorrect'\n            ),\n            (\n                re.compile('fe_sendauth: no password supplied'),\n                'Passwordless connection is not enabled. '\n                'Please enable trust in pg_hba for this host '\n                'or use a password'\n            ),\n            (\n                re.compile('FATAL:  no pg_hba.conf entry for'),\n                'Host is not allowed to connect to the specified database '\n                'using this user according to pg_hba.conf. Please change the '\n                'rules in pg_hba or your connection method'\n            ),\n            (\n                re.compile('FATAL:  pg_hba.conf rejects connection'),\n                'Connection was rejected by pg_hba.conf'\n            ),\n        ]\n\n        def predicate(error, handle):\n            if handle[0].match(str(error)):\n                raise CommandError(handle[1] + ': \\n' + str(error))\n\n        # Attempt to create database\n        try:\n            self.create_database()\n        except OperationalError as e:\n            for handle in possible_connection_errors:\n                predicate(e, handle)\n            raise e\n\n        # Update the configuration file\n        self._update_configuration_file(config)\n\n    def create_database(self):\n        self._validate_version()\n\n        self._check_database_existence()\n\n        self._create_database_postgres()\n\n        self._apply_database_schema(self.sql_commands, self.schema_major, self.schema_minor)\n\n        self.logger.info(\n            \"Successfully created the 
database {}\".format(self.dbname))\n\n    def update_schema(self):\n        self._validate_version()\n        schema_major, schema_minor, _ = get_schema(self.schemafilepath)\n        meta_oid, current_major, current_minor = self._get_database_schema_version()\n\n        while not (schema_major == current_major and schema_minor == current_minor):\n            current_minor = self._update_schema_minors(current_major, current_minor, meta_oid)\n            current_major, current_minor = self._update_schema_major(current_major, current_minor, meta_oid)\n        msg = \"Database schema update of '{}' to v{}.{} complete\"\n        self.logger.info(msg.format(self.dbname, schema_major, schema_minor))\n\n    def _update_schema_minors(self, major, minor, meta_oid):\n        # Upgrade all available minor versions\n        while True:\n            minor += 1\n            schema_update = os.path.join(POSTGRES_SCHEMA_DIR,\n                                         self.schemaupdatefilepath.format(major, minor))\n            if not os.path.exists(schema_update):\n                break\n\n            _, _, sql_commands = get_schema(schema_update)\n            self._apply_database_schema(sql_commands, major, minor, meta_oid)\n            msg = \"Updated the database schema to v{}.{}\"\n            self.logger.debug(msg.format(major, minor))\n\n        # Return last existing update file version\n        return minor - 1\n\n    def _update_schema_major(self, current_major, current_minor, meta_oid):\n        current_major += 1\n        schema_update = os.path.join(POSTGRES_SCHEMA_DIR,\n                                     self.schemaupdatefilepath.format(current_major, 0))\n        if not os.path.exists(schema_update):\n            return (current_major - 1, current_minor)\n\n        # Reset minor to 0 with major version bump\n        current_minor = 0\n        _, _, sql_commands = get_schema(schema_update)\n        self._apply_database_schema(sql_commands, current_major, 
current_minor, meta_oid)\n        msg = \"Updated the database schema to v{}.{}\"\n        self.logger.debug(msg.format(current_major, current_minor))\n        return (current_major, current_minor)\n\n    def _validate_version(self):\n        conn = connect(user=self.username,\n                       password=self.password, host=self.postgres_host, port=self.postgres_port)\n        if conn.server_version < 90400:\n            msg = 'Postgres version too low. Please ensure that you are using at least v9.4'\n            raise CommandError(msg)\n\n    def _get_database_schema_version(self):\n        conn = connect(dbname=self.dbname, user=self.username,\n                       password=self.password, host=self.postgres_host, port=self.postgres_port)\n        cursor = conn.cursor()\n        cursor.execute('''SELECT\n                                DatabaseMeta.oid,\n                                DatabaseMeta.schema_major,\n                                DatabaseMeta.schema_minor\n                          FROM\n                                DatabaseMeta;''')\n        return cursor.fetchone()\n\n    def _check_database_existence(self):\n        try:\n            connect(dbname=self.dbname, user=self.username,\n                    password=self.password, host=self.postgres_host, port=self.postgres_port)\n        except OperationalError as e:\n            # Expect an operational error (database's non-existence)\n            if not re.compile('FATAL:  database \".*\" does not exist').match(str(e)):\n                raise e\n        else:\n            if not self.force:\n                raise CommandError(\n                    \"Database {} already exists. 
\".format(self.dbname)\n                    + \"Please specify the -f flag to create it from afresh.\"\n                )\n\n    def _create_database_postgres(self):\n        conn = connect(dbname='postgres', user=self.username,\n                       password=self.password, host=self.postgres_host, port=self.postgres_port)\n        conn.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)\n        cursor = conn.cursor()\n        cursor.execute('DROP DATABASE IF EXISTS ' + self.dbname)\n        cursor.execute('CREATE DATABASE ' + self.dbname)\n        conn.commit()\n        cursor.close()\n        conn.close()\n\n    def _apply_database_schema(self, sql_commands, schema_major, schema_minor, meta_uuid=None):\n        conn = connect(dbname=self.dbname, user=self.username,\n                       password=self.password, host=self.postgres_host, port=self.postgres_port)\n        cursor = conn.cursor()\n        cursor.execute(sql_commands)\n\n        if not meta_uuid:\n            extras.register_uuid()\n            meta_uuid = uuid.uuid4()\n            cursor.execute(\"INSERT INTO DatabaseMeta VALUES (%s, %s, %s)\",\n                           (meta_uuid,\n                            schema_major,\n                            schema_minor\n                            ))\n        else:\n            cursor.execute(\"UPDATE DatabaseMeta SET schema_major = %s, schema_minor = %s WHERE oid = %s;\",\n                           (schema_major,\n                            schema_minor,\n                            meta_uuid\n                            ))\n\n        conn.commit()\n        cursor.close()\n        conn.close()\n\n    def _update_configuration_file(self, config):\n        ''' Update the user configuration file with the newly created database's\n            configuration.\n            '''\n        config['postgres'] = OrderedDict(\n            [('host', self.postgres_host), ('port', self.postgres_port),\n             ('dbname', self.dbname), ('username', 
self.username), ('password', self.password)])\n        with open(self.config_file, 'w+') as config_file:\n            yaml.dump(config, config_file)\n\n    def _parse_args(self, args):\n        self.postgres_host = args.postgres_host\n        self.postgres_port = args.postgres_port\n        self.username = args.username\n        self.password = args.password\n        self.dbname = args.dbname\n        self.config_file = args.config_file\n        self.force = args.force\n\n\nclass CreateAgendaSubcommand(SubCommand):\n\n    name = 'agenda'\n    description = \"\"\"\n    Create an agenda with the specified extensions enabled. And parameters set\n    to their default values.\n    \"\"\"\n\n    def initialize(self, context):\n        self.parser.add_argument('plugins', nargs='+',\n                                 help='Plugins to be added to the agendas')\n        self.parser.add_argument('-i', '--iterations', type=int, default=1,\n                                 help='Sets the number of iterations for all workloads')\n        self.parser.add_argument('-o', '--output', metavar='FILE',\n                                 help='Output file. 
If not specified, STDOUT will be used instead.')\n\n    # pylint: disable=too-many-branches\n    def execute(self, state, args):\n        agenda = OrderedDict()\n        agenda['config'] = OrderedDict(augmentations=[], iterations=args.iterations)\n        agenda['workloads'] = []\n        target_desc = None\n\n        targets = {td.name: td for td in list_target_descriptions()}\n\n        for name in args.plugins:\n            if name in targets:\n                if target_desc is not None:\n                    raise ConfigError('Specifying multiple devices: {} and {}'.format(target_desc.name, name))\n                target_desc = targets[name]\n                agenda['config']['device'] = name\n                agenda['config']['device_config'] = target_desc.get_default_config()\n                continue\n\n            extcls = pluginloader.get_plugin_class(name)\n            config = pluginloader.get_default_config(name)\n\n            # Handle special case for EnergyInstrumentBackends\n            if issubclass(extcls, EnergyInstrumentBackend):\n                if 'energy_measurement' not in agenda['config']['augmentations']:\n                    energy_config = pluginloader.get_default_config('energy_measurement')\n                    agenda['config']['augmentations'].append('energy_measurement')\n                    agenda['config']['energy_measurement'] = energy_config\n                agenda['config']['energy_measurement']['instrument'] = extcls.name\n                agenda['config']['energy_measurement']['instrument_parameters'] = config\n            elif extcls.kind == 'workload':\n                entry = OrderedDict()\n                entry['name'] = extcls.name\n                if name != extcls.name:\n                    entry['label'] = name\n                entry['params'] = config\n                agenda['workloads'].append(entry)\n            else:\n                if extcls.kind in ('instrument', 'output_processor'):\n                    if 
extcls.name not in agenda['config']['augmentations']:\n                        agenda['config']['augmentations'].append(extcls.name)\n\n                if extcls.name not in agenda['config']:\n                    agenda['config'][extcls.name] = config\n\n        if args.output:\n            wfh = open(args.output, 'w')\n        else:\n            wfh = sys.stdout\n        yaml.dump(agenda, wfh, indent=4, default_flow_style=False)\n        if args.output:\n            wfh.close()\n\n\nclass CreateWorkloadSubcommand(SubCommand):\n\n    name = 'workload'\n    description = '''Create a new workload. By default, a basic workload template will be\n                     used but you can specify the `KIND` to choose a different template.'''\n\n    def initialize(self, context):\n        self.parser.add_argument('name', metavar='NAME',\n                                 help='Name of the workload to be created')\n        self.parser.add_argument('-p', '--path', metavar='PATH', default=None,\n                                 help='The location at which the workload will be created. If not specified, '\n                                      + 'this defaults to \"~/.workload_automation/plugins\".')\n        self.parser.add_argument('-f', '--force', action='store_true',\n                                 help='Create the new workload even if a workload with the specified '\n                                      + 'name already exists.')\n        self.parser.add_argument('-k', '--kind', metavar='KIND', default='basic', choices=list(create_funcs.keys()),\n                                 help='The type of workload to be created. 
The available options '\n                                      + 'are: {}'.format(', '.join(list(create_funcs.keys()))))\n\n    def execute(self, state, args):  # pylint: disable=R0201\n        where = args.path or 'local'\n        check_name = not args.force\n\n        try:\n            create_workload(args.name, args.kind, where, check_name)\n        except CommandError as e:\n            self.logger.error('ERROR: {}'.format(e))\n\n\nclass CreatePackageSubcommand(SubCommand):\n\n    name = 'package'\n    description = '''Create a new empty Python package for WA extensions. On installation,\n                     this package will \"advertise\" itself to WA so that Plugins within it will\n                     be loaded by WA when it runs.'''\n\n    def initialize(self, context):\n        self.parser.add_argument('name', metavar='NAME',\n                                 help='Name of the package to be created')\n        self.parser.add_argument('-p', '--path', metavar='PATH', default=None,\n                                 help='The location at which the new package will be created. 
If not specified, '\n                                      + 'current working directory will be used.')\n        self.parser.add_argument('-f', '--force', action='store_true',\n                                 help='Create the new package even if a file or directory with the same name '\n                                      'already exists at the specified location.')\n\n    def execute(self, state, args):  # pylint: disable=R0201\n        package_dir = args.path or os.path.abspath('.')\n        template_path = os.path.join(TEMPLATES_DIR, 'setup.template')\n        self.create_extensions_package(package_dir, args.name, template_path, args.force)\n\n    def create_extensions_package(self, location, name, setup_template_path, overwrite=False):\n        package_path = os.path.join(location, name)\n        if os.path.exists(package_path):\n            if overwrite:\n                self.logger.info('overwriting existing \"{}\"'.format(package_path))\n                shutil.rmtree(package_path)\n            else:\n                raise CommandError('Location \"{}\" already exists.'.format(package_path))\n        actual_package_path = os.path.join(package_path, name)\n        os.makedirs(actual_package_path)\n        setup_text = render_template(setup_template_path, {'package_name': name, 'user': getpass.getuser()})\n        with open(os.path.join(package_path, 'setup.py'), 'w') as wfh:\n            wfh.write(setup_text)\n        touch(os.path.join(actual_package_path, '__init__.py'))\n\n\nclass CreateCommand(ComplexCommand):\n\n    name = 'create'\n    description = '''\n    Used to create various WA-related objects (see positional arguments list\n    for what objects may be created).\\n\\nUse \"wa create <object> -h\" for\n    object-specific arguments.\n    '''\n    subcmd_classes = [\n        CreateDatabaseSubcommand,\n        CreateWorkloadSubcommand,\n        CreateAgendaSubcommand,\n        CreatePackageSubcommand,\n    ]\n\n\ndef create_workload(name, 
kind='basic', where='local', check_name=True, **kwargs):\n\n    if check_name:\n        if name in [wl.name for wl in pluginloader.list_plugins('workload')]:\n            raise CommandError('Workload with name \"{}\" already exists.'.format(name))\n\n    class_name = get_class_name(name)\n    if where == 'local':\n        workload_dir = _d(os.path.join(settings.plugins_directory, name))\n    else:\n        workload_dir = _d(os.path.join(where, name))\n\n    try:\n        # Note: `create_funcs` mapping is listed below\n        create_funcs[kind](workload_dir, name, kind, class_name, **kwargs)\n    except KeyError:\n        raise CommandError('Unknown workload type: {}'.format(kind))\n\n    # pylint: disable=superfluous-parens\n    print('Workload created in {}'.format(workload_dir))\n\n\ndef create_template_workload(path, name, kind, class_name):\n    source_file = os.path.join(path, '__init__.py')\n    with open(source_file, 'w') as wfh:\n        wfh.write(render_template('{}_workload'.format(kind), {'name': name, 'class_name': class_name}))\n\n\ndef create_uiautomator_template_workload(path, name, kind, class_name):\n    uiauto_path = os.path.join(path, 'uiauto')\n    create_uiauto_project(uiauto_path, name)\n    create_template_workload(path, name, kind, class_name)\n\n\ndef create_uiauto_project(path, name):\n    package_name = 'com.arm.wa.uiauto.' 
+ name.lower()\n\n    copy_tree(os.path.join(TEMPLATES_DIR, 'uiauto', 'uiauto_workload_template'), path)\n\n    manifest_path = os.path.join(path, 'app', 'src', 'main')\n    mainifest = os.path.join(_d(manifest_path), 'AndroidManifest.xml')\n    with open(mainifest, 'w') as wfh:\n        wfh.write(render_template(os.path.join('uiauto', 'uiauto_AndroidManifest.xml'),\n                                  {'package_name': package_name}))\n\n    build_gradle_path = os.path.join(path, 'app')\n    build_gradle = os.path.join(_d(build_gradle_path), 'build.gradle')\n    with open(build_gradle, 'w') as wfh:\n        wfh.write(render_template(os.path.join('uiauto', 'uiauto_build.gradle'),\n                                  {'package_name': package_name}))\n\n    build_script = os.path.join(path, 'build.sh')\n    with open(build_script, 'w') as wfh:\n        wfh.write(render_template(os.path.join('uiauto', 'uiauto_build_script'),\n                                  {'package_name': package_name}))\n    os.chmod(build_script, stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO)\n\n    source_file = _f(os.path.join(path, 'app', 'src', 'main', 'java',\n                                  os.sep.join(package_name.split('.')[:-1]),\n                                  'UiAutomation.java'))\n    with open(source_file, 'w') as wfh:\n        wfh.write(render_template(os.path.join('uiauto', 'UiAutomation.java'),\n                                  {'name': name, 'package_name': package_name}))\n\n\n# Mapping of workload types to their corresponding creation method\ncreate_funcs = {\n    'basic': create_template_workload,\n    'apk': create_template_workload,\n    'revent': create_template_workload,\n    'apkrevent': create_template_workload,\n    'uiauto': create_uiautomator_template_workload,\n    'apkuiauto': create_uiautomator_template_workload,\n}\n\n\n# Utility functions\ndef render_template(name, params):\n    filepath = os.path.join(TEMPLATES_DIR, name)\n    with open(filepath) as fh:\n        
text = fh.read()\n        template = string.Template(text)\n        return template.substitute(params)\n\n\ndef get_class_name(name, postfix=''):\n    name = identifier(name)\n    return ''.join(map(capitalize, name.split('_'))) + postfix\n\n\ndef touch(path):\n    with open(path, 'w') as _: # NOQA\n        pass\n"
  },
  {
    "path": "wa/commands/list.py",
    "content": "#    Copyright 2014-2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\n\nfrom wa import Command\nfrom wa.framework import pluginloader\nfrom wa.framework.target.descriptor import list_target_descriptions\nfrom wa.utils.doc import get_summary\nfrom wa.utils.formatter import DescriptionListFormatter\n\n\nclass ListCommand(Command):\n\n    name = 'list'\n    description = 'List available WA plugins with a short description of each.'\n\n    def initialize(self, context):\n        kinds = get_kinds()\n        kinds.extend(['augmentations', 'all'])\n        self.parser.add_argument('kind', metavar='KIND',\n                                 help=('Specify the kind of plugin to list. Must be '\n                                       'one of: {}'.format(', '.join(sorted(kinds)))),\n                                 choices=sorted(kinds))\n        self.parser.add_argument('-n', '--name',\n                                 help='Filter results by the name specified')\n        self.parser.add_argument('-o', '--packaged-only', action='store_true',\n                                 help='''\n                                 Only list plugins packaged with WA itself. 
Do\n                                 not list plugins installed locally or from\n                                 other packages.\n                                 ''')\n        self.parser.add_argument('-p', '--platform',\n                                 help='''\n                                 Only list results that are supported by the\n                                 specified platform.\n                                 ''')\n\n    # pylint: disable=superfluous-parens\n    def execute(self, state, args):\n        filters = {}\n        if args.name:\n            filters['name'] = args.name\n\n        if args.kind == 'targets':\n            list_targets()\n        elif args.kind == 'augmentations':\n            print('instruments:')\n            args.kind = 'instruments'\n            list_plugins(args, filters)\n            print('\\noutput processors:')\n            args.kind = 'output_processors'\n            list_plugins(args, filters)\n        elif args.kind == 'all':\n            for kind in sorted(get_kinds()):\n                print('\\n{}:'.format(kind))\n                if kind == 'targets':\n                    list_targets()\n                else:\n                    args.kind = kind\n                    list_plugins(args, filters)\n        else:\n            list_plugins(args, filters)\n\n\ndef get_kinds():\n    kinds = pluginloader.kinds\n    if 'target_descriptor' in kinds:\n        kinds.remove('target_descriptor')\n        kinds.append('target')\n    return ['{}s'.format(name) for name in kinds]\n\n\n# pylint: disable=superfluous-parens\ndef list_targets():\n    targets = list_target_descriptions()\n    targets = sorted(targets, key=lambda x: x.name)\n\n    output = DescriptionListFormatter()\n    for target in targets:\n        output.add_item(target.description or '', target.name)\n    print(output.format_data())\n    print('')\n\n\ndef list_plugins(args, filters):\n    results = pluginloader.list_plugins(args.kind[:-1])\n    if filters or 
args.platform:\n        filtered_results = []\n        for result in results:\n            passed = True\n            for k, v in filters.items():\n                if getattr(result, k) != v:\n                    passed = False\n                    break\n            if passed and args.platform:\n                passed = check_platform(result, args.platform)\n            if passed:\n                filtered_results.append(result)\n    else:  # no filters specified\n        filtered_results = results\n\n    if filtered_results:\n        output = DescriptionListFormatter()\n        for result in sorted(filtered_results, key=lambda x: x.name):\n            output.add_item(get_summary(result), result.name)\n        print(output.format_data())\n\n    print('')\n\n\ndef check_platform(plugin, platform):\n    supported_platforms = getattr(plugin, 'supported_platforms', [])\n    if supported_platforms:\n        return platform in supported_platforms\n    return True\n"
  },
  {
    "path": "wa/commands/postgres_schemas/postgres_schema.sql",
    "content": "--!VERSION!1.6!ENDVERSION!\nCREATE EXTENSION IF NOT EXISTS \"uuid-ossp\";\nCREATE EXTENSION IF NOT EXISTS \"lo\";\n\n-- In future, it may be useful to implement rules on which Parameter oid fields can be none depeendent on the value in the type column;\n\nDROP TABLE IF EXISTS DatabaseMeta;\nDROP TABLE IF EXISTS Parameters;\nDROP TABLE IF EXISTS Classifiers;\nDROP TABLE IF EXISTS LargeObjects;\nDROP TABLE IF EXISTS Artifacts;\nDROP TABLE IF EXISTS Metrics;\nDROP TABLE IF EXISTS Augmentations;\nDROP TABLE IF EXISTS Jobs_Augs;\nDROP TABLE IF EXISTS ResourceGetters;\nDROP TABLE IF EXISTS Resource_Getters;\nDROP TABLE IF EXISTS Events;\nDROP TABLE IF EXISTS Targets;\nDROP TABLE IF EXISTS Jobs;\nDROP TABLE IF EXISTS Runs;\n\nDROP TYPE IF EXISTS status_enum;\nDROP TYPE IF EXISTS param_enum;\n\nCREATE TYPE status_enum AS ENUM ('UNKNOWN(0)','NEW(1)','PENDING(2)','STARTED(3)','CONNECTED(4)', 'INITIALIZED(5)', 'RUNNING(6)', 'OK(7)', 'PARTIAL(8)', 'FAILED(9)', 'ABORTED(10)', 'SKIPPED(11)');\n\nCREATE TYPE param_enum AS ENUM ('workload', 'resource_getter', 'augmentation', 'device', 'runtime', 'boot');\n\n-- In future, it might be useful to create an ENUM type for the artifact kind, or simply a generic enum type;\n\nCREATE TABLE DatabaseMeta (\n    oid uuid NOT NULL,\n    schema_major int,\n    schema_minor int,\n    PRIMARY KEY (oid)\n);\n\nCREATE TABLE Runs (\n    oid uuid NOT NULL,\n    event_summary text,\n    basepath text,\n    status status_enum,\n    timestamp timestamp,\n    run_name text,\n    project text,\n    project_stage text,\n    retry_on_status status_enum[],\n    max_retries int,\n    bail_on_init_failure boolean,\n    allow_phone_home boolean,\n    run_uuid uuid,\n    start_time timestamp,\n    end_time timestamp,\n    duration float,\n    metadata jsonb,\n    _pod_version int,\n    _pod_serialization_version int,\n    state jsonb,\n    PRIMARY KEY (oid)\n);\n\nCREATE TABLE Jobs (\n    oid uuid NOT NULL,\n    run_oid uuid NOT NULL references 
Runs(oid) ON DELETE CASCADE,\n    status status_enum,\n    retry int,\n    label text,\n    job_id text,\n    iterations int,\n    workload_name text,\n    metadata jsonb,\n    _pod_version int,\n    _pod_serialization_version int,\n    PRIMARY KEY (oid)\n);\n\nCREATE TABLE Targets (\n    oid uuid NOT NULL,\n    run_oid uuid NOT NULL references Runs(oid) ON DELETE CASCADE,\n    target text,\n    modules text[],\n    cpus text[],\n    os text,\n    os_version jsonb,\n    hostid bigint,\n    hostname text,\n    abi text,\n    is_rooted boolean,\n    kernel_version text,\n    kernel_release text,\n    kernel_sha1 text,\n    kernel_config text[],\n    sched_features text[],\n    page_size_kb int,\n    screen_resolution int[],\n    prop json,\n    android_id text,\n    _pod_version int,\n    _pod_serialization_version int,\n    system_id text,\n    PRIMARY KEY (oid)\n);\n\nCREATE TABLE Events (\n    oid uuid NOT NULL,\n    run_oid uuid NOT NULL references Runs(oid) ON DELETE CASCADE,\n    job_oid uuid references Jobs(oid),\n    timestamp timestamp,\n    message text,\n    _pod_version int,\n    _pod_serialization_version int,\n    PRIMARY KEY (oid)\n);\n\nCREATE TABLE Resource_Getters (\n    oid uuid NOT NULL,\n    run_oid uuid NOT NULL references Runs(oid) ON DELETE CASCADE,\n    name text,\n    PRIMARY KEY (oid)\n);\n\nCREATE TABLE Augmentations (\n    oid uuid NOT NULL,\n    run_oid uuid NOT NULL references Runs(oid) ON DELETE CASCADE,\n    name text,\n    PRIMARY KEY (oid)\n);\n\nCREATE TABLE Jobs_Augs (\n    oid uuid NOT NULL,\n    job_oid uuid NOT NULL references Jobs(oid) ON DELETE CASCADE,\n    augmentation_oid uuid NOT NULL references Augmentations(oid) ON DELETE CASCADE,\n    PRIMARY KEY (oid)\n);\n\nCREATE TABLE Metrics (\n    oid uuid NOT NULL,\n    run_oid uuid NOT NULL references Runs(oid) ON DELETE CASCADE,\n    job_oid uuid references Jobs(oid),\n    name text,\n    value double precision,\n    units text,\n    lower_is_better boolean,\n    _pod_version 
int,\n    _pod_serialization_version int,\n    PRIMARY KEY (oid)\n);\n\nCREATE TABLE LargeObjects (\n    oid uuid NOT NULL,\n    lo_oid lo NOT NULL,\n    PRIMARY KEY (oid)\n);\n\n-- Trigger that allows you to manage large objects from the LO table directly;\nCREATE TRIGGER t_raster BEFORE UPDATE OR DELETE ON LargeObjects\n    FOR EACH ROW EXECUTE PROCEDURE lo_manage(lo_oid);\n\nCREATE TABLE Artifacts (\n    oid uuid NOT NULL,\n    run_oid uuid NOT NULL references Runs(oid) ON DELETE CASCADE,\n    job_oid uuid references Jobs(oid),\n    name text,\n    large_object_uuid uuid NOT NULL references LargeObjects(oid),\n    description text,\n    kind text,\n    _pod_version int,\n    _pod_serialization_version int,\n    is_dir boolean,\n    PRIMARY KEY (oid)\n);\n\nCREATE RULE del_lo AS\n    ON DELETE TO Artifacts\n    DO DELETE FROM LargeObjects\n        WHERE LargeObjects.oid = old.large_object_uuid\n;\n\nCREATE TABLE Classifiers (\n    oid uuid NOT NULL,\n    artifact_oid uuid references Artifacts(oid) ON DELETE CASCADE,\n    metric_oid uuid references Metrics(oid) ON DELETE CASCADE,\n    job_oid uuid references Jobs(oid) ON DELETE CASCADE,\n    run_oid uuid references Runs(oid) ON DELETE CASCADE,\n    key text,\n    value text,\n    PRIMARY KEY (oid)\n);\n\nCREATE TABLE Parameters (\n    oid uuid NOT NULL,\n    run_oid uuid NOT NULL references Runs(oid) ON DELETE CASCADE,\n    job_oid uuid references Jobs(oid),\n    augmentation_oid uuid references Augmentations(oid),\n    resource_getter_oid uuid references Resource_Getters(oid),\n    name text,\n    value text,\n    value_type text,\n    type param_enum,\n    PRIMARY KEY (oid)\n);\n"
  },
  {
    "path": "wa/commands/postgres_schemas/postgres_schema_update_v1.2.sql",
    "content": "ALTER TABLE resourcegetters RENAME TO resource_getters;\n\nALTER TABLE classifiers ADD COLUMN job_oid uuid references Jobs(oid);\nALTER TABLE classifiers ADD COLUMN run_oid uuid references Runs(oid);\n\nALTER TABLE targets ADD COLUMN page_size_kb int;\nALTER TABLE targets ADD COLUMN screen_resolution int[];\nALTER TABLE targets ADD COLUMN prop text;\nALTER TABLE targets ADD COLUMN android_id text;\nALTER TABLE targets ADD COLUMN _pod_version int;\nALTER TABLE targets ADD COLUMN _pod_serialization_version int;\n\nALTER TABLE jobs RENAME COLUMN retries TO retry;\nALTER TABLE jobs ADD COLUMN _pod_version int;\nALTER TABLE jobs ADD COLUMN _pod_serialization_version int;\n\nALTER TABLE runs ADD COLUMN project_stage text;\nALTER TABLE runs ADD COLUMN state jsonb;\nALTER TABLE runs ADD COLUMN duration float;\nALTER TABLE runs ADD COLUMN _pod_version int;\nALTER TABLE runs ADD COLUMN _pod_serialization_version int;\n\nALTER TABLE artifacts ADD COLUMN _pod_version int;\nALTER TABLE artifacts ADD COLUMN _pod_serialization_version int;\n\nALTER TABLE events ADD COLUMN _pod_version int;\nALTER TABLE events ADD COLUMN _pod_serialization_version int;\n\nALTER TABLE metrics ADD COLUMN _pod_version int;\nALTER TABLE metrics ADD COLUMN _pod_serialization_version int;\n"
  },
  {
    "path": "wa/commands/postgres_schemas/postgres_schema_update_v1.3.sql",
    "content": "ALTER TABLE targets ADD COLUMN system_id text;\n\nALTER TABLE artifacts ADD COLUMN is_dir boolean;\n"
  },
  {
    "path": "wa/commands/postgres_schemas/postgres_schema_update_v1.4.sql",
    "content": "ALTER TABLE targets ADD COLUMN modules text[];\n\n"
  },
  {
    "path": "wa/commands/postgres_schemas/postgres_schema_update_v1.5.sql",
    "content": "ALTER TABLE targets ALTER hostid TYPE BIGINT;\n"
  },
  {
    "path": "wa/commands/postgres_schemas/postgres_schema_update_v1.6.sql",
    "content": "ALTER TABLE jobs\n    DROP CONSTRAINT jobs_run_oid_fkey,\n    ADD CONSTRAINT jobs_run_oid_fkey\n        FOREIGN KEY (run_oid)\n        REFERENCES runs(oid)\n        ON DELETE CASCADE\n;\n\nALTER TABLE targets\n    DROP CONSTRAINT targets_run_oid_fkey,\n    ADD CONSTRAINT targets_run_oid_fkey\n        FOREIGN KEY (run_oid)\n        REFERENCES runs(oid)\n        ON DELETE CASCADE\n;\n\nALTER TABLE events\n    DROP CONSTRAINT events_run_oid_fkey,\n    ADD CONSTRAINT events_run_oid_fkey\n        FOREIGN KEY (run_oid)\n        REFERENCES runs(oid)\n        ON DELETE CASCADE\n;\n\nALTER TABLE resource_getters\n    DROP CONSTRAINT resource_getters_run_oid_fkey,\n    ADD CONSTRAINT resource_getters_run_oid_fkey\n        FOREIGN KEY (run_oid)\n        REFERENCES runs(oid)\n        ON DELETE CASCADE\n;\n\nALTER TABLE augmentations\n    DROP CONSTRAINT augmentations_run_oid_fkey,\n    ADD CONSTRAINT augmentations_run_oid_fkey\n        FOREIGN KEY (run_oid)\n        REFERENCES runs(oid)\n        ON DELETE CASCADE\n;\n\nALTER TABLE jobs_augs\n    DROP CONSTRAINT jobs_augs_job_oid_fkey,\n    DROP CONSTRAINT jobs_augs_augmentation_oid_fkey,\n    ADD CONSTRAINT jobs_augs_job_oid_fkey\n        FOREIGN KEY (job_oid)\n        REFERENCES Jobs(oid)\n        ON DELETE CASCADE,\n    ADD CONSTRAINT jobs_augs_augmentation_oid_fkey\n        FOREIGN KEY (augmentation_oid)\n        REFERENCES Augmentations(oid)\n        ON DELETE CASCADE\n;\n\nALTER TABLE metrics\n    DROP CONSTRAINT metrics_run_oid_fkey,\n    ADD CONSTRAINT metrics_run_oid_fkey\n        FOREIGN KEY (run_oid)\n        REFERENCES runs(oid)\n        ON DELETE CASCADE\n;\n\nALTER TABLE artifacts\n    DROP CONSTRAINT artifacts_run_oid_fkey,\n    ADD CONSTRAINT artifacts_run_oid_fkey\n        FOREIGN KEY (run_oid)\n        REFERENCES runs(oid)\n        ON DELETE CASCADE\n;\n\nCREATE RULE del_lo AS\n    ON DELETE TO Artifacts\n    DO DELETE FROM LargeObjects\n        WHERE LargeObjects.oid = 
old.large_object_uuid\n;\n\nALTER TABLE classifiers\n    DROP CONSTRAINT classifiers_artifact_oid_fkey,\n    DROP CONSTRAINT classifiers_metric_oid_fkey,\n    DROP CONSTRAINT classifiers_job_oid_fkey,\n    DROP CONSTRAINT classifiers_run_oid_fkey,\n\n    ADD CONSTRAINT classifiers_artifact_oid_fkey\n        FOREIGN KEY (artifact_oid)\n        REFERENCES artifacts(oid)\n        ON DELETE CASCADE,\n\n    ADD CONSTRAINT classifiers_metric_oid_fkey\n        FOREIGN KEY (metric_oid)\n        REFERENCES metrics(oid)\n        ON DELETE CASCADE,\n\n    ADD CONSTRAINT classifiers_job_oid_fkey\n        FOREIGN KEY (job_oid)\n        REFERENCES jobs(oid)\n        ON DELETE CASCADE,\n\n    ADD CONSTRAINT classifiers_run_oid_fkey\n        FOREIGN KEY (run_oid)\n        REFERENCES runs(oid)\n        ON DELETE CASCADE\n;\n\nALTER TABLE parameters\n    DROP CONSTRAINT parameters_run_oid_fkey,\n    ADD CONSTRAINT parameters_run_oid_fkey\n        FOREIGN KEY (run_oid)\n        REFERENCES runs(oid)\n        ON DELETE CASCADE\n;\n"
  },
  {
    "path": "wa/commands/process.py",
    "content": "#    Copyright 2014-2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\nimport os\n\nfrom wa import Command\nfrom wa import discover_wa_outputs\nfrom wa.framework.configuration.core import Status\nfrom wa.framework.exception import CommandError\nfrom wa.framework.output import RunOutput\nfrom wa.framework.output_processor import ProcessorManager\nfrom wa.utils import log\n\n\nclass ProcessContext(object):\n\n    def __init__(self):\n        self.run_output = None\n        self.target_info = None\n        self.job_output = None\n\n    def add_augmentation(self, aug):\n        pass\n\n\nclass ProcessCommand(Command):\n\n    name = 'process'\n    description = 'Process the output from previously run workloads.'\n\n    def initialize(self, context):\n        self.parser.add_argument('directory', metavar='DIR',\n                                 help=\"\"\"\n                                 Specify a directory containing the data\n                                 from a previous run to be processed.\n                                 \"\"\")\n        self.parser.add_argument('-p', '--processor', action='append',\n                                 dest='additional_processors', metavar='OutputProcessor',\n                                 help=\"\"\"\n                                 Specify an output processor to add from the\n                                 command line. 
This can be used to run a\n                                 processor that is not normally used without\n                                 introducing permanent change to the config\n                                 (which one might then forget to revert). This\n                                 option may be specified multiple times.\n                                 \"\"\")\n        self.parser.add_argument('-f', '--force', action='store_true',\n                                 help=\"\"\"\n                                 Run processors that have already been run. By\n                                 default these will be skipped. Also, forces\n                                 processing of in-progress runs.\n                                 \"\"\")\n        self.parser.add_argument('-r', '--recursive', action='store_true',\n                                 help=\"\"\"\n                                 Walk the specified directory to process\n                                 all of the previous runs contained within\n                                 instead of just processing the root.\n                                 \"\"\")\n\n    def execute(self, config, args):  # pylint: disable=arguments-differ,too-many-branches,too-many-statements\n        process_directory = os.path.expandvars(args.directory)\n        self.logger.debug('Using process directory: {}'.format(process_directory))\n        if not os.path.exists(process_directory):\n            msg = 'Path `{}` does not exist, please specify a valid path.'\n            raise CommandError(msg.format(process_directory))\n        if not args.recursive:\n            output_list = [RunOutput(process_directory)]\n        else:\n            output_list = list(discover_wa_outputs(process_directory))\n\n        pc = ProcessContext()\n        for run_output in output_list:\n            if run_output.status < Status.OK and not args.force:\n                msg = 'Skipping {} as it has not completed -- {}'\n                
self.logger.info(msg.format(run_output.basepath, run_output.status))\n                continue\n\n            pc.run_output = run_output\n            pc.target_info = run_output.target_info\n\n            if not args.recursive:\n                self.logger.info('Installing output processors')\n            else:\n                self.logger.info('Install output processors for run in path `{}`'\n                                 .format(run_output.basepath))\n\n            logfile = os.path.join(run_output.basepath, 'process.log')\n            i = 0\n            while os.path.exists(logfile):\n                i += 1\n                logfile = os.path.join(run_output.basepath, 'process-{}.log'.format(i))\n            log.add_file(logfile)\n\n            pm = ProcessorManager(loader=config.plugin_cache)\n            for proc in config.get_processors():\n                pm.install(proc, pc)\n            if args.additional_processors:\n                for proc in args.additional_processors:\n                    # Do not add any processors that are already present since\n                    # duplicate entries do not get disabled.\n                    try:\n                        pm.get_output_processor(proc)\n                    except ValueError:\n                        pm.install(proc, pc)\n\n            pm.validate()\n            pm.initialize(pc)\n\n            for job_output in run_output.jobs:\n                if job_output.status < Status.OK or job_output.status in [Status.SKIPPED, Status.ABORTED]:\n                    msg = 'Skipping job {} {} iteration {} -- {}'\n                    self.logger.info(msg.format(job_output.id, job_output.label,\n                                                job_output.iteration, job_output.status))\n                    continue\n\n                pc.job_output = job_output\n                pm.enable_all()\n                if not args.force:\n                    for augmentation in job_output.spec.augmentations:\n                
        try:\n                            pm.disable(augmentation)\n                        except ValueError:\n                            pass\n\n                msg = 'Processing job {} {} iteration {}'\n                self.logger.info(msg.format(job_output.id, job_output.label,\n                                            job_output.iteration))\n                pm.process_job_output(pc)\n                pm.export_job_output(pc)\n\n                job_output.write_result()\n\n            pm.enable_all()\n            if not args.force:\n                for augmentation in run_output.augmentations:\n                    try:\n                        pm.disable(augmentation)\n                    except ValueError:\n                        pass\n\n            self.logger.info('Processing run')\n            pm.process_run_output(pc)\n            pm.export_run_output(pc)\n            pm.finalize(pc)\n\n            run_output.write_info()\n            run_output.write_result()\n            self.logger.info('Done.')\n"
  },
  {
    "path": "wa/commands/report.py",
    "content": "from collections import Counter\nfrom datetime import datetime, timedelta\nimport logging\nimport os\n\nfrom wa import Command, settings\nfrom wa.framework.configuration.core import Status\nfrom wa.framework.output import RunOutput, discover_wa_outputs\nfrom wa.utils.doc import underline\nfrom wa.utils.log import COLOR_MAP, RESET_COLOR\nfrom wa.utils.terminalsize import get_terminal_size\n\n\nclass ReportCommand(Command):\n\n    name = 'report'\n    description = '''\n    Monitor an ongoing run and provide information on its progress.\n\n    Specify the output directory of the run you would like to monitor;\n    alternatively report will attempt to discover wa output directories\n    within the current directory. The output includes run information such as\n    the UUID, start time, duration, project name and a short summary of the\n    run's progress (number of completed jobs, the number of jobs in each\n    different status).\n\n    If verbose output is specified, the output includes a list of all events\n    labelled as not specific to any job, followed by a list of the jobs in the\n    order executed, with their retries (if any), current status and, if the job\n    is finished, a list of events that occurred during that job's execution.\n\n    This is an example of a job status line:\n\n        wk1 (exoplayer) [1] - 2, PARTIAL\n\n    It contains two entries delimited by a comma: the job's descriptor followed\n    by its completion status (``PARTIAL``, in this case). The descriptor\n    consists of the following elements:\n\n        - the job ID (``wk1``)\n        - the job label (which defaults to the workload name) in parentheses\n        - job iteration number in square brackets (``1`` in this case)\n        - a hyphen followed by the retry attempt number.\n            (note: this will only be shown if the job has been retried at least\n            once. 
If the job has not yet run, or if it completed on the first\n            attempt, the hyphen and retry count -- which in that case would be\n            zero -- will not appear).\n    '''\n\n    def initialize(self, context):\n        self.parser.add_argument('-d', '--directory',\n                                 help='''\n                                 Specify the WA output path. report will\n                                 otherwise attempt to discover output\n                                 directories in the current directory.\n                                 ''')\n\n    def execute(self, state, args):\n        if args.directory:\n            output_path = args.directory\n            run_output = RunOutput(output_path)\n        else:\n            possible_outputs = list(discover_wa_outputs(os.getcwd()))\n            num_paths = len(possible_outputs)\n\n            if num_paths > 1:\n                print('More than one possible output directory found,'\n                      ' please choose a path from the following:'\n                      )\n\n                for i in range(num_paths):\n                    print(\"{}: {}\".format(i, possible_outputs[i].basepath))\n\n                while True:\n                    try:\n                        select = int(input())\n                    except ValueError:\n                        print(\"Please select a valid path number\")\n                        continue\n\n                    if select not in range(num_paths):\n                        print(\"Please select a valid path number\")\n                        continue\n                    break\n\n                run_output = possible_outputs[select]\n\n            else:\n                run_output = possible_outputs[0]\n\n        rm = RunMonitor(run_output)\n        print(rm.generate_output(args.verbose))\n\n\nclass RunMonitor:\n\n    @property\n    def elapsed_time(self):\n        if self._elapsed is None:\n            if self.ro.info.duration is None:\n  
              self._elapsed = datetime.utcnow() - self.ro.info.start_time\n            else:\n                self._elapsed = self.ro.info.duration\n        return self._elapsed\n\n    @property\n    def job_outputs(self):\n        if self._job_outputs is None:\n            self._job_outputs = {\n                (j_o.id, j_o.label, j_o.iteration): j_o for j_o in self.ro.jobs\n            }\n        return self._job_outputs\n\n    @property\n    def projected_duration(self):\n        elapsed = self.elapsed_time.total_seconds()\n        proj = timedelta(seconds=elapsed * (len(self.jobs) / len(self.segmented['finished'])))\n        return proj - self.elapsed_time\n\n    def __init__(self, ro):\n        self.ro = ro\n        self._elapsed = None\n        self._p_duration = None\n        self._job_outputs = None\n        self._termwidth = None\n        self._fmt = _simple_formatter()\n        self.get_data()\n\n    def get_data(self):\n        self.jobs = [state for label_id, state in self.ro.state.jobs.items()]\n        if self.jobs:\n            rc = self.ro.run_config\n            self.segmented = segment_jobs_by_state(self.jobs,\n                                                   rc.max_retries,\n                                                   rc.retry_on_status\n                                                   )\n\n    def generate_run_header(self):\n        info = self.ro.info\n\n        header = underline('Run Info')\n        header += \"UUID: {}\\n\".format(info.uuid)\n        if info.run_name:\n            header += \"Run name: {}\\n\".format(info.run_name)\n        if info.project:\n            header += \"Project: {}\\n\".format(info.project)\n        if info.project_stage:\n            header += \"Project stage: {}\\n\".format(info.project_stage)\n\n        if info.start_time:\n            duration = _seconds_as_smh(self.elapsed_time.total_seconds())\n            header += (\"Start time: {}\\n\"\n                       \"Duration: 
{:02}:{:02}:{:02}\\n\"\n                       ).format(info.start_time,\n                                duration[2], duration[1], duration[0],\n                                )\n            if self.segmented['finished'] and not info.end_time:\n                p_duration = _seconds_as_smh(self.projected_duration.total_seconds())\n                header += \"Projected time remaining: {:02}:{:02}:{:02}\\n\".format(\n                    p_duration[2], p_duration[1], p_duration[0]\n                )\n\n            elif self.ro.info.end_time:\n                header += \"End time: {}\\n\".format(info.end_time)\n\n        return header + '\\n'\n\n    def generate_job_summary(self):\n        total = len(self.jobs)\n        num_fin = len(self.segmented['finished'])\n\n        summary = underline('Job Summary')\n        summary += 'Total: {}, Completed: {} ({}%)\\n'.format(\n            total, num_fin, (num_fin / total) * 100\n        ) if total > 0 else 'No jobs created\\n'\n\n        ctr = Counter()\n        for run_state, jobs in ((k, v) for k, v in self.segmented.items() if v):\n            if run_state == 'finished':\n                ctr.update([job.status.name.lower() for job in jobs])\n            else:\n                ctr[run_state] += len(jobs)\n\n        return summary + ', '.join(\n            [str(count) + ' ' + self._fmt.highlight_keyword(status) for status, count in ctr.items()]\n        ) + '\\n\\n'\n\n    def generate_job_detail(self):\n        detail = underline('Job Detail')\n        for job in self.jobs:\n            detail += ('{} ({}) [{}]{}, {}\\n').format(\n                job.id,\n                job.label,\n                job.iteration,\n                ' - ' + str(job.retries)if job.retries else '',\n                self._fmt.highlight_keyword(str(job.status))\n            )\n\n            job_output = self.job_outputs[(job.id, job.label, job.iteration)]\n            for event in job_output.events:\n                detail += 
self._fmt.fit_term_width(\n                    '\\t{}\\n'.format(event.summary)\n                )\n        return detail\n\n    def generate_run_detail(self):\n        detail = underline('Run Events') if self.ro.events else ''\n\n        for event in self.ro.events:\n            detail += '{}\\n'.format(event.summary)\n\n        return detail + '\\n'\n\n    def generate_output(self, verbose):\n        if not self.jobs:\n            return 'No jobs found in output directory\\n'\n\n        output = self.generate_run_header()\n        output += self.generate_job_summary()\n\n        if verbose:\n            output += self.generate_run_detail()\n            output += self.generate_job_detail()\n\n        return output\n\n\ndef _seconds_as_smh(seconds):\n    seconds = int(seconds)\n    hours = seconds // 3600\n    minutes = (seconds % 3600) // 60\n    seconds = seconds % 60\n    return seconds, minutes, hours\n\n\ndef segment_jobs_by_state(jobstates, max_retries, retry_status):\n    finished_states = [\n        Status.PARTIAL, Status.FAILED,\n        Status.ABORTED, Status.OK, Status.SKIPPED\n    ]\n\n    segmented = {\n        'finished': [], 'other': [], 'running': [],\n        'pending': [], 'uninitialized': []\n    }\n\n    for jobstate in jobstates:\n        if (jobstate.status in retry_status) and jobstate.retries < max_retries:\n            segmented['running'].append(jobstate)\n        elif jobstate.status in finished_states:\n            segmented['finished'].append(jobstate)\n        elif jobstate.status == Status.RUNNING:\n            segmented['running'].append(jobstate)\n        elif jobstate.status == Status.PENDING:\n            segmented['pending'].append(jobstate)\n        elif jobstate.status == Status.NEW:\n            segmented['uninitialized'].append(jobstate)\n        else:\n            segmented['other'].append(jobstate)\n\n    return segmented\n\n\nclass _simple_formatter:\n    color_map = {\n        'running': COLOR_MAP[logging.INFO],\n        
'partial': COLOR_MAP[logging.WARNING],\n        'failed': COLOR_MAP[logging.CRITICAL],\n        'aborted': COLOR_MAP[logging.ERROR]\n    }\n\n    def __init__(self):\n        self.termwidth = get_terminal_size()[0]\n        self.color = settings.logging['color']\n\n    def fit_term_width(self, text):\n        text = text.expandtabs()\n        if len(text) <= self.termwidth:\n            return text\n        else:\n            return text[0:self.termwidth - 4] + \" ...\\n\"\n\n    def highlight_keyword(self, kw):\n        if not self.color or kw not in _simple_formatter.color_map:\n            return kw\n\n        color = _simple_formatter.color_map[kw.lower()]\n        return '{}{}{}'.format(color, kw, RESET_COLOR)\n"
  },
  {
    "path": "wa/commands/revent.py",
    "content": "#    Copyright 2014-2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\nimport os\nimport sys\nfrom time import sleep\n\nfrom wa import Command\nfrom wa.framework import pluginloader\nfrom wa.framework.exception import ConfigError\nfrom wa.framework.resource import ResourceResolver\nfrom wa.framework.target.manager import TargetManager\nfrom wa.utils.revent import ReventRecorder\n\n\nclass RecordCommand(Command):\n\n    name = 'record'\n    description = '''\n    Performs a revent recording\n\n    This command helps making revent recordings. It will automatically\n    deploy revent and has options to automatically open apps and record\n    specified stages of a workload.\n\n    Revent allows you to record raw inputs such as screen swipes or button presses.\n    This can be useful for recording inputs for workloads such as games that don't\n    have XML UI layouts that can be used with UIAutomator. As a drawback from this,\n    revent recordings are specific to the device type they were recorded on.\n\n    WA uses two parts to the names of revent recordings in the format,\n    {device_name}.{suffix}.revent.\n\n     - device_name can either be specified manually with the ``-d`` argument or\n       it can be automatically determined. 
On Android device it will be obtained\n       from ``build.prop``, on Linux devices it is obtained from ``/proc/device-tree/model``.\n     - suffix is used by WA to determine which part of the app execution the\n       recording is for, currently these are either ``setup``, ``run``, ``extract_results``\n       or ``teardown``. All stages are optional for recording and these should\n       be specified with the ``-s``, ``-r``, ``-e`` or ``-t`` arguments respectively,\n       or optionally ``-a`` to indicate all stages should be recorded.\n    '''\n\n    def __init__(self, **kwargs):\n        super(RecordCommand, self).__init__(**kwargs)\n        self.tm = None\n        self.target = None\n        self.revent_recorder = None\n\n    def initialize(self, context):\n        self.parser.add_argument('-d', '--device', metavar='DEVICE',\n                                 help='''\n                                 Specify the device on which to run. This will\n                                 take precedence over the device (if any)\n                                 specified in configuration.\n                                 ''')\n        self.parser.add_argument('-o', '--output', help='Specify the output file', metavar='FILE')\n        self.parser.add_argument('-s', '--setup', help='Record a recording for setup stage',\n                                 action='store_true')\n        self.parser.add_argument('-r', '--run', help='Record a recording for run stage',\n                                 action='store_true')\n        self.parser.add_argument('-e', '--extract_results', help='Record a recording for extract_results stage',\n                                 action='store_true')\n        self.parser.add_argument('-t', '--teardown', help='Record a recording for teardown stage',\n                                 action='store_true')\n        self.parser.add_argument('-a', '--all', help='Record recordings for available stages',\n                                 
action='store_true')\n\n        # Need validation\n        self.parser.add_argument('-C', '--clear', help='Clear app cache before launching it',\n                                 action='store_true')\n        group = self.parser.add_mutually_exclusive_group(required=False)\n        group.add_argument('-p', '--package', help='Android package to launch before recording')\n        group.add_argument('-w', '--workload', help='Name of a revent workload (mostly games)')\n\n    def validate_args(self, args):\n        if args.clear and not (args.package or args.workload):\n            self.logger.error(\"Package/Workload must be specified if you want to clear cache\")\n            sys.exit()\n        if args.workload and args.output:\n            self.logger.error(\"Output file cannot be specified with Workload\")\n            sys.exit()\n        if not args.workload and (args.setup or args.extract_results\n                                  or args.teardown or args.all):\n            self.logger.error(\"Cannot specify a recording stage without a Workload\")\n            sys.exit()\n        if args.workload and not any([args.all, args.teardown, args.extract_results, args.run, args.setup]):\n            self.logger.error(\"Please specify which workload stages you wish to record\")\n            sys.exit()\n\n    def execute(self, state, args):\n        self.validate_args(args)\n        state.run_config.merge_device_config(state.plugin_cache)\n        if args.device:\n            device = args.device\n            device_config = {}\n        else:\n            device = state.run_config.device\n            device_config = state.run_config.device_config or {}\n\n        if args.output:\n            outdir = os.path.basename(args.output)\n        else:\n            outdir = os.getcwd()\n\n        self.tm = TargetManager(device, device_config, outdir)\n        self.tm.initialize()\n        self.target = self.tm.target\n        self.revent_recorder = ReventRecorder(self.target)\n    
    self.revent_recorder.deploy()\n\n        if args.workload:\n            self.workload_record(args)\n        elif args.package:\n            self.package_record(args)\n        else:\n            self.manual_record(args)\n\n        self.revent_recorder.remove()\n\n    def record(self, revent_file, name, output_path):\n        msg = 'Press Enter when you are ready to record {}...'\n        self.logger.info(msg.format(name))\n        input('')\n        self.revent_recorder.start_record(revent_file)\n        msg = 'Press Enter when you have finished recording {}...'\n        self.logger.info(msg.format(name))\n        input('')\n        self.revent_recorder.stop_record()\n\n        if not os.path.isdir(output_path):\n            os.makedirs(output_path)\n\n        revent_file_name = self.target.path.basename(revent_file)\n        host_path = os.path.join(output_path, revent_file_name)\n        if os.path.exists(host_path):\n            msg = 'Revent file \\'{}\\' already exists, overwrite? 
[y/n]'\n            self.logger.info(msg.format(revent_file_name))\n            if input('') == 'y':\n                os.remove(host_path)\n            else:\n                msg = 'Did not pull and overwrite \\'{}\\''\n                self.logger.warning(msg.format(revent_file_name))\n                return\n        msg = 'Pulling \\'{}\\' from device'\n        self.logger.info(msg.format(self.target.path.basename(revent_file)))\n        self.target.pull(revent_file, output_path, as_root=self.target.is_rooted)\n\n    def manual_record(self, args):\n        output_path, file_name = self._split_revent_location(args.output)\n        revent_file = self.target.get_workpath(file_name)\n        self.record(revent_file, '', output_path)\n        msg = 'Recording is available at: \\'{}\\''\n        self.logger.info(msg.format(os.path.join(output_path, file_name)))\n\n    def package_record(self, args):\n        if self.target.os != 'android' and self.target.os != 'chromeos':\n            raise ConfigError('Target does not appear to be running Android')\n        if self.target.os == 'chromeos' and not self.target.supports_android:\n            raise ConfigError('Target does not appear to support Android')\n        if args.clear:\n            self.target.execute('pm clear {}'.format(args.package))\n        self.logger.info('Starting {}'.format(args.package))\n        cmd = 'monkey -p {} -c android.intent.category.LAUNCHER 1'\n        self.target.execute(cmd.format(args.package))\n\n        output_path, file_name = self._split_revent_location(args.output)\n        revent_file = self.target.get_workpath(file_name)\n        self.record(revent_file, '', output_path)\n        msg = 'Recording is available at: \\'{}\\''\n        self.logger.info(msg.format(os.path.join(output_path, file_name)))\n\n    def workload_record(self, args):\n        context = LightContext(self.tm)\n        setup_revent = '{}.setup.revent'.format(self.target.model)\n        run_revent = 
'{}.run.revent'.format(self.target.model)\n        extract_results_revent = '{}.extract_results.revent'.format(self.target.model)\n        teardown_file_revent = '{}.teardown.revent'.format(self.target.model)\n        setup_file = self.target.get_workpath(setup_revent)\n        run_file = self.target.get_workpath(run_revent)\n        extract_results_file = self.target.get_workpath(extract_results_revent)\n        teardown_file = self.target.get_workpath(teardown_file_revent)\n\n        self.logger.info('Deploying {}'.format(args.workload))\n        workload = pluginloader.get_workload(args.workload, self.target)\n        # Setup apk if android workload\n        if hasattr(workload, 'apk'):\n            workload.apk.initialize(context)\n            workload.apk.setup(context)\n            sleep(workload.loading_time)\n\n        output_path = os.path.join(workload.dependencies_directory,\n                                   'revent_files')\n        if args.setup or args.all:\n            self.record(setup_file, 'SETUP', output_path)\n        if args.run or args.all:\n            self.record(run_file, 'RUN', output_path)\n        if args.extract_results or args.all:\n            self.record(extract_results_file, 'EXTRACT_RESULTS', output_path)\n        if args.teardown or args.all:\n            self.record(teardown_file, 'TEARDOWN', output_path)\n        self.logger.info('Tearing down {}'.format(args.workload))\n        workload.teardown(context)\n        self.logger.info('Recording(s) are available at: \\'{}\\''.format(output_path))\n\n    def _split_revent_location(self, output):\n        output_path = None\n        file_name = None\n        if output:\n            output_path, file_name, = os.path.split(output)\n\n        if not file_name:\n            file_name = '{}.revent'.format(self.target.model)\n        if not output_path:\n            output_path = os.getcwd()\n\n        return output_path, file_name\n\n\nclass ReplayCommand(Command):\n\n    name = 
'replay'\n    description = '''\n    Replay a revent recording\n\n    Revent allows you to record raw inputs such as screen swipes or button presses.\n    See ``wa show record`` to see how to make an revent recording.\n    '''\n\n    def initialize(self, context):\n        self.parser.add_argument('recording', help='The name of the file to replay',\n                                 metavar='FILE')\n        self.parser.add_argument('-d', '--device', help='The name of the device')\n        self.parser.add_argument('-p', '--package', help='Package to launch before recording')\n        self.parser.add_argument('-C', '--clear', help='Clear app cache before launching it',\n                                 action=\"store_true\")\n\n    # pylint: disable=W0201\n    def execute(self, state, args):\n        state.run_config.merge_device_config(state.plugin_cache)\n        if args.device:\n            device = args.device\n            device_config = {}\n        else:\n            device = state.run_config.device\n            device_config = state.run_config.device_config or {}\n\n        target_manager = TargetManager(device, device_config, None)\n        target_manager.initialize()\n        self.target = target_manager.target\n        revent_file = self.target.path.join(self.target.working_directory,\n                                            os.path.split(args.recording)[1])\n\n        self.logger.info(\"Pushing file to target\")\n        self.target.push(args.recording, self.target.working_directory)\n\n        revent_recorder = ReventRecorder(target_manager.target)\n        revent_recorder.deploy()\n\n        if args.clear:\n            self.target.execute('pm clear {}'.format(args.package))\n\n        if args.package:\n            self.logger.info('Starting {}'.format(args.package))\n            cmd = 'monkey -p {} -c android.intent.category.LAUNCHER 1'\n            self.target.execute(cmd.format(args.package))\n\n        self.logger.info(\"Starting replay\")\n        
revent_recorder.replay(revent_file)\n        self.logger.info(\"Finished replay\")\n        revent_recorder.remove()\n\n\n# Used to satisfy the workload API\nclass LightContext(object):\n\n    def __init__(self, tm):\n        self.tm = tm\n        self.resolver = ResourceResolver()\n        self.resolver.load()\n\n    def get_resource(self, resource, strict=True):\n        return self.resolver.get(resource, strict)\n\n    def update_metadata(self, key, *args):\n        pass\n\n    get = get_resource\n"
  },
  {
    "path": "wa/commands/run.py",
    "content": "#    Copyright 2014-2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\n\nimport os\nimport sys\nimport shutil\n\nimport wa\nfrom wa import Command, settings\nfrom wa.framework import pluginloader\nfrom wa.framework.configuration.parsers import AgendaParser\nfrom wa.framework.execution import Executor\nfrom wa.framework.output import init_run_output\nfrom wa.framework.exception import NotFoundError, ConfigError\nfrom wa.utils import log\nfrom wa.utils.types import toggle_set\n\n\nclass RunCommand(Command):\n\n    name = 'run'\n    description = '''\n    Execute automated workloads on a remote device and process the resulting output.\n\n    '''\n\n    def initialize(self, context):\n        self.parser.add_argument('agenda', metavar='AGENDA',\n                                 help=\"\"\"\n                                 Agenda for this workload automation run. This\n                                 defines which workloads will be executed, how\n                                 many times, with which tunables, etc.  
See\n                                 example agendas in {} for an example of how\n                                 this file should be structured.\n                                 \"\"\".format(os.path.dirname(wa.__file__)))\n        self.parser.add_argument('-d', '--output-directory', metavar='DIR', default=None,\n                                 help=\"\"\"\n                                 Specify a directory where the output will be\n                                 generated. If the directory already exists,\n                                 the script will abort unless -f option (see\n                                 below) is used, in which case the contents of\n                                 the directory will be overwritten. If this\n                                 option is not specified, then {} will be used\n                                 instead.\n                                 \"\"\".format(settings.default_output_directory))\n        self.parser.add_argument('-f', '--force', action='store_true',\n                                 help=\"\"\"\n                                 Overwrite output directory if it exists. By\n                                 default, the script will abort in this\n                                 situation to prevent accidental data loss.\n                                 \"\"\")\n        self.parser.add_argument('-i', '--id', action='append', dest='only_run_ids', metavar='ID',\n                                 help=\"\"\"\n                                 Specify a workload spec ID from an agenda to\n                                 run. If this is specified, only that\n                                 particular spec will be run, and other\n                                 workloads in the agenda will be ignored. 
This\n                                 option may be used to specify multiple IDs.\n                                 \"\"\")\n        self.parser.add_argument('--disable', action='append', dest='augmentations_to_disable',\n                                 default=[],\n                                 metavar='INSTRUMENT', help=\"\"\"\n                                 Specify an instrument or output processor to\n                                 disable from the command line. This equivalent\n                                 to adding \"~{metavar}\" to the instruments\n                                 list in the agenda. This can be used to\n                                 temporarily disable a troublesome instrument\n                                 for a particular run without introducing\n                                 permanent change to the config (which one\n                                 might then forget to revert).  This option may\n                                 be specified multiple times.\n                                 \"\"\")\n\n    def execute(self, config, args):  # pylint: disable=arguments-differ\n        output = self.set_up_output_directory(config, args)\n        log.add_file(output.logfile)\n        output.add_artifact('runlog', output.logfile, kind='log',\n                            description='Run log.')\n\n        disabled_augmentations = toggle_set([i != '~~' and \"~{}\".format(i) or i\n                                            for i in args.augmentations_to_disable])\n        config.jobs_config.disable_augmentations(disabled_augmentations)\n        config.jobs_config.only_run_ids(args.only_run_ids)\n\n        parser = AgendaParser()\n        if os.path.isfile(args.agenda):\n            includes = parser.load_from_path(config, args.agenda)\n            shutil.copy(args.agenda, output.raw_config_dir)\n            for inc in includes:\n                shutil.copy(inc, output.raw_config_dir)\n        else:\n            try:\n       
         pluginloader.get_plugin_class(args.agenda, kind='workload')\n                agenda = {'workloads': [{'name': args.agenda}]}\n                parser.load(config, agenda, 'CMDLINE_ARGS')\n            except NotFoundError:\n                msg = 'Agenda file \"{}\" does not exist, and there no workload '\\\n                      'with that name.\\nYou can get a list of available '\\\n                      'by running \"wa list workloads\".'\n                raise ConfigError(msg.format(args.agenda))\n\n        # Update run info with newly parsed config values\n        output.info.project = config.run_config.project\n        output.info.project_stage = config.run_config.project_stage\n        output.info.run_name = config.run_config.run_name\n\n        executor = Executor()\n        executor.execute(config, output)\n\n    def set_up_output_directory(self, config, args):\n        if args.output_directory:\n            output_directory = args.output_directory\n        else:\n            output_directory = settings.default_output_directory\n        self.logger.debug('Using output directory: {}'.format(output_directory))\n        try:\n            return init_run_output(output_directory, config, args.force)\n        except RuntimeError as e:\n            if 'path exists' in str(e):\n                msg = 'Output directory \"{}\" exists.\\nPlease specify another '\\\n                      'location, or use -f option to overwrite.'\n                self.logger.critical(msg.format(output_directory))\n                sys.exit(1)\n            else:\n                raise e\n"
  },
  {
    "path": "wa/commands/schema_changelog.rst",
    "content": "# 1\n## 1.0\n- First version\n## 1.1\n- LargeObjects table added as a substitute for the previous plan to\n  use the filesystem and a path reference to store artifacts. This\n  was done following an extended discussion and tests that verified\n  that the savings in processing power were not enough to warrant\n  the creation of a dedicated server or file handler.\n## 1.2\n- Rename the `resourcegetters` table to `resource_getters` for consistency.\n- Add Job and Run level classifiers.\n- Add missing android specific properties to targets.\n- Add new POD meta data to relevant tables. \n- Correct job column name from `retires` to `retry`.\n- Add missing run information.\n## 1.3\n- Add missing \"system_id\" field from TargetInfo.\n- Enable support for uploading Artifact that represent directories.\n## 1.4\n- Add \"modules\" field to TargetInfo to list the modules loaded by the target\n  during the run.\n## 1.5\n- Change the type of the \"hostid\" in TargetInfo from Int to Bigint.\n## 1.6\n- Add cascading deletes to most tables to allow easy deletion of a run\n  and its associated data\n- Add rule to delete associated large object on deletion of artifact"
  },
  {
    "path": "wa/commands/show.py",
    "content": "#    Copyright 2014-2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\n# TODO: because of some weirdness involving get_params_rst and underline\n#       functions from wa.utils.doc, pylint gets stuck here for a very\n#       long time. To avoid that, skip this file.\n# pylint: disable-all\n\nimport sys\nimport platform\nfrom subprocess import call, Popen, PIPE\n\nfrom devlib.utils.misc import escape_double_quotes\n\nfrom wa import Command\nfrom wa.framework import pluginloader\nfrom wa.framework.configuration.core import MetaConfiguration, RunConfiguration\nfrom wa.framework.exception import NotFoundError\nfrom wa.framework.target.descriptor import list_target_descriptions\nfrom wa.utils.types import caseless_string, identifier\nfrom wa.utils.doc import (strip_inlined_text, get_rst_from_plugin,\n                          get_params_rst, underline)\nfrom wa.utils.misc import which\n\n\nclass ShowCommand(Command):\n\n    name = 'show'\n    description = 'Display documentation for the specified plugin (workload, instrument, etc.).'\n\n    def initialize(self, context):\n        self.parser.add_argument('plugin', metavar='PLUGIN',\n                                 help='The name of the plugin to display documentation for.')\n\n    def execute(self, state, args):\n        name = identifier(args.plugin)\n        rst_output = None\n\n        if name == caseless_string('settings'):\n            rst_output = 
get_rst_for_global_config()\n            rst_output += get_rst_for_envars()\n            plugin_name = name.lower()\n            kind = 'global:'\n        else:\n            try:\n                plugin = pluginloader.get_plugin_class(name)\n            except NotFoundError:\n                plugin = None\n            if plugin:\n                rst_output = get_rst_from_plugin(plugin)\n                plugin_name = plugin.name\n                kind = '{}:'.format(plugin.kind)\n            else:\n                target = get_target_description(name)\n                if target:\n                    rst_output = get_rst_from_target(target)\n                    plugin_name = target.name\n                    kind = 'target:'\n\n        if not rst_output:\n            raise NotFoundError('Could not find plugin or alias \"{}\"'.format(name))\n\n        if which('pandoc'):\n            if platform.system() == \"Darwin\":\n                # The version of `man` shipped with macOS does not support `-l`. 
You need to use GNU man from:\n                # https://formulae.brew.sh/formula/man-db\n                if which(\"gman\") is None:\n                    print(rst_output)\n                man = \"gman\"\n            else:\n                man = \"man\"\n\n            p = Popen(['pandoc', '-f', 'rst', '-t', 'man'], stdin=PIPE, stdout=PIPE, stderr=PIPE)\n            output, _ = p.communicate(rst_output.encode(sys.stdin.encoding))\n            output = output.decode(sys.stdout.encoding)\n\n            # Make sure to double escape back slashes\n            output = output.replace('\\\\', '\\\\\\\\\\\\')\n\n            # Correctly format the title and page number of the man page\n            title, body = output.split('\\n', 1)\n            title = '.TH {}{} 7'.format(kind, plugin_name)\n            output = '\\n'.join([title, body])\n\n            call('echo \"{}\" | {} -l -'.format(escape_double_quotes(output), man), shell=True)\n        else:\n            print(rst_output)  # pylint: disable=superfluous-parens\n\n\ndef get_target_description(name):\n    targets = list_target_descriptions()\n    for target in targets:\n        if name == identifier(target.name):\n            return target\n\n\ndef get_rst_from_target(target):\n    text = underline(target.name, '~')\n    if hasattr(target, 'description'):\n        desc = strip_inlined_text(target.description or '')\n        text += desc\n    text += underline('Device Parameters:', '-')\n    text += get_params_rst(target.conn_params)\n    text += get_params_rst(target.platform_params)\n    text += get_params_rst(target.target_params)\n    text += get_params_rst(target.assistant_params)\n    text += '.. 
Note: For available runtime parameters please see the documentation'\n    return text + '\\n'\n\n\ndef get_rst_for_global_config():\n    text = underline('Global Configuration')\n    text += 'These parameters control the behaviour of WA/run as a whole, they ' \\\n            'should be set inside a config file (either located in ' \\\n            '$WA_USER_DIRECTORY/config.yaml or one which is specified with -c), ' \\\n            'or into config/global section of the agenda.\\n\\n'\n\n    cfg_points = MetaConfiguration.config_points + RunConfiguration.config_points\n    text += get_params_rst(cfg_points)\n    return text\n\n\ndef get_rst_for_envars():\n    text = underline('Environment Variables')\n    text += '''WA_USER_DIRECTORY: str\n    This is the location WA will look for config.yaml, plugins,  dependencies,\n    and it will also be used for local caches, etc. If this variable is not set,\n    the default location is ``~/.workload_automation`` (this is created when WA\n    is installed).\n\n    .. note.. This location must be writable by the user who runs WA.'''\n    return text\n"
  },
  {
    "path": "wa/commands/templates/apk_workload",
    "content": "from wa import Parameter, ApkWorkload\n\n\nclass ${class_name}(ApkWorkload):\n\n    name = '${name}'\n    description = \"This is an placeholder description\"\n    # Replace with a list of supported package name(s) in the APK file(s).\n    package_names = ['package_name']\n\n    parameters = [\n        # Workload parameters go here e.g.\n        Parameter('example_parameter', kind=int, allowed_values=[1,2,3],\n                  default=1, override=True, mandatory=False,\n                  description='This is an example parameter')\n    ]\n\n    def __init__(self, target, **kwargs):\n        super(${class_name}, self).__init__(target, **kwargs)\n        # Define any additional attributes required for the workload\n\n    def init_resources(self, resolver):\n        super(${class_name}, self).init_resources(resolver)\n        # This method may be used to perform early resource discovery and\n        # initialization. This is invoked during the initial loading stage and\n        # before the device is ready, so cannot be used for any device-dependent\n        # initialization. 
This method is invoked before the workload instance is\n        # validated.\n\n    def initialize(self, context):\n        super(${class_name}, self).initialize(context)\n        # This method should be used to perform once-per-run initialization of a\n        # workload instance.\n\n    def validate(self):\n        super(${class_name}, self).validate()\n        # Validate inter-parameter assumptions etc\n\n    def setup(self, context):\n        super(${class_name}, self).setup(context)\n        # Perform any necessary setup before starting the application\n\n    def setup_rerun(self, context):\n        super(${class_name}, self).setup(context)\n        # If the workload has the `requires_rerun` attribute set to `True` this\n        # method may be used to perform any necessary setup for the rerun of the\n        # application.\n\n    def extract_results(self, context):\n        super(${class_name}, self).extract_results(context)\n        # Extract results on the target\n\n    def update_output(self, context):\n        super(${class_name}, self).update_output(context)\n        # Update the output within the specified execution context with the\n        # metrics and artifacts from this workload iteration.\n\n    def teardown(self, context):\n        super(${class_name}, self).teardown(context)\n        # Perform any final clean up for the Workload.\n"
  },
  {
    "path": "wa/commands/templates/apkrevent_workload",
    "content": "from wa import Parameter, ApkReventWorkload\n\n\nclass ${class_name}(ApkReventWorkload):\n\n    name = '${name}'\n    description = \"This is an placeholder description\"\n    # Replace with a list of supported package names in the APK file(s).\n    package_names = ['package_name']\n\n    parameters = [\n        # Workload parameters go here e.g.\n        Parameter('example_parameter', kind=int, allowed_values=[1,2,3],\n                  default=1, override=True, mandatory=False,\n                  description='This is an example parameter')\n    ]\n\n    def __init__(self, target, **kwargs):\n        super(${class_name}, self).__init__(target, **kwargs)\n        # Define any additional attributes required for the workload\n\n    def init_resources(self, resolver):\n        super(${class_name}, self).init_resources(resolver)\n        # This method may be used to perform early resource discovery and\n        # initialization. This is invoked during the initial loading stage and\n        # before the device is ready, so cannot be used for any device-dependent\n        # initialization. 
This method is invoked before the workload instance is\n        # validated.\n\n    def initialize(self, context):\n        super(${class_name}, self).initialize(context)\n        # This method should be used to perform once-per-run initialization of a\n        # workload instance.\n\n    def validate(self):\n        super(${class_name}, self).validate()\n        # Validate inter-parameter assumptions etc\n\n    def setup(self, context):\n        super(${class_name}, self).setup(context)\n        # Perform any necessary setup before starting the UI automation\n\n    def setup_rerun(self, context):\n        super(${class_name}, self).setup(context)\n        # If the workload has the `requires_rerun` attribute set to `True` this\n        # method may be used to perform any necessary setup for the rerun of the\n        # application.\n\n    def extract_results(self, context):\n        super(${class_name}, self).extract_results(context)\n        # Extract results on the target\n\n    def update_output(self, context):\n        super(${class_name}, self).update_output(context)\n        # Update the output within the specified execution context with the\n        # metrics and artifacts from this workload iteration.\n\n    def teardown(self, context):\n        super(${class_name}, self).teardown(context)\n        # Perform any final clean up for the Workload.\n"
  },
  {
    "path": "wa/commands/templates/apkuiauto_workload",
    "content": "from wa import Parameter, ApkUiautoWorkload\n\n\nclass ${class_name}(ApkUiautoWorkload):\n\n    name = '${name}'\n    description = \"This is an placeholder description\"\n    # Replace with a list of supported package names in the APK file(s).\n    package_names = ['package_name']\n\n    parameters = [\n        # Workload parameters go here e.g.\n        Parameter('example_parameter', kind=int, allowed_values=[1,2,3],\n                  default=1, override=True, mandatory=False,\n                  description='This is an example parameter')\n    ]\n\n    def __init__(self, target, **kwargs):\n        super(${class_name}, self).__init__(target, **kwargs)\n        # Define any additional attributes required for the workload\n\n    def init_resources(self, resolver):\n        super(${class_name}, self).init_resources(resolver)\n        # This method may be used to perform early resource discovery and\n        # initialization. This is invoked during the initial loading stage and\n        # before the device is ready, so cannot be used for any device-dependent\n        # initialization. 
This method is invoked before the workload instance is\n        # validated.\n\n    def initialize(self, context):\n        super(${class_name}, self).initialize(context)\n        # This method should be used to perform once-per-run initialization of a\n        # workload instance.\n\n    def validate(self):\n        super(${class_name}, self).validate()\n        # Validate inter-parameter assumptions etc\n\n    def setup(self, context):\n        super(${class_name}, self).setup(context)\n        # Perform any necessary setup before starting the UI automation\n\n    def setup_rerun(self, context):\n        super(${class_name}, self).setup(context)\n        # If the workload has the `requires_rerun` attribute set to `True` this\n        # method may be used to perform any necessary setup for the rerun of the\n        # application.\n\n    def extract_results(self, context):\n        super(${class_name}, self).extract_results(context)\n        # Extract results on the target\n\n    def update_output(self, context):\n        super(${class_name}, self).update_output(context)\n        # Update the output within the specified execution context with the\n        # metrics and artifacts from this workload iteration.\n\n    def teardown(self, context):\n        super(${class_name}, self).teardown(context)\n        # Perform any final clean up for the Workload.\n"
  },
  {
    "path": "wa/commands/templates/basic_workload",
    "content": "from wa import Parameter, Workload\n\n\nclass ${class_name}(Workload):\n\n    name = '${name}'\n    description = \"This is an placeholder description\"\n\n    parameters = [\n        # Workload parameters go here e.g.\n        Parameter('example_parameter', kind=int, allowed_values=[1,2,3],\n                  default=1, override=True, mandatory=False,\n                  description='This is an example parameter')\n    ]\n\n    def __init__(self, target, **kwargs):\n        super(${class_name}, self).__init__(target, **kwargs)\n        # Define any additional attributes required for the workload\n\n    def init_resources(self, resolver):\n        super(${class_name}, self).init_resources(resolver)\n        # This method may be used to perform early resource discovery and\n        # initialization. This is invoked during the initial loading stage and\n        # before the device is ready, so cannot be used for any device-dependent\n        # initialization. This method is invoked before the workload instance is\n        # validated.\n\n    def initialize(self, context):\n        super(${class_name}, self).initialize(context)\n        # This method should be used to perform once-per-run initialization of a\n        # workload instance.\n\n    def validate(self):\n        super(${class_name}, self).validate()\n        # Validate inter-parameter assumptions etc\n\n    def setup(self, context):\n        super(${class_name}, self).setup(context)\n        # Perform any necessary setup before starting the workload\n\n    def run(self, context):\n        super(${class_name}, self).run(context)\n        # Perform the main functionality of the workload\n\n    def extract_results(self, context):\n        super(${class_name}, self).extract_results(context)\n        # Extract results on the target\n\n    def update_output(self, context):\n        super(${class_name}, self).update_output(context)\n        # Update the output within the specified execution context 
with the\n        # metrics and artifacts from this workload iteration.\n\n    def teardown(self, context):\n        super(${class_name}, self).teardown(context)\n        # Perform any final clean up for the Workload.\n"
  },
  {
    "path": "wa/commands/templates/revent_workload",
    "content": "from wa import Parameter, ReventWorkload\n\n\nclass ${class_name}(ReventWorkload):\n\n    name = '${name}'\n    description = \"This is an placeholder description\"\n\n    parameters = [\n        # Workload parameters go here e.g.\n        Parameter('example_parameter', kind=int, allowed_values=[1,2,3],\n                  default=1, override=True, mandatory=False,\n                  description='This is an example parameter')\n    ]\n\n    def __init__(self, target, **kwargs):\n        super(${class_name}, self).__init__(target, **kwargs)\n        # Define any additional attributes required for the workload\n\n    def init_resources(self, resolver):\n        super(${class_name}, self).init_resources(resolver)\n        # This method may be used to perform early resource discovery and\n        # initialization. This is invoked during the initial loading stage and\n        # before the device is ready, so cannot be used for any device-dependent\n        # initialization. This method is invoked before the workload instance is\n        # validated.\n\n    def initialize(self, context):\n        super(${class_name}, self).initialize(context)\n        # This method should be used to perform once-per-run initialization of a\n        # workload instance.\n\n    def validate(self):\n        super(${class_name}, self).validate()\n        # Validate inter-parameter assumptions etc\n\n    def setup(self, context):\n        super(${class_name}, self).setup(context)\n        # Perform any necessary setup before starting the UI automation\n\n    def run(self, context):\n        super(${class_name}, self).run(context)\n        # Perform the main functionality of the workload\n\n    def extract_results(self, context):\n        super(${class_name}, self).extract_results(context)\n        # Extract results on the target\n\n    def update_output(self, context):\n        super(${class_name}, self).update_output(context)\n        # Update the output within the specified 
execution context with the\n        # metrics and artifacts from this workload iteration.\n\n    def teardown(self, context):\n        super(${class_name}, self).teardown(context)\n        # Perform any final clean up for the Workload.\n"
  },
  {
    "path": "wa/commands/templates/setup.template",
    "content": "import os\nimport sys\nimport warnings\nfrom multiprocessing import Process\n\ntry:\n    from setuptools.command.install import install as orig_install\n    from setuptools import setup\nexcept ImportError:\n    from distutils.command.install import install as orig_install\n    from distutils.core import setup\n\ntry:\n    import pwd\nexcept ImportError:\n    pwd = None\n\nwarnings.filterwarnings('ignore', \"Unknown distribution option: 'install_requires'\")\n\ntry:\n    os.remove('MANIFEST')\nexcept OSError:\n    pass\n\n\npackages = []\ndata_files = {}\nsource_dir = os.path.dirname(__file__)\nfor root, dirs, files in os.walk('$package_name'):\n    rel_dir = os.path.relpath(root, source_dir)\n    data = []\n    if '__init__.py' in files:\n        for f in files:\n            if os.path.splitext(f)[1] not in ['.py', '.pyc', '.pyo']:\n                data.append(f)\n        package_name = rel_dir.replace(os.sep, '.')\n        package_dir = root\n        packages.append(package_name)\n        data_files[package_name] = data\n    else:\n        # use previous package name\n        filepaths = [os.path.join(root, f) for f in files]\n        data_files[package_name].extend([os.path.relpath(f, package_dir) for f in filepaths])\n\nparams = dict(\n    name='$package_name',\n    version='0.0.1',\n    packages=packages,\n    package_data=data_files,\n    url='N/A',\n    maintainer='$user',\n    maintainer_email='$user@example.com',\n    install_requires=[\n        'wa',\n    ],\n    # https://pypi.python.org/pypi?%3Aaction=list_classifiers\n    classifiers=[\n        'Development Status :: 3 - Alpha',\n        'Environment :: Console',\n        'License :: Other/Proprietary License',\n        'Operating System :: Unix',\n        'Programming Language :: Python :: 3',\n    ],\n)\n\n\ndef update_wa_packages():\n    sudo_user = os.getenv('SUDO_USER')\n    if sudo_user:\n        user_entry = pwd.getpwnam(sudo_user)\n        os.setgid(user_entry.pw_gid)\n        
os.setuid(user_entry.pw_uid)\n    env_root = os.getenv('WA_USER_DIRECTORY', os.path.join(os.path.expanduser('~'), '.workload_automation'))\n    if not os.path.isdir(env_root):\n        os.makedirs(env_root)\n    wa_packages_file = os.path.join(env_root, 'packages')\n    if os.path.isfile(wa_packages_file):\n        with open(wa_packages_file, 'r') as wfh:\n            package_list = wfh.read().split()\n            if params['name'] not in package_list:\n                package_list.append(params['name'])\n    else:  # no existing package file\n        package_list = [params['name']]\n    with open(wa_packages_file, 'w') as wfh:\n        wfh.write('\\n'.join(package_list))\n\n\nclass install(orig_install):\n\n    def run(self):\n        orig_install.run(self)\n        # Must be done in a separate process because will drop privileges if\n        # sudo, and won't be able to reacquire them.\n        p = Process(target=update_wa_packages)\n        p.start()\n        p.join()\n\n\nparams['cmdclass'] = {'install': install}\n\n\nsetup(**params)\n"
  },
  {
    "path": "wa/commands/templates/uiauto/UiAutomation.java",
    "content": "package ${package_name};\n\nimport android.app.Activity;\nimport android.os.Bundle;\nimport org.junit.Test;\nimport org.junit.runner.RunWith;\nimport android.support.test.runner.AndroidJUnit4;\n\nimport android.util.Log;\nimport android.view.KeyEvent;\n\n// Import the uiautomator libraries\nimport android.support.test.uiautomator.UiObject;\nimport android.support.test.uiautomator.UiObjectNotFoundException;\nimport android.support.test.uiautomator.UiScrollable;\nimport android.support.test.uiautomator.UiSelector;\n\nimport org.junit.Before;\nimport org.junit.Test;\nimport org.junit.runner.RunWith;\n\nimport com.arm.wa.uiauto.BaseUiAutomation;\n\n@RunWith(AndroidJUnit4.class)\npublic class UiAutomation extends BaseUiAutomation {\n\n    protected Bundle parameters;\n    protected String packageID;\n    protected int example_parameter;\n\n    public static String TAG = \"${name}\";\n\n    @Before\n    public void initialize() throws Exception {\n        // Perform any parameter initialization here\n        parameters = getParams();\n        packageID = getPackageID(parameters);\n        example_parameter = parameters.getInt(\"example_parameter\");\n    }\n\n    @Test\n    public void setup() throws Exception {\n        // Optional: Perform any setup required before the main workload\n        // is ran, e.g. dismissing welcome screens\n    }\n\n    @Test\n    public void runWorkload() throws Exception {\n\t   // The main UI Automation code goes here\n    }\n\n    @Test\n    public void extractResults() throws Exception {\n        // Optional: Extract any relevant results from the workload,\n    }\n\n    @Test\n    public void teardown() throws Exception {\n        // Optional: Perform any clean up for the workload\n    }\n}\n"
  },
  {
    "path": "wa/commands/templates/uiauto/uiauto_AndroidManifest.xml",
    "content": "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n<manifest xmlns:android=\"http://schemas.android.com/apk/res/android\"\n    package=\"${package_name}\"\n    android:versionCode=\"1\"\n    android:versionName=\"1.0\">\n\n\n    <instrumentation\n        android:name=\"android.support.test.runner.AndroidJUnitRunner\"\n        android:targetPackage=\"${package_name}\"/>\n\n</manifest>\n"
  },
  {
    "path": "wa/commands/templates/uiauto/uiauto_build.gradle",
    "content": "apply plugin: 'com.android.application'\n\nandroid {\n    compileSdkVersion 28\n    buildToolsVersion '28.0.0'\n    defaultConfig {\n        applicationId \"${package_name}\"\n        minSdkVersion 18\n        targetSdkVersion 28\n        testInstrumentationRunner \"android.support.test.runner.AndroidJUnitRunner\"\n    }\n    buildTypes {\n        applicationVariants.all { variant ->\n            variant.outputs.each { output ->\n                output.outputFileName = \"${package_name}.apk\"\n            }\n        }\n    }\n}\n\ndependencies {\n    compile fileTree(include: ['*.jar'], dir: 'libs')\n    compile 'com.android.support.test:runner:0.5'\n    compile 'com.android.support.test:rules:0.5'\n    compile 'com.android.support.test.uiautomator:uiautomator-v18:2.1.2'\n    compile(name: 'uiauto', ext: 'aar')\n}\n\nrepositories {\n    flatDir {\n        dirs 'libs'\n    }\n}\n"
  },
  {
    "path": "wa/commands/templates/uiauto/uiauto_build_script",
    "content": "#!/bin/bash\n\n# CD into build dir if possible - allows building from any directory\nscript_path='.'\nif `readlink -f $$0 &>/dev/null`; then\n    script_path=`readlink -f $$0 2>/dev/null`\nfi\nscript_dir=`dirname $$script_path`\ncd $$script_dir\n\n# Ensure gradelw exists before starting\nif [[ ! -f gradlew ]]; then\n    echo 'gradlew file not found! Check that you are in the right directory.'\n    exit 9\nfi\n\n# Copy base class library from wlauto dist\nlibs_dir=app/libs\nbase_class=`python3 -c \"import os, wa; print(os.path.join(os.path.dirname(wa.__file__), 'framework', 'uiauto', 'uiauto.aar'))\"`\nmkdir -p $$libs_dir\ncp $$base_class $$libs_dir\n\n# Build and return appropriate exit code if failed\n# gradle build\n./gradlew clean :app:assembleDebug\nexit_code=$$?\nif [[ $$exit_code -ne 0 ]]; then\n    echo \"ERROR: 'gradle build' exited with code $$exit_code\"\n    exit $$exit_code\nfi\n\n# If successful move APK file to workload folder (overwrite previous)\nrm -f ../$package_name\nif [[ -f app/build/outputs/apk/debug/$package_name.apk ]]; then\n    cp app/build/outputs/apk/debug/$package_name.apk ../$package_name.apk\nelse\n    echo 'ERROR: UiAutomator apk could not be found!'\n    exit 9\nfi\n"
  },
  {
    "path": "wa/commands/templates/uiauto/uiauto_workload_template/build.gradle",
    "content": "// Top-level build file where you can add configuration options common to all sub-projects/modules.\n\nbuildscript {\n    repositories {\n        jcenter()\n        google()\n    }\n    dependencies {\n        classpath 'com.android.tools.build:gradle:7.2.1'\n\n        // NOTE: Do not place your application dependencies here; they belong\n        // in the individual module build.gradle files\n    }\n}\n\nallprojects {\n    repositories {\n        jcenter()\n        google()\n    }\n}\n\ntask clean(type: Delete) {\n    delete rootProject.buildDir\n}\n"
  },
  {
    "path": "wa/commands/templates/uiauto/uiauto_workload_template/gradle/wrapper/gradle-wrapper.properties",
    "content": "#Wed May 03 15:42:44 BST 2017\ndistributionBase=GRADLE_USER_HOME\ndistributionPath=wrapper/dists\nzipStoreBase=GRADLE_USER_HOME\nzipStorePath=wrapper/dists\ndistributionUrl=https\\://services.gradle.org/distributions/gradle-7.3.3-all.zip\n"
  },
  {
    "path": "wa/commands/templates/uiauto/uiauto_workload_template/gradlew",
    "content": "#!/usr/bin/env bash\n\n##############################################################################\n##\n##  Gradle start up script for UN*X\n##\n##############################################################################\n\n# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.\nDEFAULT_JVM_OPTS=\"\"\n\nAPP_NAME=\"Gradle\"\nAPP_BASE_NAME=`basename \"$0\"`\n\n# Use the maximum available, or set MAX_FD != -1 to use that value.\nMAX_FD=\"maximum\"\n\nwarn ( ) {\n    echo \"$*\"\n}\n\ndie ( ) {\n    echo\n    echo \"$*\"\n    echo\n    exit 1\n}\n\n# OS specific support (must be 'true' or 'false').\ncygwin=false\nmsys=false\ndarwin=false\ncase \"`uname`\" in\n  CYGWIN* )\n    cygwin=true\n    ;;\n  Darwin* )\n    darwin=true\n    ;;\n  MINGW* )\n    msys=true\n    ;;\nesac\n\n# Attempt to set APP_HOME\n# Resolve links: $0 may be a link\nPRG=\"$0\"\n# Need this for relative symlinks.\nwhile [ -h \"$PRG\" ] ; do\n    ls=`ls -ld \"$PRG\"`\n    link=`expr \"$ls\" : '.*-> \\(.*\\)$'`\n    if expr \"$link\" : '/.*' > /dev/null; then\n        PRG=\"$link\"\n    else\n        PRG=`dirname \"$PRG\"`\"/$link\"\n    fi\ndone\nSAVED=\"`pwd`\"\ncd \"`dirname \\\"$PRG\\\"`/\" >/dev/null\nAPP_HOME=\"`pwd -P`\"\ncd \"$SAVED\" >/dev/null\n\nCLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar\n\n# Determine the Java command to use to start the JVM.\nif [ -n \"$JAVA_HOME\" ] ; then\n    if [ -x \"$JAVA_HOME/jre/sh/java\" ] ; then\n        # IBM's JDK on AIX uses strange locations for the executables\n        JAVACMD=\"$JAVA_HOME/jre/sh/java\"\n    else\n        JAVACMD=\"$JAVA_HOME/bin/java\"\n    fi\n    if [ ! 
-x \"$JAVACMD\" ] ; then\n        die \"ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME\n\nPlease set the JAVA_HOME variable in your environment to match the\nlocation of your Java installation.\"\n    fi\nelse\n    JAVACMD=\"java\"\n    which java >/dev/null 2>&1 || die \"ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.\n\nPlease set the JAVA_HOME variable in your environment to match the\nlocation of your Java installation.\"\nfi\n\n# Increase the maximum file descriptors if we can.\nif [ \"$cygwin\" = \"false\" -a \"$darwin\" = \"false\" ] ; then\n    MAX_FD_LIMIT=`ulimit -H -n`\n    if [ $? -eq 0 ] ; then\n        if [ \"$MAX_FD\" = \"maximum\" -o \"$MAX_FD\" = \"max\" ] ; then\n            MAX_FD=\"$MAX_FD_LIMIT\"\n        fi\n        ulimit -n $MAX_FD\n        if [ $? -ne 0 ] ; then\n            warn \"Could not set maximum file descriptor limit: $MAX_FD\"\n        fi\n    else\n        warn \"Could not query maximum file descriptor limit: $MAX_FD_LIMIT\"\n    fi\nfi\n\n# For Darwin, add options to specify how the application appears in the dock\nif $darwin; then\n    GRADLE_OPTS=\"$GRADLE_OPTS \\\"-Xdock:name=$APP_NAME\\\" \\\"-Xdock:icon=$APP_HOME/media/gradle.icns\\\"\"\nfi\n\n# For Cygwin, switch paths to Windows format before running java\nif $cygwin ; then\n    APP_HOME=`cygpath --path --mixed \"$APP_HOME\"`\n    CLASSPATH=`cygpath --path --mixed \"$CLASSPATH\"`\n    JAVACMD=`cygpath --unix \"$JAVACMD\"`\n\n    # We build the pattern for arguments to be converted via cygpath\n    ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`\n    SEP=\"\"\n    for dir in $ROOTDIRSRAW ; do\n        ROOTDIRS=\"$ROOTDIRS$SEP$dir\"\n        SEP=\"|\"\n    done\n    OURCYGPATTERN=\"(^($ROOTDIRS))\"\n    # Add a user-defined pattern to the cygpath arguments\n    if [ \"$GRADLE_CYGPATTERN\" != \"\" ] ; then\n        OURCYGPATTERN=\"$OURCYGPATTERN|($GRADLE_CYGPATTERN)\"\n    fi\n    # Now convert the arguments - 
kludge to limit ourselves to /bin/sh\n    i=0\n    for arg in \"$@\" ; do\n        CHECK=`echo \"$arg\"|egrep -c \"$OURCYGPATTERN\" -`\n        CHECK2=`echo \"$arg\"|egrep -c \"^-\"`                                 ### Determine if an option\n\n        if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then                    ### Added a condition\n            eval `echo args$i`=`cygpath --path --ignore --mixed \"$arg\"`\n        else\n            eval `echo args$i`=\"\\\"$arg\\\"\"\n        fi\n        i=$((i+1))\n    done\n    case $i in\n        (0) set -- ;;\n        (1) set -- \"$args0\" ;;\n        (2) set -- \"$args0\" \"$args1\" ;;\n        (3) set -- \"$args0\" \"$args1\" \"$args2\" ;;\n        (4) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" ;;\n        (5) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" ;;\n        (6) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" \"$args5\" ;;\n        (7) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" \"$args5\" \"$args6\" ;;\n        (8) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" \"$args5\" \"$args6\" \"$args7\" ;;\n        (9) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" \"$args5\" \"$args6\" \"$args7\" \"$args8\" ;;\n    esac\nfi\n\n# Split up the JVM_OPTS And GRADLE_OPTS values into an array, following the shell quoting and substitution rules\nfunction splitJvmOpts() {\n    JVM_OPTS=(\"$@\")\n}\neval splitJvmOpts $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS\nJVM_OPTS[${#JVM_OPTS[*]}]=\"-Dorg.gradle.appname=$APP_BASE_NAME\"\n\nexec \"$JAVACMD\" \"${JVM_OPTS[@]}\" -classpath \"$CLASSPATH\" org.gradle.wrapper.GradleWrapperMain \"$@\"\n"
  },
  {
    "path": "wa/commands/templates/uiauto/uiauto_workload_template/gradlew.bat",
    "content": "@if \"%DEBUG%\" == \"\" @echo off\r\n@rem ##########################################################################\r\n@rem\r\n@rem  Gradle startup script for Windows\r\n@rem\r\n@rem ##########################################################################\r\n\r\n@rem Set local scope for the variables with windows NT shell\r\nif \"%OS%\"==\"Windows_NT\" setlocal\r\n\r\n@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.\r\nset DEFAULT_JVM_OPTS=\r\n\r\nset DIRNAME=%~dp0\r\nif \"%DIRNAME%\" == \"\" set DIRNAME=.\r\nset APP_BASE_NAME=%~n0\r\nset APP_HOME=%DIRNAME%\r\n\r\n@rem Find java.exe\r\nif defined JAVA_HOME goto findJavaFromJavaHome\r\n\r\nset JAVA_EXE=java.exe\r\n%JAVA_EXE% -version >NUL 2>&1\r\nif \"%ERRORLEVEL%\" == \"0\" goto init\r\n\r\necho.\r\necho ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.\r\necho.\r\necho Please set the JAVA_HOME variable in your environment to match the\r\necho location of your Java installation.\r\n\r\ngoto fail\r\n\r\n:findJavaFromJavaHome\r\nset JAVA_HOME=%JAVA_HOME:\"=%\r\nset JAVA_EXE=%JAVA_HOME%/bin/java.exe\r\n\r\nif exist \"%JAVA_EXE%\" goto init\r\n\r\necho.\r\necho ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%\r\necho.\r\necho Please set the JAVA_HOME variable in your environment to match the\r\necho location of your Java installation.\r\n\r\ngoto fail\r\n\r\n:init\r\n@rem Get command-line arguments, handling Windowz variants\r\n\r\nif not \"%OS%\" == \"Windows_NT\" goto win9xME_args\r\nif \"%@eval[2+2]\" == \"4\" goto 4NT_args\r\n\r\n:win9xME_args\r\n@rem Slurp the command line arguments.\r\nset CMD_LINE_ARGS=\r\nset _SKIP=2\r\n\r\n:win9xME_args_slurp\r\nif \"x%~1\" == \"x\" goto execute\r\n\r\nset CMD_LINE_ARGS=%*\r\ngoto execute\r\n\r\n:4NT_args\r\n@rem Get arguments from the 4NT Shell from JP Software\r\nset CMD_LINE_ARGS=%$\r\n\r\n:execute\r\n@rem Setup the command line\r\n\r\nset 
CLASSPATH=%APP_HOME%\\gradle\\wrapper\\gradle-wrapper.jar\r\n\r\n@rem Execute Gradle\r\n\"%JAVA_EXE%\" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% \"-Dorg.gradle.appname=%APP_BASE_NAME%\" -classpath \"%CLASSPATH%\" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%\r\n\r\n:end\r\n@rem End local scope for the variables with windows NT shell\r\nif \"%ERRORLEVEL%\"==\"0\" goto mainEnd\r\n\r\n:fail\r\nrem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of\r\nrem the _cmd.exe /c_ return code!\r\nif  not \"\" == \"%GRADLE_EXIT_CONSOLE%\" exit 1\r\nexit /b 1\r\n\r\n:mainEnd\r\nif \"%OS%\"==\"Windows_NT\" endlocal\r\n\r\n:omega\r\n"
  },
  {
    "path": "wa/commands/templates/uiauto/uiauto_workload_template/settings.gradle",
    "content": "include ':app'\n"
  },
  {
    "path": "wa/commands/templates/uiauto_workload",
    "content": "from wa import Parameter, UiautoWorkload\n\n\nclass ${class_name}(UiautoWorkload):\n\n    name = '${name}'\n    description = \"This is an placeholder description\"\n    # Replace with a list of supported package names from the APK file(s)\n    package_names = ['package_name']\n\n    parameters = [\n        # Workload parameters go here e.g.\n        Parameter('example_parameter', kind=int, allowed_values=[1,2,3],\n                  default=1, override=True, mandatory=False,\n                  description='This is an example parameter')\n    ]\n\n    def __init__(self, target, **kwargs):\n        super(${class_name}, self).__init__(target, **kwargs)\n        # Define any additional attributes required for the workload\n\n    def init_resources(self, resolver):\n        super(${class_name}, self).init_resources(resolver)\n        # This method may be used to perform early resource discovery and\n        # initialization. This is invoked during the initial loading stage and\n        # before the device is ready, so cannot be used for any device-dependent\n        # initialization. 
This method is invoked before the workload instance is\n        # validated.\n\n    def initialize(self, context):\n        super(${class_name}, self).initialize(context)\n        # This method should be used to perform once-per-run initialization of a\n        # workload instance.\n\n    def validate(self):\n        super(${class_name}, self).validate()\n        # Validate inter-parameter assumptions etc\n\n    def setup(self, context):\n        super(${class_name}, self).setup(context)\n        # Perform any necessary setup before starting the UI automation\n\n    def run(self, context):\n        super(${class_name}, self).run(context)\n        # Perform the main functionality of the workload\n\n    def extract_results(self, context):\n        super(${class_name}, self).extract_results(context)\n        # Extract results on the target\n\n    def update_output(self, context):\n        super(${class_name}, self).update_output(context)\n        # Update the output within the specified execution context with the\n        # metrics and artifacts from this workload iteration.\n\n    def teardown(self, context):\n        super(${class_name}, self).teardown(context)\n        # Perform any final clean up for the Workload.\n"
  },
  {
    "path": "wa/framework/__init__.py",
    "content": ""
  },
  {
    "path": "wa/framework/command.py",
    "content": "#    Copyright 2014-2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\nimport textwrap\n\nfrom wa.framework.exception import CommandError\nfrom wa.framework.plugin import Plugin\nfrom wa.framework.version import get_wa_version\nfrom wa.utils.doc import format_body\n\n\ndef init_argument_parser(parser):\n    parser.add_argument('-c', '--config', action='append', default=[],\n                        help='specify an additional config.yaml')\n    parser.add_argument('-v', '--verbose', action='count',\n                        help='The scripts will produce verbose output.')\n    parser.add_argument('--version', action='version',\n                        version='%(prog)s {}'.format(get_wa_version()))\n    return parser\n\n\nclass SubCommand(object):\n    \"\"\"\n    Defines a Workload Automation command. This will be executed from the\n    command line as ``wa <command> [args ...]``. 
This defines the name to be\n    used when invoking wa, the code that will actually be executed on\n    invocation and the argument parser to be used to parse the rest of the\n    command line arguments.\n\n    \"\"\"\n    name = None\n    help = None\n    usage = None\n    description = None\n    epilog = None\n    formatter_class = None\n\n    def __init__(self, logger, subparsers):\n        self.logger = logger\n        self.group = subparsers\n        desc = format_body(textwrap.dedent(self.description), 80)\n        parser_params = dict(help=(self.help or self.description), usage=self.usage,\n                             description=desc, epilog=self.epilog)\n        if self.formatter_class:\n            parser_params['formatter_class'] = self.formatter_class\n        self.parser = subparsers.add_parser(self.name, **parser_params)\n        init_argument_parser(self.parser)  # propagate top-level options\n        self.initialize(None)\n\n    def initialize(self, context):\n        \"\"\"\n        Perform command-specific initialisation (e.g. adding command-specific\n        options to the command's parser). ``context`` is always ``None``.\n\n        \"\"\"\n\n    def execute(self, state, args):\n        \"\"\"\n        Execute this command.\n\n        :state: An initialized ``ConfigManager`` that contains the current state of\n                WA execution up to that point (processed configuration, loaded\n                plugins, etc).\n        :args: An ``argparse.Namespace`` containing command line arguments (as\n               returned by ``argparse.ArgumentParser.parse_args()``). This would\n               usually be the result of invoking ``self.parser``.\n\n        \"\"\"\n        raise NotImplementedError()\n\n\nclass Command(Plugin, SubCommand):  # pylint: disable=abstract-method\n    \"\"\"\n    Defines a Workload Automation command. This will be executed from the\n    command line as ``wa <command> [args ...]``. 
This defines the name to be\n    used when invoking wa, the code that will actually be executed on\n    invocation and the argument parser to be used to parse the rest of the\n    command line arguments.\n\n    \"\"\"\n    kind = \"command\"\n\n    def __init__(self, subparsers):\n        Plugin.__init__(self)\n        SubCommand.__init__(self, self.logger, subparsers)\n\n\nclass ComplexCommand(Command):\n    \"\"\"\n    A command that defines sub-commands.\n\n    \"\"\"\n\n    subcmd_classes = []\n\n    def __init__(self, subparsers):\n        self.subcommands = []\n        super(ComplexCommand, self).__init__(subparsers)\n\n    def initialize(self, context):\n        subparsers = self.parser.add_subparsers(dest='what', metavar='SUBCMD')\n        subparsers.required = True\n        for subcmd_cls in self.subcmd_classes:\n            subcmd = subcmd_cls(self.logger, subparsers)\n            self.subcommands.append(subcmd)\n\n    def execute(self, state, args):\n        for subcmd in self.subcommands:\n            if subcmd.name == args.what:\n                subcmd.execute(state, args)\n                break\n        else:\n            raise CommandError('Not a valid create parameter: {}'.format(args.name))\n"
  },
  {
    "path": "wa/framework/configuration/__init__.py",
    "content": "#    Copyright 2013-2017 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\nfrom wa.framework.configuration.core import (settings,\n                                             RunConfiguration,\n                                             JobGenerator,\n                                             ConfigurationPoint)\n"
  },
  {
    "path": "wa/framework/configuration/core.py",
    "content": "#    Copyright 2014-2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport os\nimport logging\nfrom copy import copy, deepcopy\nfrom collections import OrderedDict, defaultdict\n\nfrom wa.framework.exception import ConfigError, NotFoundError\nfrom wa.framework.configuration.tree import SectionNode\nfrom wa.utils import log\nfrom wa.utils.misc import (get_article, merge_config_values)\nfrom wa.utils.types import (identifier, integer, boolean, list_of_strings,\n                            list_of, toggle_set, obj_dict, enum)\nfrom wa.utils.serializer import is_pod, Podable\n\n\n# Mapping for kind conversion; see docs for convert_types below\nKIND_MAP = {\n    int: integer,\n    bool: boolean,\n    dict: OrderedDict,\n}\n\nStatus = enum(['UNKNOWN', 'NEW', 'PENDING',\n               'STARTED', 'CONNECTED', 'INITIALIZED', 'RUNNING',\n               'OK', 'PARTIAL', 'FAILED', 'ABORTED', 'SKIPPED'])\n\nlogger = logging.getLogger('config')\n\n\n##########################\n### CONFIG POINT TYPES ###\n##########################\n\n\nclass RebootPolicy(object):\n    \"\"\"\n    Represents the reboot policy for the execution -- at what points the device\n    should be rebooted. 
This, in turn, is controlled by the policy value that is\n    passed in on construction and would typically be read from the user's settings.\n    Valid policy values are:\n\n    :never: The device will never be rebooted.\n    :as_needed: Only reboot the device if it becomes unresponsive, or needs to be flashed, etc.\n    :initial: The device will be rebooted when the execution first starts, just before\n              executing the first workload spec.\n    :each_spec: The device will be rebooted before running a new workload spec.\n    :each_job: The device will be rebooted before each new job.\n    :run_completion: The device will be rebooted after the run has been completed.\n\n    \"\"\"\n\n    valid_policies = ['never', 'as_needed', 'initial', 'each_spec', 'each_job', 'run_completion']\n\n    @staticmethod\n    def from_pod(pod):\n        return RebootPolicy(pod)\n\n    def __init__(self, policy):\n        if isinstance(policy, RebootPolicy):\n            policy = policy.policy\n        policy = policy.strip().lower().replace(' ', '_')\n        if policy not in self.valid_policies:\n            message = 'Invalid reboot policy {}; must be one of {}'.format(policy, ', '.join(self.valid_policies))\n            raise ConfigError(message)\n        self.policy = policy\n\n    @property\n    def can_reboot(self):\n        return self.policy != 'never'\n\n    @property\n    def perform_initial_reboot(self):\n        return self.policy == 'initial'\n\n    @property\n    def reboot_on_each_job(self):\n        return self.policy == 'each_job'\n\n    @property\n    def reboot_on_each_spec(self):\n        return self.policy == 'each_spec'\n\n    @property\n    def reboot_on_run_completion(self):\n        return self.policy == 'run_completion'\n\n    def __str__(self):\n        return self.policy\n\n    __repr__ = __str__\n\n    def __eq__(self, other):\n        if isinstance(other, RebootPolicy):\n            return self.policy == other.policy\n        else:\n
           return self.policy == other\n\n    def to_pod(self):\n        return self.policy\n\n\nclass status_list(list):\n\n    def append(self, item):\n        list.append(self, str(item).upper())\n\n\nclass LoggingConfig(Podable, dict):\n\n    _pod_serialization_version = 1\n\n    defaults = {\n        'file_format': '%(asctime)s %(levelname)-8s %(name)s: %(message)s',\n        'verbose_format': '%(asctime)s %(levelname)-8s %(name)s: %(message)s',\n        'regular_format': '%(levelname)-8s %(message)s',\n        'color': True,\n    }\n\n    @staticmethod\n    def from_pod(pod):\n        pod = LoggingConfig._upgrade_pod(pod)\n        pod_version = pod.pop('_pod_version')\n        instance = LoggingConfig(pod)\n        instance._pod_version = pod_version  # pylint: disable=protected-access\n        return instance\n\n    def __init__(self, config=None):\n        super(LoggingConfig, self).__init__()\n        dict.__init__(self)\n        if isinstance(config, dict):\n            config = {identifier(k.lower()): v for k, v in config.items()}\n            self['regular_format'] = config.pop('regular_format', self.defaults['regular_format'])\n            self['verbose_format'] = config.pop('verbose_format', self.defaults['verbose_format'])\n            self['file_format'] = config.pop('file_format', self.defaults['file_format'])\n            self['color'] = config.pop('colour_enabled', self.defaults['color'])  # legacy\n            self['color'] = config.pop('color', self.defaults['color'])\n            if config:\n                message = 'Unexpected logging configuration parameters: {}'\n                raise ValueError(message.format(bad_vals=', '.join(list(config.keys()))))\n        elif config is None:\n            for k, v in self.defaults.items():\n                self[k] = v\n        else:\n            raise ValueError(config)\n\n    def to_pod(self):\n        pod = super(LoggingConfig, self).to_pod()\n        pod.update(self)\n        return pod\n\n    
@staticmethod\n    def _pod_upgrade_v1(pod):\n        pod['_pod_version'] = pod.get('_pod_version', 1)\n        return pod\n\n\ndef expanded_path(path):\n    \"\"\"\n    Ensure that the provided path has been expanded if applicable\n    \"\"\"\n    return os.path.expanduser(str(path))\n\n\ndef get_type_name(kind):\n    typename = str(kind)\n    if '\\'' in typename:\n        typename = typename.split('\\'')[1]\n    elif typename.startswith('<function'):\n        typename = typename.split()[1]\n    return typename\n\n\nclass ConfigurationPoint(object):\n    \"\"\"\n    This defines a generic configuration point for workload automation. This is\n    used to handle global settings, plugin parameters, etc.\n\n    \"\"\"\n\n    def __init__(self, name,\n                 kind=None,\n                 mandatory=None,\n                 default=None,\n                 override=False,\n                 allowed_values=None,\n                 description=None,\n                 constraint=None,\n                 merge=False,\n                 aliases=None,\n                 global_alias=None,\n                 deprecated=False):\n        \"\"\"\n        Create a new Parameter object.\n\n        :param name: The name of the parameter. This will become an instance\n                     member of the plugin object to which the parameter is\n                     applied, so it must be a valid python  identifier. This\n                     is the only mandatory parameter.\n        :param kind: The type of parameter this is. This must be a callable\n                     that takes an arbitrary object and converts it to the\n                     expected type, or raised ``ValueError`` if such conversion\n                     is not possible. Most Python standard types -- ``str``,\n                     ``int``, ``bool``, etc. -- can be used here. 
This\n                     defaults to ``str`` if not specified.\n        :param mandatory: If set to ``True``, then a non-``None`` value for\n                          this parameter *must* be provided on plugin\n                          object construction, otherwise ``ConfigError``\n                          will be raised.\n        :param default: The default value for this parameter. If no value\n                        is specified on plugin construction, this value\n                        will be used instead. (Note: if this is specified\n                        and is not ``None``, then ``mandatory`` parameter\n                        will be ignored).\n        :param override: A ``bool`` that specifies whether a parameter of\n                         the same name further up the hierarchy should\n                         be overridden. If this is ``False`` (the\n                         default), an exception will be raised by the\n                         ``AttributeCollection`` instead.\n        :param allowed_values: This should be the complete list of allowed\n                               values for this parameter.  Note: ``None``\n                               value will always be allowed, even if it is\n                               not in this list.  If you want to disallow\n                               ``None``, set ``mandatory`` to ``True``.\n        :param constraint: If specified, this must be a callable that takes\n                           the parameter value as an argument and return a\n                           boolean indicating whether the constraint has been\n                           satisfied. 
Alternatively, can be a two-tuple with\n                           said callable as the first element and a string\n                           describing the constraint as the second.\n        :param merge: The default behaviour when setting a value on an object\n                      that already has that attribute is to overrided with\n                      the new value. If this is set to ``True`` then the two\n                      values will be merged instead. The rules by which the\n                      values are merged will be determined by the types of\n                      the existing and new values -- see\n                      ``merge_config_values`` documentation for details.\n        :param aliases: Alternative names for the same configuration point.\n                        These are largely for backwards compatibility.\n        :param global_alias: An alias for this parameter that can be specified at\n                            the global level. A global_alias can map onto many\n                            ConfigurationPoints.\n        :param deprecated: Specify that this parameter is deprecated and its\n                           config should be ignored. 
If supplied WA will display\n                           a warning to the user however will continue execution.\n        \"\"\"\n        self.name = identifier(name)\n        kind = KIND_MAP.get(kind, kind)\n        if kind is not None and not callable(kind):\n            raise ValueError('Kind must be callable.')\n        self.kind = kind\n        self.mandatory = mandatory\n        if not is_pod(default):\n            msg = \"The default for '{}' must be a Plain Old Data type, but it is of type '{}' instead.\"\n            raise TypeError(msg.format(self.name, type(default)))\n        self.default = default\n        self.override = override\n        self.allowed_values = allowed_values\n        self.description = description\n        if self.kind is None and not self.override:\n            self.kind = str\n        if constraint is not None and not callable(constraint) and not isinstance(constraint, tuple):\n            raise ValueError('Constraint must be callable or a (callable, str) tuple.')\n        self.constraint = constraint\n        self.merge = merge\n        self.aliases = aliases or []\n        self.global_alias = global_alias\n        self.deprecated = deprecated\n\n        if self.default is not None:\n            try:\n                self.validate_value(\"init\", self.default)\n            except ConfigError:\n                raise ValueError('Default value \"{}\" is not valid'.format(self.default))\n\n    def match(self, name):\n        if name == self.name or name in self.aliases:\n            return True\n        elif name == self.global_alias:\n            return True\n        return False\n\n    def set_value(self, obj, value=None, check_mandatory=True):\n        if self.deprecated:\n            if value is not None:\n                msg = 'Depreciated parameter supplied for \"{}\" in \"{}\". 
The value will be ignored.'\n                logger.warning(msg.format(self.name, obj.name))\n            return\n        if value is None:\n            if self.default is not None:\n                value = self.kind(self.default)\n            elif check_mandatory and self.mandatory:\n                msg = 'No values specified for mandatory parameter \"{}\" in {}'\n                raise ConfigError(msg.format(self.name, obj.name))\n        else:\n            try:\n                value = self.kind(value)\n            except (ValueError, TypeError):\n                typename = get_type_name(self.kind)\n                msg = 'Bad value \"{}\" for {}; must be {} {}'\n                article = get_article(typename)\n                raise ConfigError(msg.format(value, self.name, article, typename))\n        if value is not None:\n            self.validate_value(self.name, value)\n        if self.merge and hasattr(obj, self.name):\n            value = merge_config_values(getattr(obj, self.name), value)\n        setattr(obj, self.name, value)\n\n    def validate(self, obj, check_mandatory=True):\n        if self.deprecated:\n            return\n        value = getattr(obj, self.name, None)\n        if value is not None:\n            self.validate_value(obj.name, value)\n        else:\n            if check_mandatory and self.mandatory:\n                msg = 'No value specified for mandatory parameter \"{}\" in {}.'\n                raise ConfigError(msg.format(self.name, obj.name))\n\n    def validate_value(self, name, value):\n        if self.allowed_values:\n            self.validate_allowed_values(name, value)\n        if self.constraint:\n            self.validate_constraint(name, value)\n\n    def validate_allowed_values(self, name, value):\n        if 'list' in str(self.kind):\n            for v in value:\n                if v not in self.allowed_values:\n                    msg = 'Invalid value {} for {} in {}; must be in {}'\n                    raise 
ConfigError(msg.format(v, self.name, name, self.allowed_values))\n        else:\n            if value not in self.allowed_values:\n                msg = 'Invalid value {} for {} in {}; must be in {}'\n                raise ConfigError(msg.format(value, self.name, name, self.allowed_values))\n\n    def validate_constraint(self, name, value):\n        msg_vals = {'value': value, 'param': self.name, 'plugin': name}\n        if isinstance(self.constraint, tuple) and len(self.constraint) == 2:\n            constraint, msg = self.constraint  # pylint: disable=unpacking-non-sequence\n        elif callable(self.constraint):\n            constraint = self.constraint\n            msg = '\"{value}\" failed constraint validation for \"{param}\" in \"{plugin}\".'\n        else:\n            raise ValueError('Invalid constraint for \"{}\": must be callable or a 2-tuple'.format(self.name))\n        if not constraint(value):\n            raise ConfigError(value, msg.format(**msg_vals))\n\n    def __repr__(self):\n        d = copy(self.__dict__)\n        del d['description']\n        return 'ConfigurationPoint({})'.format(d)\n\n    __str__ = __repr__\n\n\n#####################\n### Configuration ###\n#####################\n\n\ndef _to_pod(cfg_point, value):\n    if is_pod(value):\n        return value\n    if hasattr(cfg_point.kind, 'to_pod'):\n        return value.to_pod()\n    msg = '{} value \"{}\" is not serializable'\n    raise ValueError(msg.format(cfg_point.name, value))\n\n\nclass Configuration(Podable):\n\n    _pod_serialization_version = 1\n    config_points = []\n    name = ''\n\n    # The below line must be added to all subclasses\n    configuration = {cp.name: cp for cp in config_points}\n\n    @classmethod\n    def from_pod(cls, pod):\n        instance = super(Configuration, cls).from_pod(pod)\n        for cfg_point in cls.config_points:\n            if cfg_point.name in pod:\n                value = pod.pop(cfg_point.name)\n                if hasattr(cfg_point.kind, 
'from_pod'):
                    value = cfg_point.kind.from_pod(value)
                cfg_point.set_value(instance, value)
        if pod:
            msg = 'Invalid entry(ies) for "{}": "{}"'
            raise ValueError(msg.format(cls.name, '", "'.join(list(pod.keys()))))
        return instance

    def __init__(self):
        super(Configuration, self).__init__()
        # Initialise every config point to its default (mandatory checks are
        # deferred until validate()).
        for confpoint in self.config_points:
            confpoint.set_value(self, check_mandatory=False)

    def set(self, name, value, check_mandatory=True):
        """Set the named configuration point, raising ConfigError on bad names or values."""
        if name not in self.configuration:
            raise ConfigError('Unknown {} configuration "{}"'.format(self.name,
                                                                     name))
        try:
            self.configuration[name].set_value(self, value,
                                               check_mandatory=check_mandatory)
        except (TypeError, ValueError, ConfigError) as e:
            msg = 'Invalid value "{}" for "{}": {}'
            raise ConfigError(msg.format(value, name, e))

    def update_config(self, values, check_mandatory=True):
        """Set multiple configuration points from a name->value mapping."""
        for k, v in values.items():
            self.set(k, v, check_mandatory=check_mandatory)

    def validate(self):
        for cfg_point in self.config_points:
            cfg_point.validate(self)

    def to_pod(self):
        pod = super(Configuration, self).to_pod()
        for cfg_point in self.config_points:
            value = getattr(self, cfg_point.name, None)
            pod[cfg_point.name] = _to_pod(cfg_point, value)
        return pod

    @staticmethod
    def _pod_upgrade_v1(pod):
        pod['_pod_version'] = pod.get('_pod_version', 1)
        return pod


# This is the configuration for the core WA framework itself.
class MetaConfiguration(Configuration):

    name = "Meta Configuration"

    # Python packages scanned for WA plugins by default.
    core_plugin_packages = [
        'wa.commands',
        'wa.framework.getters',
        'wa.framework.target.descriptor',
        'wa.instruments',
        'wa.output_processors',
        'wa.workloads',
    ]

    config_points = [
        ConfigurationPoint(
            'user_directory',
            description="""
            Path to the user directory. This is the location WA will look for
            user configuration, additional plugins and plugin dependencies.
            """,
            kind=expanded_path,
            default=os.path.join(os.path.expanduser('~'), '.workload_automation'),
        ),
        ConfigurationPoint(
            'assets_repository',
            description="""
            The local mount point for the filer hosting WA assets.
            """,
            default=''
        ),
        ConfigurationPoint(
            'logging',
            kind=LoggingConfig,
            default=LoggingConfig.defaults,
            description="""
            WA logging configuration. This should be a dict with a subset
            of the following keys::

                :normal_format: Logging format used for console output
                :verbose_format: Logging format used for verbose console output
                :file_format: Logging format used for run.log
                :color: If ``True`` (the default), console logging output will
                        contain bash color escape codes. Set this to ``False`` if
                        console output will be piped somewhere that does not know
                        how to handle those.
            """,
        ),
        ConfigurationPoint(
            'verbosity',
            kind=int,
            default=0,
            description="""
            Verbosity of console output.
            """,
        ),
        ConfigurationPoint(  # TODO: Needs some format for dates etc/ comes from cfg
            'default_output_directory',
            default="wa_output",
            description="""
            The default output directory that will be created if not
            specified when invoking a run.
            """,
        ),
        ConfigurationPoint(
            'extra_plugin_paths',
            kind=list_of_strings,
            description="""
            A list of additional paths to scan for plugins.
            """,
        ),
    ]
    configuration = {cp.name: cp for cp in config_points}

    # Locations derived from user_directory.
    @property
    def dependencies_directory(self):
        return os.path.join(self.user_directory, 'dependencies')

    @property
    def plugins_directory(self):
        return os.path.join(self.user_directory, 'plugins')

    @property
    def cache_directory(self):
        return os.path.join(self.user_directory, 'cache')

    @property
    def plugin_paths(self):
        return [self.plugins_directory] + (self.extra_plugin_paths or [])

    @property
    def user_config_file(self):
        return os.path.join(self.user_directory, 'config.yaml')

    @property
    def additional_packages_file(self):
        return os.path.join(self.user_directory, 'packages')

    @property
    def target_info_cache_file(self):
        return os.path.join(self.cache_directory, 'targets.json')

    @property
    def apk_info_cache_file(self):
        return os.path.join(self.cache_directory, 'apk_info.json')

    def __init__(self, environ=None):
        # environ defaults to os.environ; a dict may be injected for testing.
        # NOTE(review): pop() mutates the passed mapping (including os.environ).
        super(MetaConfiguration, self).__init__()
        if environ is None:
            environ = os.environ
        user_directory = environ.pop('WA_USER_DIRECTORY', '')
        if user_directory:
            self.set('user_directory', user_directory)

        extra_plugin_paths = environ.pop('WA_PLUGIN_PATHS', '')
        if extra_plugin_paths:
            self.set('extra_plugin_paths', extra_plugin_paths.split(os.pathsep))

        # Extend the core plugin packages with any listed in the user's
        # "packages" file (one package name per line).
        self.plugin_packages = copy(self.core_plugin_packages)
        if os.path.isfile(self.additional_packages_file):
            with open(self.additional_packages_file) as fh:
                extra_packages = [p.strip() for p in fh.read().split('\n') if p.strip()]
                self.plugin_packages.extend(extra_packages)


# This is generic top-level configuration for WA runs.
class RunConfiguration(Configuration):

    name = "Run Configuration"

    # Metadata is separated out because it is not loaded into the auto
    # generated config file
    meta_data = [
        ConfigurationPoint(
            'run_name',
            kind=str,
            description='''
            A string that labels the WA run that is being performed. This would
            typically be set in the ``config`` section of an agenda (see
            :ref:`configuration in an agenda <configuration_in_agenda>`) rather
            than in the config file.
            ''',
        ),
        ConfigurationPoint(
            'project',
            kind=str,
            description='''
            A string naming the project for which data is being collected. This
            may be useful, e.g. when uploading data to a shared database that
            is populated from multiple projects.
            ''',
        ),
        ConfigurationPoint(
            'project_stage',
            kind=dict,
            description='''
            A dict or a string that allows adding additional identifier. This
This\n            is may be useful for long-running projects.\n            ''',\n        ),\n    ]\n    config_points = [\n        ConfigurationPoint(\n            'execution_order',\n            kind=str,\n            default='by_iteration',\n            allowed_values=['by_iteration', 'by_section', 'by_workload', 'random'],\n            description='''\n            Defines the order in which the agenda spec will be executed. At the\n            moment, the following execution orders are supported:\n\n            ``\"by_iteration\"``\n                The first iteration of each workload spec is executed one after\n                the other, so all workloads are executed before proceeding on\n                to the second iteration.  E.g. A1 B1 C1 A2 C2 A3. This is the\n                default if no order is explicitly specified.\n\n                In case of multiple sections, this will spread them out, such\n                that specs from the same section are further part. E.g. given\n                sections X and Y, global specs A and B, and two iterations,\n                this will run ::\n\n                        X.A1, Y.A1, X.B1, Y.B1, X.A2, Y.A2, X.B2, Y.B2\n\n            ``\"by_section\"``\n                Same  as ``\"by_iteration\"``, however this will group specs from\n                the same section together, so given sections X and Y, global\n                specs A and B, and two iterations, this will run ::\n\n                        X.A1, X.B1, Y.A1, Y.B1, X.A2, X.B2, Y.A2, Y.B2\n\n            ``\"by_workload\"``\n                All iterations of the first spec are executed before moving on\n                to the next spec. 
E.g::\n\n                        X.A1, X.A2, Y.A1, Y.A2, X.B1, X.B2, Y.B1, Y.B2\n\n            ``\"random\"``\n                Execution order is entirely random.\n            ''',\n        ),\n        ConfigurationPoint(\n            'reboot_policy',\n            kind=RebootPolicy,\n            default='as_needed',\n            allowed_values=RebootPolicy.valid_policies,\n            description='''\n            This defines when during execution of a run the Device will be\n            rebooted. The possible values are:\n\n            ``\"as_needed\"``\n                The device will only be rebooted if the need arises (e.g. if it\n                becomes unresponsive.\n\n            ``\"never\"``\n                The device will never be rebooted.\n\n            ``\"initial\"``\n                The device will be rebooted when the execution first starts,\n                just before executing the first workload spec.\n\n            ``\"each_job\"``\n                The device will be rebooted before each new job.\n\n            ``\"each_spec\"``\n                The device will be rebooted before running a new workload spec.\n\n                .. note:: This acts the same as ``each_job`` when execution order\n                          is set to by_iteration\n\n            ``\"run_completion\"``\n                 The device will be rebooted after the run has been completed.\n            '''),\n        ConfigurationPoint(\n            'device',\n            kind=str,\n            default='generic_android',\n            description='''\n            This setting defines what specific ``Device`` subclass will be used to\n            interact the connected device. 
Obviously, this must match your\n            setup.\n            ''',\n        ),\n        ConfigurationPoint(\n            'retry_on_status',\n            kind=list_of(Status),\n            default=['FAILED', 'PARTIAL'],\n            allowed_values=Status.levels[Status.RUNNING.value:],\n            description='''\n            This is list of statuses on which a job will be considered to have\n            failed and will be automatically retried up to ``max_retries``\n            times. This defaults to ``[\"FAILED\", \"PARTIAL\"]`` if not set.\n            Possible values are:\n\n            ``\"OK\"``\n                This iteration has completed and no errors have been detected\n\n            ``\"PARTIAL\"``\n                One or more instruments have failed (the iteration may still be\n                running).\n\n            ``\"FAILED\"``\n                The workload itself has failed.\n\n            ``\"ABORTED\"``\n                The user interrupted the workload.\n            ''',\n        ),\n        ConfigurationPoint(\n            'max_retries',\n            kind=int,\n            default=2,\n            description='''\n            The maximum number of times failed jobs will be retried before\n            giving up.\n\n            .. note:: This number does not include the original attempt\n            ''',\n        ),\n        ConfigurationPoint(\n            'bail_on_init_failure',\n            kind=bool,\n            default=True,\n            description='''\n            When jobs fail during their main setup and run phases, WA will\n            continue attempting to run the remaining jobs. However, by default,\n            if they fail during their early initialization phase, the entire run\n            will end without continuing to run jobs. 
Setting this to ``False``\n            means that WA will instead skip all the jobs from the job spec that\n            failed, but continue attempting to run others.\n            '''\n        ),\n        ConfigurationPoint(\n            'bail_on_job_failure',\n            kind=bool,\n            default=False,\n            description='''\n            When a job fails during its run phase, WA will attempt to retry the\n            job, then continue with remaining jobs after. Setting this to\n            ``True`` means WA will skip remaining jobs and end the run if a job\n            has retried the maximum number of times, and still fails.\n            '''\n        ),\n        ConfigurationPoint(\n            'allow_phone_home',\n            kind=bool, default=True,\n            description='''\n            Setting this to ``False`` prevents running any workloads that are marked\n            with 'phones_home', meaning they are at risk of exposing information\n            about the device to the outside world. 
For example, some benchmark\n            applications upload device data to a database owned by the\n            maintainers.\n\n            This can be used to minimise the risk of accidentally running such\n            workloads when testing confidential devices.\n            '''),\n    ]\n    configuration = {cp.name: cp for cp in config_points + meta_data}\n\n    @classmethod\n    def from_pod(cls, pod):\n        meta_pod = {}\n        for cfg_point in cls.meta_data:\n            meta_pod[cfg_point.name] = pod.pop(cfg_point.name, None)\n\n        device_config = pod.pop('device_config', None)\n        augmentations = pod.pop('augmentations', {})\n        getters = pod.pop('resource_getters', {})\n        instance = super(RunConfiguration, cls).from_pod(pod)\n        instance.device_config = device_config\n        instance.augmentations = augmentations\n        instance.resource_getters = getters\n        for cfg_point in cls.meta_data:\n            cfg_point.set_value(instance, meta_pod[cfg_point.name])\n\n        return instance\n\n    def __init__(self):\n        super(RunConfiguration, self).__init__()\n        for confpoint in self.meta_data:\n            confpoint.set_value(self, check_mandatory=False)\n        self.device_config = None\n        self.augmentations = {}\n        self.resource_getters = {}\n\n    def merge_device_config(self, plugin_cache):\n        \"\"\"\n        Merges global device config and validates that it is correct for the\n        selected device.\n        \"\"\"\n        # pylint: disable=no-member\n        if self.device is None:\n            msg = 'Attempting to merge device config with unspecified device'\n            raise RuntimeError(msg)\n        self.device_config = plugin_cache.get_plugin_config(self.device,\n                                                            generic_name=\"device_config\")\n\n    def add_augmentation(self, aug):\n        if aug.name in self.augmentations:\n            raise 
ValueError('Augmentation \"{}\" already added.'.format(aug.name))\n        self.augmentations[aug.name] = aug.get_config()\n\n    def add_resource_getter(self, getter):\n        if getter.name in self.resource_getters:\n            raise ValueError('Resource getter \"{}\" already added.'.format(getter.name))\n        self.resource_getters[getter.name] = getter.get_config()\n\n    def to_pod(self):\n        pod = super(RunConfiguration, self).to_pod()\n        pod['device_config'] = dict(self.device_config or {})\n        pod['augmentations'] = self.augmentations\n        pod['resource_getters'] = self.resource_getters\n        return pod\n\n\nclass JobSpec(Configuration):\n    # pylint: disable=access-member-before-definition,attribute-defined-outside-init\n\n    name = \"Job Spec\"\n\n    config_points = [\n        ConfigurationPoint('iterations', kind=int, default=1,\n                           description='''\n                           How many times to repeat this workload spec\n                           '''),\n        ConfigurationPoint('workload_name', kind=str, mandatory=True,\n                           aliases=[\"name\"],\n                           description='''\n                           The name of the workload to run.\n                           '''),\n        ConfigurationPoint('workload_parameters', kind=obj_dict, merge=True,\n                           aliases=[\"params\", \"workload_params\", \"parameters\"],\n                           description='''\n                           Parameter to be passed to the workload\n                           '''),\n        ConfigurationPoint('runtime_parameters', kind=obj_dict, merge=True,\n                           aliases=[\"runtime_params\"],\n                           description='''\n                           Runtime parameters to be set prior to running\n                           the workload.\n                           '''),\n        ConfigurationPoint('boot_parameters', kind=obj_dict,\n        
                   aliases=[\"boot_params\"],\n                           description='''\n                           Parameters to be used when rebooting the target\n                           prior to running the workload.\n                           '''),\n        ConfigurationPoint('label', kind=str,\n                           description='''\n                           Similar to IDs but do not have the uniqueness restriction.\n                           If specified, labels will be used by some output\n                           processors instead of (or in addition to) the workload\n                           name. For example, the csv output processor will put\n                           the label in the \"workload\" column of the CSV file.\n                           '''),\n        ConfigurationPoint('augmentations', kind=toggle_set, merge=True,\n                           aliases=[\"instruments\", \"processors\", \"instrumentation\",\n                                    \"output_processors\", \"augment\", \"result_processors\"],\n                           description='''\n                           The instruments and output processors to enable (or\n                           disabled using a ~) during this workload spec. This combines the\n                           \"instrumentation\" and \"result_processors\" from\n                           previous versions of WA (the old entries are now\n                           aliases for this).\n                           '''),\n        ConfigurationPoint('flash', kind=dict, merge=True,\n                           description='''\n\n                           '''),\n        ConfigurationPoint('classifiers', kind=dict, merge=True,\n                           description='''\n                           Classifiers allow you to tag metrics from this workload\n                           spec to help in post processing them. 
Theses are often\n                           used to help identify what runtime_parameters were used\n                           for results when post processing.\n                           '''),\n    ]\n    configuration = {cp.name: cp for cp in config_points}\n\n    @classmethod\n    def from_pod(cls, pod):\n        job_id = pod.pop('id')\n        instance = super(JobSpec, cls).from_pod(pod)\n        instance.id = job_id\n        return instance\n\n    @property\n    def section_id(self):\n        if self.id is not None:\n            return self.id.rsplit('-', 1)[0]\n\n    @property\n    def workload_id(self):\n        if self.id is not None:\n            return self.id.rsplit('-', 1)[-1]\n\n    def __init__(self):\n        super(JobSpec, self).__init__()\n        if self.classifiers is None:\n            self.classifiers = OrderedDict()\n        self.to_merge = defaultdict(OrderedDict)\n        self._sources = []\n        self.id = None\n        if self.boot_parameters is None:\n            self.boot_parameters = obj_dict()\n        if self.runtime_parameters is None:\n            self.runtime_parameters = obj_dict()\n\n    def to_pod(self):\n        pod = super(JobSpec, self).to_pod()\n        pod['id'] = self.id\n        return pod\n\n    def update_config(self, source, check_mandatory=True):  # pylint: disable=arguments-differ\n        self._sources.append(source)\n        values = source.config\n        for k, v in values.items():\n            if k == \"id\":\n                continue\n            elif k.endswith('_parameters'):\n                if v:\n                    self.to_merge[k][source] = copy(v)\n            else:\n                try:\n                    self.set(k, v, check_mandatory=check_mandatory)\n                except ConfigError as e:\n                    msg = 'Error in {}:\\n\\t{}'\n                    raise ConfigError(msg.format(source.name, e.message))\n\n    def merge_workload_parameters(self, plugin_cache):\n        # merge 
global generic and specific config\n        workload_params = plugin_cache.get_plugin_config(self.workload_name,\n                                                         generic_name=\"workload_parameters\",\n                                                         is_final=False)\n\n        cfg_points = plugin_cache.get_plugin_parameters(self.workload_name)\n        for source in self._sources:\n            config = dict(self.to_merge[\"workload_parameters\"].get(source, {}))\n            if not config:\n                continue\n\n            for name, cfg_point in cfg_points.items():\n                if name in config:\n                    value = config.pop(name)\n                    cfg_point.set_value(workload_params, value,\n                                        check_mandatory=False)\n            if config:\n                msg = 'Unexpected config \"{}\" for \"{}\"'\n                raise ConfigError(msg.format(config, self.workload_name))\n\n        self.workload_parameters = workload_params\n\n    def merge_runtime_parameters(self, plugin_cache, target_manager):\n\n        # Order global runtime parameters\n        runtime_parameters = OrderedDict()\n        try:\n            global_runtime_params = plugin_cache.get_plugin_config(\"runtime_parameters\")\n        except NotFoundError:\n            global_runtime_params = {}\n        for source in plugin_cache.sources:\n            if source in global_runtime_params:\n                runtime_parameters[source] = global_runtime_params[source]\n\n        # Add runtime parameters from JobSpec\n        for source, values in self.to_merge['runtime_parameters'].items():\n            runtime_parameters[source] = values\n\n        # Merge\n        self.runtime_parameters = target_manager.merge_runtime_parameters(runtime_parameters)\n\n    def finalize(self):\n        self.id = \"-\".join([str(source.config['id'])\n                            for source in self._sources[1:]])  # ignore first id, \"global\"\n\n   
     # ensure *_parameters are always obj_dict's\n        self.boot_parameters = obj_dict(list((self.boot_parameters or {}).items()))\n        self.runtime_parameters = obj_dict(list((self.runtime_parameters or {}).items()))\n        self.workload_parameters = obj_dict(list((self.workload_parameters or {}).items()))\n\n        if self.label is None:\n            self.label = self.workload_name\n\n\n# This is used to construct the list of Jobs WA will run\nclass JobGenerator(object):\n\n    name = \"Jobs Configuration\"\n\n    @property\n    def enabled_instruments(self):\n        self._read_augmentations = True\n        if self._enabled_instruments is None:\n            self._enabled_instruments = []\n            for entry in list(self._enabled_augmentations.merge_with(self.disabled_augmentations).values()):\n                entry_cls = self.plugin_cache.get_plugin_class(entry)\n                if entry_cls.kind == 'instrument':\n                    self._enabled_instruments.append(entry)\n        return self._enabled_instruments\n\n    @property\n    def enabled_processors(self):\n        self._read_augmentations = True\n        if self._enabled_processors is None:\n            self._enabled_processors = []\n            for entry in list(self._enabled_augmentations.merge_with(self.disabled_augmentations).values()):\n                entry_cls = self.plugin_cache.get_plugin_class(entry)\n                if entry_cls.kind == 'output_processor':\n                    self._enabled_processors.append(entry)\n        return self._enabled_processors\n\n    def __init__(self, plugin_cache):\n        self.plugin_cache = plugin_cache\n        self.ids_to_run = []\n        self.workloads = []\n        self._enabled_augmentations = toggle_set()\n        self._enabled_instruments = None\n        self._enabled_processors = None\n        self._read_augmentations = False\n        self.disabled_augmentations = set()\n\n        self.job_spec_template = 
obj_dict(not_in_dict=['name'])\n        self.job_spec_template.name = \"globally specified job spec configuration\"\n        self.job_spec_template.id = \"global\"\n        # Load defaults\n        for cfg_point in JobSpec.configuration.values():\n            cfg_point.set_value(self.job_spec_template, check_mandatory=False)\n\n        self.root_node = SectionNode(self.job_spec_template)\n\n    def set_global_value(self, name, value):\n        JobSpec.configuration[name].set_value(self.job_spec_template, value,\n                                              check_mandatory=False)\n        if name == \"augmentations\":\n            self.update_augmentations(value)\n\n    def add_section(self, section, workloads, group):\n        new_node = self.root_node.add_section(section, group)\n        with log.indentcontext():\n            for workload in workloads:\n                new_node.add_workload(workload)\n\n    def add_workload(self, workload):\n        self.root_node.add_workload(workload)\n\n    def disable_augmentations(self, augmentations):\n        for entry in augmentations:\n            if entry == '~~':\n                continue\n            if entry.startswith('~'):\n                entry = entry[1:]\n            try:\n                self.plugin_cache.get_plugin_class(entry)\n            except NotFoundError:\n                raise ConfigError('Error disabling unknown augmentation: \"{}\"'.format(entry))\n        self.disabled_augmentations = self.disabled_augmentations.union(augmentations)\n\n    def update_augmentations(self, value):\n        if self._read_augmentations:\n            msg = 'Cannot update augmentations after they have been accessed'\n            raise RuntimeError(msg)\n        self._enabled_augmentations = self._enabled_augmentations.merge_with(value)\n\n    def only_run_ids(self, ids):\n        if isinstance(ids, str):\n            ids = [ids]\n        self.ids_to_run = ids\n\n    def generate_job_specs(self, target_manager):\n        
specs = []\n        for leaf in self.root_node.leaves():\n            workload_entries = leaf.workload_entries\n            sections = [leaf]\n            for ancestor in leaf.ancestors():\n                workload_entries = ancestor.workload_entries + workload_entries\n                sections.insert(0, ancestor)\n\n            for workload_entry in workload_entries:\n                job_spec = create_job_spec(deepcopy(workload_entry), sections,\n                                           target_manager, self.plugin_cache,\n                                           self.disabled_augmentations)\n                if self.ids_to_run:\n                    for job_id in self.ids_to_run:\n                        if job_id in job_spec.id:\n                            break\n                    else:\n                        continue\n                self.update_augmentations(list(job_spec.augmentations.values()))\n                specs.append(job_spec)\n        return specs\n\n\ndef create_job_spec(workload_entry, sections, target_manager, plugin_cache,\n                    disabled_augmentations):\n    job_spec = JobSpec()\n\n    # PHASE 2.1: Merge general job spec configuration\n    for section in sections:\n        job_spec.update_config(section, check_mandatory=False)\n\n        # Add classifiers for any present groups\n        if section.id == 'global' or section.group is None:\n            # Ignore global config and default group\n            continue\n        job_spec.classifiers[section.group] = section.id\n    job_spec.update_config(workload_entry, check_mandatory=False)\n\n    # PHASE 2.2: Merge global, section and workload entry \"workload_parameters\"\n    job_spec.merge_workload_parameters(plugin_cache)\n\n    # TODO: PHASE 2.3: Validate device runtime/boot parameters\n    job_spec.merge_runtime_parameters(plugin_cache, target_manager)\n    target_manager.validate_runtime_parameters(job_spec.runtime_parameters)\n\n    # PHASE 2.4: Disable globally disabled 
augmentations\n    job_spec.set(\"augmentations\", disabled_augmentations)\n    job_spec.finalize()\n\n    return job_spec\n\n\ndef get_config_point_map(params):\n    pmap = {}\n    for p in params:\n        pmap[p.name] = p\n        for alias in p.aliases:\n            pmap[alias] = p\n    return pmap\n\n\nsettings = MetaConfiguration(os.environ)\n"
  },
  {
    "path": "wa/framework/configuration/default.py",
    "content": "#    Copyright 2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\nfrom wa.framework.configuration.core import MetaConfiguration, RunConfiguration\nfrom wa.framework.configuration.plugin_cache import PluginCache\nfrom wa.utils.serializer import yaml\nfrom wa.utils.doc import strip_inlined_text\n\n\nDEFAULT_AUGMENTATIONS = [\n    'execution_time',\n    'interrupts',\n    'cpufreq',\n    'status',\n    'csv',\n]\n\n\ndef _format_yaml_comment(param, short_description=False):\n    comment = param.description\n    comment = strip_inlined_text(comment)\n    if short_description:\n        comment = comment.split('\\n\\n')[0]\n    comment = comment.replace('\\n', '\\n# ')\n    comment = \"# {}\\n\".format(comment)\n    return comment\n\n\ndef _format_augmentations(output):\n    plugin_cache = PluginCache()\n    output.write(\"augmentations:\\n\")\n    for plugin in DEFAULT_AUGMENTATIONS:\n        plugin_cls = plugin_cache.loader.get_plugin_class(plugin)\n        output.writelines(_format_yaml_comment(plugin_cls, short_description=True))\n        output.write(\" - {}\\n\".format(plugin))\n        output.write(\"\\n\")\n\n\ndef generate_default_config(path):\n    with open(path, 'w') as output:\n        for param in MetaConfiguration.config_points + RunConfiguration.config_points:\n            entry = {param.name: param.default}\n            write_param_yaml(entry, param, output)\n        _format_augmentations(output)\n\n\ndef 
write_param_yaml(entry, param, output):\n    comment = _format_yaml_comment(param)\n    output.writelines(comment)\n    yaml.dump(entry, output, default_flow_style=False)\n    output.write(\"\\n\")\n"
  },
  {
    "path": "wa/framework/configuration/execution.py",
    "content": "#    Copyright 2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\nimport random\nfrom itertools import groupby, chain\n\nfrom future.moves.itertools import zip_longest\n\nfrom devlib.utils.types import identifier\n\nfrom wa.framework.configuration.core import (MetaConfiguration, RunConfiguration,\n                                             JobGenerator, settings)\nfrom wa.framework.configuration.parsers import ConfigParser\nfrom wa.framework.configuration.plugin_cache import PluginCache\nfrom wa.framework.exception import NotFoundError, ConfigError\nfrom wa.framework.job import Job\nfrom wa.utils import log\nfrom wa.utils.serializer import Podable\n\n\nclass CombinedConfig(Podable):\n\n    _pod_serialization_version = 1\n\n    @staticmethod\n    def from_pod(pod):\n        instance = super(CombinedConfig, CombinedConfig).from_pod(pod)\n        instance.settings = MetaConfiguration.from_pod(pod.get('settings', {}))\n        instance.run_config = RunConfiguration.from_pod(pod.get('run_config', {}))\n        return instance\n\n    def __init__(self, settings=None, run_config=None):  # pylint: disable=redefined-outer-name\n        super(CombinedConfig, self).__init__()\n        self.settings = settings\n        self.run_config = run_config\n\n    def to_pod(self):\n        pod = super(CombinedConfig, self).to_pod()\n        pod['settings'] = self.settings.to_pod()\n        pod['run_config'] = self.run_config.to_pod()\n        
return pod\n\n    @staticmethod\n    def _pod_upgrade_v1(pod):\n        pod['_pod_version'] = pod.get('_pod_version', 1)\n        return pod\n\n\nclass ConfigManager(object):\n    \"\"\"\n    Represents run-time state of WA. Mostly used as a container for loaded\n    configuration and discovered plugins.\n\n    This exists outside of any command or run and is associated with the running\n    instance of wA itself.\n    \"\"\"\n\n    @property\n    def enabled_instruments(self):\n        return self.jobs_config.enabled_instruments\n\n    @property\n    def enabled_processors(self):\n        return self.jobs_config.enabled_processors\n\n    @property\n    def job_specs(self):\n        if not self._jobs_generated:\n            msg = 'Attempting to access job specs before '\\\n                  'jobs have been generated'\n            raise RuntimeError(msg)\n        return [j.spec for j in self._jobs]\n\n    @property\n    def jobs(self):\n        if not self._jobs_generated:\n            msg = 'Attempting to access jobs before '\\\n                  'they have been generated'\n            raise RuntimeError(msg)\n        return self._jobs\n\n    def __init__(self, settings=settings):  # pylint: disable=redefined-outer-name\n        self.settings = settings\n        self.run_config = RunConfiguration()\n        self.plugin_cache = PluginCache()\n        self.jobs_config = JobGenerator(self.plugin_cache)\n        self.loaded_config_sources = []\n        self._config_parser = ConfigParser()\n        self._jobs = []\n        self._jobs_generated = False\n        self.agenda = None\n\n    def load_config_file(self, filepath):\n        includes = self._config_parser.load_from_path(self, filepath)\n        self.loaded_config_sources.append(filepath)\n        self.loaded_config_sources.extend(includes)\n\n    def load_config(self, values, source):\n        self._config_parser.load(self, values, source)\n        self.loaded_config_sources.append(source)\n\n    def 
get_plugin(self, name=None, kind=None, *args, **kwargs):\n        return self.plugin_cache.get_plugin(identifier(name), kind, *args, **kwargs)\n\n    def get_instruments(self, target):\n        instruments = []\n        for name in self.enabled_instruments:\n            try:\n                instruments.append(self.get_plugin(name, kind='instrument',\n                                                   target=target))\n            except NotFoundError:\n                msg = 'Instrument \"{}\" not found'\n                raise NotFoundError(msg.format(name))\n        return instruments\n\n    def get_processors(self):\n        processors = []\n        for name in self.enabled_processors:\n            try:\n                proc = self.plugin_cache.get_plugin(name, kind='output_processor')\n            except NotFoundError:\n                msg = 'Output Processor \"{}\" not found'\n                raise NotFoundError(msg.format(name))\n            processors.append(proc)\n        return processors\n\n    def get_config(self):\n        return CombinedConfig(self.settings, self.run_config)\n\n    def finalize(self):\n        if not self.agenda:\n            msg = 'Attempting to finalize config before agenda has been set'\n            raise RuntimeError(msg)\n        self.run_config.merge_device_config(self.plugin_cache)\n        return self.get_config()\n\n    def generate_jobs(self, context):\n        job_specs = self.jobs_config.generate_job_specs(context.tm)\n        if not job_specs:\n            msg = 'No jobs available for running.'\n            raise ConfigError(msg)\n        exec_order = self.run_config.execution_order\n        log.indent()\n        for spec, i in permute_iterations(job_specs, exec_order):\n            job = Job(spec, i, context)\n            job.load(context.tm.target)\n            self._jobs.append(job)\n            context.run_state.add_job(job)\n        log.dedent()\n        self._jobs_generated = True\n\n\ndef permute_by_workload(specs):\n 
   \"\"\"\n    This is the \"classic\" implementation that executes all iterations of a\n    workload spec before proceeding onto the next spec.\n\n    \"\"\"\n    for spec in specs:\n        for i in range(1, spec.iterations + 1):\n            yield (spec, i)\n\n\ndef permute_by_iteration(specs):\n    \"\"\"\n    Runs the first iteration for all benchmarks first, before proceeding to the\n    next iteration, i.e. A1, B1, C1, A2, B2, C2...  instead of  A1, A1, B1, B2,\n    C1, C2...\n\n    If multiple sections were specified in the agenda, this will run all\n    sections for the first global spec first, followed by all sections for the\n    second spec, etc.\n\n    e.g. given sections X and Y, and global specs A and B, with 2 iterations,\n    this will run\n\n    X.A1, Y.A1, X.B1, Y.B1, X.A2, Y.A2, X.B2, Y.B2\n\n    \"\"\"\n    groups = [list(g) for _, g in groupby(specs, lambda s: s.workload_id)]\n\n    all_tuples = []\n    for spec in chain(*groups):\n        all_tuples.append([(spec, i + 1)\n                           for i in range(spec.iterations)])\n    for t in chain(*list(map(list, zip_longest(*all_tuples)))):\n        if t is not None:\n            yield t\n\n\ndef permute_by_section(specs):\n    \"\"\"\n    Runs the first iteration for all benchmarks first, before proceeding to the\n    next iteration, i.e. A1, B1, C1, A2, B2, C2...  instead of  A1, A1, B1, B2,\n    C1, C2...\n\n    If multiple sections were specified in the agenda, this will run all specs\n    for the first section followed by all specs for the second section, etc.\n\n    e.g. 
given sections X and Y, and global specs A and B, with 2 iterations,\n    this will run\n\n    X.A1, X.B1, Y.A1, Y.B1, X.A2, X.B2, Y.A2, Y.B2\n\n    \"\"\"\n    groups = [list(g) for _, g in groupby(specs, lambda s: s.section_id)]\n\n    all_tuples = []\n    for spec in chain(*groups):\n        all_tuples.append([(spec, i + 1)\n                           for i in range(spec.iterations)])\n    for t in chain(*list(map(list, zip_longest(*all_tuples)))):\n        if t is not None:\n            yield t\n\n\ndef permute_randomly(specs):\n    \"\"\"\n    This will generate a random permutation of specs/iteration tuples.\n\n    \"\"\"\n    result = []\n    for spec in specs:\n        for i in range(1, spec.iterations + 1):\n            result.append((spec, i))\n    random.shuffle(result)\n    for t in result:\n        yield t\n\n\npermute_map = {\n    'by_iteration': permute_by_iteration,\n    'by_workload': permute_by_workload,\n    'by_section': permute_by_section,\n    'random': permute_randomly,\n}\n\n\ndef permute_iterations(specs, exec_order):\n    if exec_order not in permute_map:\n        msg = 'Unknown execution order \"{}\"; must be in: {}'\n        raise ValueError(msg.format(exec_order, list(permute_map.keys())))\n    return permute_map[exec_order](specs)\n"
  },
  {
    "path": "wa/framework/configuration/parsers.py",
    "content": "#    Copyright 2015-2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n# pylint: disable=no-self-use\n\nimport os\nimport logging\nfrom functools import reduce  # pylint: disable=redefined-builtin\n\nfrom devlib.utils.types import identifier\n\nfrom wa.framework.configuration.core import JobSpec\nfrom wa.framework.exception import ConfigError\nfrom wa.utils import log\nfrom wa.utils.serializer import json, read_pod, SerializerSyntaxError\nfrom wa.utils.types import toggle_set, counter\nfrom wa.utils.misc import merge_config_values, isiterable\n\n\nlogger = logging.getLogger('config')\n\n\nclass ConfigParser(object):\n\n    def load_from_path(self, state, filepath):\n        raw, includes = _load_file(filepath, \"Config\")\n        self.load(state, raw, filepath)\n        return includes\n\n    def load(self, state, raw, source, wrap_exceptions=True):  # pylint: disable=too-many-branches\n        logger.debug('Parsing config from \"{}\"'.format(source))\n        log.indent()\n        try:\n            state.plugin_cache.add_source(source)\n            if 'run_name' in raw:\n                msg = '\"run_name\" can only be specified in the config '\\\n                      'section of an agenda'\n                raise ConfigError(msg)\n\n            if 'id' in raw:\n                raise ConfigError('\"id\" cannot be set globally')\n\n            merge_augmentations(raw)\n\n            # Get WA core configuration\n            for 
cfg_point in state.settings.configuration.values():\n                value = pop_aliased_param(cfg_point, raw)\n                if value is not None:\n                    logger.debug('Setting meta \"{}\" to \"{}\"'.format(cfg_point.name, value))\n                    state.settings.set(cfg_point.name, value)\n\n            # Get run specific configuration\n            for cfg_point in state.run_config.configuration.values():\n                value = pop_aliased_param(cfg_point, raw)\n                if value is not None:\n                    logger.debug('Setting run \"{}\" to \"{}\"'.format(cfg_point.name, value))\n                    state.run_config.set(cfg_point.name, value)\n\n            # Get global job spec configuration\n            for cfg_point in JobSpec.configuration.values():\n                value = pop_aliased_param(cfg_point, raw)\n                if value is not None:\n                    logger.debug('Setting global \"{}\" to \"{}\"'.format(cfg_point.name, value))\n                    state.jobs_config.set_global_value(cfg_point.name, value)\n\n            for name, values in raw.items():\n                # Assume that all leftover config is for a plug-in or a global\n                # alias it is up to PluginCache to assert this assumption\n                logger.debug('Caching \"{}\" with \"{}\"'.format(identifier(name), values))\n                state.plugin_cache.add_configs(identifier(name), values, source)\n\n        except ConfigError as e:\n            if wrap_exceptions:\n                raise ConfigError('Error in \"{}\":\\n{}'.format(source, str(e)))\n            else:\n                raise e\n        finally:\n            log.dedent()\n\n\nclass AgendaParser(object):\n\n    def load_from_path(self, state, filepath):\n        raw, includes = _load_file(filepath, 'Agenda')\n        self.load(state, raw, filepath)\n        return includes\n\n    def load(self, state, raw, source):\n        logger.debug('Parsing agenda from 
\"{}\"'.format(source))\n        log.indent()\n        try:\n            if not isinstance(raw, dict):\n                raise ConfigError('Invalid agenda, top level entry must be a dict')\n\n            self._populate_and_validate_config(state, raw, source)\n            sections = self._pop_sections(raw)\n            global_workloads = self._pop_workloads(raw)\n            if not global_workloads:\n                msg = 'No jobs avaliable. Please ensure you have specified at '\\\n                      'least one workload to run.'\n                raise ConfigError(msg)\n\n            if raw:\n                msg = 'Invalid top level agenda entry(ies): \"{}\"'\n                raise ConfigError(msg.format('\", \"'.join(list(raw.keys()))))\n\n            sect_ids, wkl_ids = self._collect_ids(sections, global_workloads)\n            self._process_global_workloads(state, global_workloads, wkl_ids)\n            self._process_sections(state, sections, sect_ids, wkl_ids)\n\n            state.agenda = source\n\n        except (ConfigError, SerializerSyntaxError) as e:\n            raise ConfigError('Error in \"{}\":\\n\\t{}'.format(source, str(e)))\n        finally:\n            log.dedent()\n\n    def _populate_and_validate_config(self, state, raw, source):\n        for name in ['config', 'global']:\n            entry = raw.pop(name, None)\n            if entry is None:\n                continue\n\n            if not isinstance(entry, dict):\n                msg = 'Invalid entry \"{}\" - must be a dict'\n                raise ConfigError(msg.format(name))\n\n            if 'run_name' in entry:\n                value = entry.pop('run_name')\n                logger.debug('Setting run name to \"{}\"'.format(value))\n                state.run_config.set('run_name', value)\n\n            state.load_config(entry, '{}/{}'.format(source, name))\n\n    def _pop_sections(self, raw):\n        sections = raw.pop(\"sections\", [])\n        if not isinstance(sections, list):\n          
  raise ConfigError('Invalid entry \"sections\" - must be a list')\n        for section in sections:\n            if not hasattr(section, 'items'):\n                raise ConfigError('Invalid section \"{}\" - must be a dict'.format(section))\n        return sections\n\n    def _pop_workloads(self, raw):\n        workloads = raw.pop(\"workloads\", [])\n        if not isinstance(workloads, list):\n            raise ConfigError('Invalid entry \"workloads\" - must be a list')\n        return workloads\n\n    def _collect_ids(self, sections, global_workloads):\n        seen_section_ids = set()\n        seen_workload_ids = set()\n\n        for workload in global_workloads:\n            workload = _get_workload_entry(workload)\n            _collect_valid_id(workload.get(\"id\"), seen_workload_ids, \"workload\")\n\n        for section in sections:\n            _collect_valid_id(section.get(\"id\"), seen_section_ids, \"section\")\n            for workload in section[\"workloads\"] if \"workloads\" in section else []:\n                workload = _get_workload_entry(workload)\n                _collect_valid_id(workload.get(\"id\"), seen_workload_ids,\n                                  \"workload\")\n\n        return seen_section_ids, seen_workload_ids\n\n    def _process_global_workloads(self, state, global_workloads, seen_wkl_ids):\n        for workload_entry in global_workloads:\n            workload = _process_workload_entry(workload_entry, seen_wkl_ids,\n                                               state.jobs_config)\n            state.jobs_config.add_workload(workload)\n\n    def _process_sections(self, state, sections, seen_sect_ids, seen_wkl_ids):\n        for section in sections:\n            workloads = []\n            for workload_entry in section.pop(\"workloads\", []):\n                workload = _process_workload_entry(workload_entry, seen_wkl_ids,\n                                                   state.jobs_config)\n                
workloads.append(workload)\n\n            if 'params' in section:\n                if 'runtime_params' in section:\n                    msg = 'both \"params\" and \"runtime_params\" specified in a '\\\n                          'section: \"{}\"'\n                    raise ConfigError(msg.format(json.dumps(section, indent=None)))\n                section['runtime_params'] = section.pop('params')\n\n            group = section.pop('group', None)\n            section = _construct_valid_entry(section, seen_sect_ids,\n                                             \"s\", state.jobs_config)\n            state.jobs_config.add_section(section, workloads, group)\n\n\n########################\n### Helper functions ###\n########################\n\ndef pop_aliased_param(cfg_point, d, default=None):\n    \"\"\"\n    Given a ConfigurationPoint and a dict, this function will search the dict for\n    the ConfigurationPoint's name/aliases. If more than one is found it will raise\n    a ConfigError. If one (and only one) is found then it will return the value\n    for the ConfigurationPoint. 
If the name or aliases are not present in the dict it will\n    return the \"default\" parameter of this function.\n    \"\"\"\n    aliases = [cfg_point.name] + cfg_point.aliases\n    alias_map = [a for a in aliases if a in d]\n    if len(alias_map) > 1:\n        raise ConfigError('Duplicate entry: {}'.format(aliases))\n    elif alias_map:\n        return d.pop(alias_map[0])\n    else:\n        return default\n\n\ndef _load_file(filepath, error_name):\n    if not os.path.isfile(filepath):\n        raise ValueError(\"{} does not exist\".format(filepath))\n    try:\n        raw = read_pod(filepath)\n        includes = _process_includes(raw, filepath, error_name)\n    except SerializerSyntaxError as e:\n        raise ConfigError('Error parsing {} {}: {}'.format(error_name, filepath, e))\n    if not isinstance(raw, dict):\n        message = '{} does not contain a valid {} structure; top level must be a dict.'\n        raise ConfigError(message.format(filepath, error_name))\n    return raw, includes\n\n\ndef _config_values_from_includes(filepath, include_path, error_name):\n    source_dir = os.path.dirname(filepath)\n    included_files = []\n\n    if isinstance(include_path, str):\n        include_path = os.path.expanduser(os.path.join(source_dir, include_path))\n\n        replace_value, includes = _load_file(include_path, error_name)\n\n        included_files.append(include_path)\n        included_files.extend(includes)\n    elif isinstance(include_path, list):\n        replace_value = {}\n\n        for path in include_path:\n            include_path = os.path.expanduser(os.path.join(source_dir, path))\n\n            sub_replace_value, includes = _load_file(include_path, error_name)\n            for key, val in sub_replace_value.items():\n                replace_value[key] = merge_config_values(val, replace_value.get(key, None))\n\n            included_files.append(include_path)\n            included_files.extend(includes)\n    else:\n        message = \"{} does not 
contain a valid {} structure; value for 'include#' must be a string or a list\"\n        raise ConfigError(message.format(filepath, error_name))\n\n    return replace_value, included_files\n\n\ndef _process_includes(raw, filepath, error_name):\n    if not raw:\n        return []\n\n    included_files = []\n    replace_value = None\n\n    if hasattr(raw, 'items'):\n        for key, value in raw.items():\n            if key == 'include#':\n                replace_value, includes = _config_values_from_includes(filepath, value, error_name)\n                included_files.extend(includes)\n            elif hasattr(value, 'items') or isiterable(value):\n                includes = _process_includes(value, filepath, error_name)\n                included_files.extend(includes)\n    elif isiterable(raw):\n        for element in raw:\n            if hasattr(element, 'items') or isiterable(element):\n                includes = _process_includes(element, filepath, error_name)\n                included_files.extend(includes)\n\n    if replace_value is not None:\n        del raw['include#']\n        for key, value in replace_value.items():\n            raw[key] = merge_config_values(value, raw.get(key, None))\n\n    return included_files\n\n\ndef merge_augmentations(raw):\n    \"\"\"\n    Since, from configuration perspective, output processors and instruments are\n    handled identically, the configuration entries are now interchangeable. E.g. it is\n    now valid to specify a output processor in an instruments list. 
This is to make things\n    easier for the users, as, from their perspective, the distinction is somewhat arbitrary.\n\n    For backwards compatibility, both entries are still valid, and this\n    function merges them together into a single \"augmentations\" set, ensuring\n    that there are no conflicts between the entries.\n\n    \"\"\"\n    cfg_point = JobSpec.configuration['augmentations']\n    names = [cfg_point.name, ] + cfg_point.aliases\n\n    entries = []\n    for n in names:\n        if n not in raw:\n            continue\n        value = raw.pop(n)\n        try:\n            entries.append(toggle_set(value))\n        except TypeError as exc:\n            msg = 'Invalid value \"{}\" for \"{}\": {}'\n            raise ConfigError(msg.format(value, n, exc))\n\n    # Make sure none of the specified aliases conflict with each other\n    to_check = list(entries)\n    while len(to_check) > 1:\n        check_entry = to_check.pop()\n        for e in to_check:\n            conflicts = check_entry.conflicts_with(e)\n            if conflicts:\n                msg = '\"{}\" and \"{}\" have conflicting entries: {}'\n                conflict_string = ', '.join('\"{}\"'.format(c.strip(\"~\"))\n                                            for c in conflicts)\n                raise ConfigError(msg.format(check_entry, e, conflict_string))\n\n    if entries:\n        raw['augmentations'] = reduce(lambda x, y: x.union(y), entries)\n\n\ndef _pop_aliased(d, names, entry_id):\n    name_count = sum(1 for n in names if n in d)\n    if name_count > 1:\n        names_list = ', '.join(names)\n        msg = 'Invalid workload entry \"{}\": at most one of ({}) must be specified.'\n        raise ConfigError(msg.format(entry_id, names_list))\n    for name in names:\n        if name in d:\n            return d.pop(name)\n    return None\n\n\ndef _construct_valid_entry(raw, seen_ids, prefix, jobs_config):\n    workload_entry = {}\n\n    # Generate an automatic ID if the entry doesn't 
already have one\n    if 'id' not in raw:\n        while True:\n            new_id = '{}{}'.format(prefix, counter(name=prefix))\n            if new_id not in seen_ids:\n                break\n        workload_entry['id'] = new_id\n        seen_ids.add(new_id)\n    else:\n        workload_entry['id'] = raw.pop('id')\n\n    # Process instruments\n    merge_augmentations(raw)\n\n    # Validate all workload_entry\n    for name, cfg_point in JobSpec.configuration.items():\n        value = pop_aliased_param(cfg_point, raw)\n        if value is not None:\n            value = cfg_point.kind(value)\n            cfg_point.validate_value(name, value)\n            workload_entry[name] = value\n\n    if \"augmentations\" in workload_entry:\n        if '~~' in workload_entry['augmentations']:\n            msg = '\"~~\" can only be specfied in top-level config, and not for individual workloads/sections'\n            raise ConfigError(msg)\n        jobs_config.update_augmentations(workload_entry['augmentations'])\n\n    # error if there are unknown workload_entry\n    if raw:\n        msg = 'Invalid entry(ies) in \"{}\": \"{}\"'\n        raise ConfigError(msg.format(workload_entry['id'], ', '.join(list(raw.keys()))))\n\n    return workload_entry\n\n\ndef _collect_valid_id(entry_id, seen_ids, entry_type):\n    if entry_id is None:\n        return\n    entry_id = str(entry_id)\n    if entry_id in seen_ids:\n        raise ConfigError('Duplicate {} ID \"{}\".'.format(entry_type, entry_id))\n    # \"-\" is reserved for joining section and workload IDs\n    if \"-\" in entry_id:\n        msg = 'Invalid {} ID \"{}\"; IDs cannot contain a \"-\"'\n        raise ConfigError(msg.format(entry_type, entry_id))\n    if entry_id == \"global\":\n        msg = 'Invalid {} ID \"global\"; is a reserved ID'\n        raise ConfigError(msg.format(entry_type))\n    seen_ids.add(entry_id)\n\n\ndef _get_workload_entry(workload):\n    if isinstance(workload, str):\n        workload = {'name': workload}\n  
  elif not isinstance(workload, dict):\n        raise ConfigError('Invalid workload entry: \"{}\"'.format(workload))\n    return workload\n\n\ndef _process_workload_entry(workload, seen_workload_ids, jobs_config):\n    workload = _get_workload_entry(workload)\n    workload = _construct_valid_entry(workload, seen_workload_ids,\n                                      \"wk\", jobs_config)\n    if \"workload_name\" not in workload:\n        raise ConfigError('No workload name specified in entry {}'.format(workload['id']))\n    return workload\n"
  },
  {
    "path": "wa/framework/configuration/plugin_cache.py",
    "content": "#    Copyright 2016-2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nfrom copy import copy\nfrom collections import defaultdict\nfrom itertools import chain\n\nfrom devlib.utils.misc import memoized\n\nfrom wa.framework import pluginloader\nfrom wa.framework.configuration.core import get_config_point_map\nfrom wa.framework.exception import ConfigError, NotFoundError\nfrom wa.framework.target.descriptor import list_target_descriptions\nfrom wa.utils.types import obj_dict, caseless_string\n\nGENERIC_CONFIGS = [\"device_config\", \"workload_parameters\",\n                   \"boot_parameters\", \"runtime_parameters\"]\n\n\nclass PluginCache(object):\n    \"\"\"\n    The plugin cache is used to store configuration that cannot be processed at\n    this stage, whether that's because it is unknown if it's needed\n    (in the case of disabled plug-ins) or it is not known what it belongs to (in\n    the case of \"device-config\" etc.). 
It also maintains where configuration came\n    from, and the priority order of said sources.\n    \"\"\"\n\n    def __init__(self, loader=pluginloader):\n        self.loader = loader\n        self.sources = []\n        self.plugin_configs = defaultdict(lambda: defaultdict(dict))\n        self.global_alias_values = defaultdict(dict)\n        self.targets = {td.name: td for td in list_target_descriptions()}\n\n        # Generate a mapping of what global aliases belong to\n        self._global_alias_map = defaultdict(dict)\n        self._list_of_global_aliases = set()\n        for plugin in self.loader.list_plugins():\n            for param in plugin.parameters:\n                if param.global_alias:\n                    self._global_alias_map[plugin.name][param.global_alias] = param\n                    self._list_of_global_aliases.add(param.global_alias)\n\n    def add_source(self, source):\n        if source in self.sources:\n            msg = \"Source '{}' has already been added.\"\n            raise Exception(msg.format(source))\n        self.sources.append(source)\n\n    def add_global_alias(self, alias, value, source):\n        if source not in self.sources:\n            msg = \"Source '{}' has not been added to the plugin cache.\"\n            raise RuntimeError(msg.format(source))\n\n        if not self.is_global_alias(alias):\n            msg = \"'{} is not a valid global alias'\"\n            raise RuntimeError(msg.format(alias))\n\n        self.global_alias_values[alias][source] = value\n\n    def add_configs(self, plugin_name, values, source):\n        if self.is_global_alias(plugin_name):\n            self.add_global_alias(plugin_name, values, source)\n            return\n\n        if source not in self.sources:\n            msg = \"Source '{}' has not been added to the plugin cache.\"\n            raise RuntimeError(msg.format(source))\n\n        if caseless_string(plugin_name) in ['global', 'config']:\n            msg = '\"{}\" entry specified inside 
config/global section; If this is ' \\\n                  'defined in a config file, move the entry content into the top level'\n            raise ConfigError(msg.format((plugin_name)))\n\n        if (not self.loader.has_plugin(plugin_name)\n                and plugin_name not in self.targets\n                and plugin_name not in GENERIC_CONFIGS):\n            msg = 'configuration provided for unknown plugin \"{}\"'\n            raise ConfigError(msg.format(plugin_name))\n\n        if not hasattr(values, 'items'):\n            msg = 'Plugin configuration for \"{}\" not a dictionary ({} is {})'\n            raise ConfigError(msg.format(plugin_name, repr(values), type(values)))\n\n        for name, value in values.items():\n            if (plugin_name not in GENERIC_CONFIGS\n                    and name not in self.get_plugin_parameters(plugin_name)):\n                msg = \"'{}' is not a valid parameter for '{}'\"\n                raise ConfigError(msg.format(name, plugin_name))\n\n            self.plugin_configs[plugin_name][source][name] = value\n\n    def is_global_alias(self, name):\n        return name in self._list_of_global_aliases\n\n    def list_plugins(self, kind=None):\n        return self.loader.list_plugins(kind)\n\n    def get_plugin_config(self, plugin_name, generic_name=None, is_final=True):\n        config = obj_dict(not_in_dict=['name'])\n        config.name = plugin_name\n\n        if plugin_name not in GENERIC_CONFIGS:\n            self._set_plugin_defaults(plugin_name, config)\n            self._set_from_global_aliases(plugin_name, config)\n\n        if generic_name is None:\n            # Perform a simple merge with the order of sources representing\n            # priority\n            plugin_config = self.plugin_configs[plugin_name]\n            cfg_points = self.get_plugin_parameters(plugin_name)\n            for source in self.sources:\n                if source not in plugin_config:\n                    continue\n                for 
name, value in plugin_config[source].items():\n                    cfg_points[name].set_value(config, value=value)\n        else:\n            # A more complicated merge that involves priority of sources and\n            # specificity\n            self._merge_using_priority_specificity(plugin_name, generic_name,\n                                                   config, is_final)\n\n        return config\n\n    def get_plugin(self, name, kind=None, *args, **kwargs):\n        config = self.get_plugin_config(name)\n        kwargs = dict(list(config.items()) + list(kwargs.items()))\n        return self.loader.get_plugin(name, kind=kind, *args, **kwargs)\n\n    def get_plugin_class(self, name, kind=None):\n        return self.loader.get_plugin_class(name, kind)\n\n    @memoized\n    def get_plugin_parameters(self, name):\n        if name in self.targets:\n            return self._get_target_params(name)\n        params = self.loader.get_plugin_class(name).parameters\n        return get_config_point_map(params)\n\n    def resolve_alias(self, name):\n        return self.loader.resolve_alias(name)\n\n    def _set_plugin_defaults(self, plugin_name, config):\n        cfg_points = self.get_plugin_parameters(plugin_name)\n        for cfg_point in cfg_points.values():\n            cfg_point.set_value(config, check_mandatory=False)\n\n        try:\n            _, alias_params = self.resolve_alias(plugin_name)\n            for name, value in alias_params.items():\n                cfg_points[name].set_value(config, value)\n        except NotFoundError:\n            pass\n\n    def _set_from_global_aliases(self, plugin_name, config):\n        for alias, param in self._global_alias_map[plugin_name].items():\n            if alias in self.global_alias_values:\n                for source in self.sources:\n                    if source not in self.global_alias_values[alias]:\n                        continue\n                    val = self.global_alias_values[alias][source]\n          
          param.set_value(config, value=val)\n\n    def _get_target_params(self, name):\n        td = self.targets[name]\n        return get_config_point_map(chain(td.target_params, td.platform_params, td.conn_params, td.assistant_params))\n\n    # pylint: disable=too-many-nested-blocks, too-many-branches\n    def _merge_using_priority_specificity(self, specific_name,\n                                          generic_name, merged_config, is_final=True):\n        \"\"\"\n        WA configuration can come from various sources of increasing priority,\n        as well as being specified in a generic and specific manner (e.g\n        ``device_config`` and ``nexus10`` respectivly). WA has two rules for\n        the priority of configuration:\n\n            - Configuration from higher priority sources overrides\n              configuration from lower priority sources.\n            - More specific configuration overrides less specific configuration.\n\n        There is a situation where these two rules come into conflict. When a\n        generic configuration is given in config source of high priority and a\n        specific configuration is given in a config source of lower priority.\n        In this situation it is not possible to know the end users intention\n        and WA will error.\n\n        :param specific_name: The name of the specific configuration used\n                              e.g ``nexus10``\n        :param generic_name: The name of the generic configuration\n                             e.g ``device_config``\n        :param merge_config: A dict of ``ConfigurationPoint``s to be used when\n                             merging configuration.  
keys=config point name,\n                             values=config point\n        :param is_final: if ``True`` (the default) make sure that mandatory\n                         parameters are set.\n\n        :rtype: A fully merged and validated configuration in the form of a\n                obj_dict.\n        \"\"\"\n        ms = MergeState()\n        ms.generic_name = generic_name\n        ms.specific_name = specific_name\n        ms.generic_config = copy(self.plugin_configs[generic_name])\n        ms.specific_config = copy(self.plugin_configs[specific_name])\n        ms.cfg_points = self.get_plugin_parameters(specific_name)\n        sources = self.sources\n\n        # set_value uses the 'name' attribute of the passed object in it error\n        # messages, to ensure these messages make sense the name will have to be\n        # changed several times during this function.\n        merged_config.name = specific_name\n\n        for source in sources:\n            try:\n                update_config_from_source(merged_config, source, ms)\n            except ConfigError as e:\n                raise ConfigError('Error in \"{}\":\\n\\t{}'.format(source, str(e)))\n\n        # Validate final configuration\n        merged_config.name = specific_name\n        for cfg_point in ms.cfg_points.values():\n            cfg_point.validate(merged_config, check_mandatory=is_final)\n\n    def __getattr__(self, name):\n        \"\"\"\n        This resolves methods for specific plugins types based on corresponding\n        generic plugin methods. 
So it's possible to say things like ::\n\n            loader.get_device('foo')\n\n        instead of ::\n\n            loader.get_plugin('foo', kind='device')\n\n        \"\"\"\n        error_msg = 'No plugins of type \"{}\" discovered'\n        if name.startswith('get_'):\n            name = name.replace('get_', '', 1)\n            if name in self.loader.kind_map:\n                def __wrapper(pname, *args, **kwargs):\n                    return self.get_plugin(pname, name, *args, **kwargs)\n                return __wrapper\n            raise NotFoundError(error_msg.format(name))\n        if name.startswith('list_'):\n            name = name.replace('list_', '', 1).rstrip('s')\n            if name in self.loader.kind_map:\n                def __wrapper(*args, **kwargs):  # pylint: disable=E0102\n                    return self.list_plugins(name, *args, **kwargs)\n                return __wrapper\n            raise NotFoundError(error_msg.format(name))\n        if name.startswith('has_'):\n            name = name.replace('has_', '', 1)\n            if name in self.loader.kind_map:\n                def __wrapper(pname, *args, **kwargs):  # pylint: disable=E0102\n                    return self.loader.has_plugin(pname, name, *args, **kwargs)\n                return __wrapper\n            raise NotFoundError(error_msg.format(name))\n        raise AttributeError(name)\n\n\nclass MergeState(object):\n\n    def __init__(self):\n        self.generic_name = None\n        self.specific_name = None\n        self.generic_config = None\n        self.specific_config = None\n        self.cfg_points = None\n        self.seen_specific_config = defaultdict(list)\n\n\ndef update_config_from_source(final_config, source, state):\n    if source in state.generic_config:\n        final_config.name = state.generic_name\n        for name, cfg_point in state.cfg_points.items():\n            if name in state.generic_config[source]:\n                if name in state.seen_specific_config:\n   
                 msg = ('\"{generic_name}\" configuration \"{config_name}\" has '\n                           'already been specified more specifically for '\n                           '{specific_name} in:\\n\\t\\t{sources}')\n                    seen_sources = state.seen_specific_config[name]\n                    msg = msg.format(generic_name=state.generic_name,\n                                     config_name=name,\n                                     specific_name=state.specific_name,\n                                     sources=\", \".join(seen_sources))\n                    raise ConfigError(msg)\n                value = state.generic_config[source].pop(name)\n                cfg_point.set_value(final_config, value, check_mandatory=False)\n\n        if state.generic_config[source]:\n            msg = 'Unexpected values for {}: {}'\n            raise ConfigError(msg.format(state.generic_name,\n                                         state.generic_config[source]))\n\n    if source in state.specific_config:\n        final_config.name = state.specific_name\n        for name, cfg_point in state.cfg_points.items():\n            if name in state.specific_config[source]:\n                state.seen_specific_config[name].append(str(source))\n                value = state.specific_config[source].pop(name)\n                cfg_point.set_value(final_config, value, check_mandatory=False)\n\n        if state.specific_config[source]:\n            msg = 'Unexpected values for {}: {}'\n            raise ConfigError(msg.format(state.specific_name,\n                                         state.specific_config[source]))\n"
  },
  {
    "path": "wa/framework/configuration/tree.py",
    "content": "#    Copyright 2016-2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport logging\n\nfrom wa.utils import log\n\n\nlogger = logging.getLogger('config')\n\n\nclass JobSpecSource(object):\n\n    kind = \"\"\n\n    def __init__(self, config, parent=None):\n        self.config = config\n        self.parent = parent\n        self._log_self()\n\n    @property\n    def id(self):\n        return self.config['id']\n\n    @property\n    def name(self):\n        raise NotImplementedError()\n\n    def _log_self(self):\n        logger.debug('Creating {} node'.format(self.kind))\n        with log.indentcontext():\n            for key, value in self.config.items():\n                logger.debug('\"{}\" to \"{}\"'.format(key, value))\n\n\nclass WorkloadEntry(JobSpecSource):\n    kind = \"workload\"\n\n    @property\n    def name(self):\n        if self.parent.id == \"global\":\n            return 'workload \"{}\"'.format(self.id)\n        else:\n            return 'workload \"{}\" from section \"{}\"'.format(self.id, self.parent.id)\n\n\nclass SectionNode(JobSpecSource):\n\n    kind = \"section\"\n\n    @property\n    def name(self):\n        if self.id == \"global\":\n            return \"globally specified configuration\"\n        else:\n            return 'section \"{}\"'.format(self.id)\n\n    @property\n    def is_leaf(self):\n        return not bool(self.children)\n\n    def __init__(self, config, parent=None, group=None):\n        
super(SectionNode, self).__init__(config, parent=parent)\n        self.workload_entries = []\n        self.children = []\n        self.group = group\n\n    def add_section(self, section, group=None):\n        # Each level is the same group, only need to check first\n        if not self.children or group == self.children[0].group:\n            new_node = SectionNode(section, parent=self, group=group)\n            self.children.append(new_node)\n        else:\n            for child in self.children:\n                new_node = child.add_section(section, group)\n        return new_node\n\n    def add_workload(self, workload_config):\n        self.workload_entries.append(WorkloadEntry(workload_config, self))\n\n    def descendants(self):\n        for child in self.children:\n            for n in child.descendants():\n                yield n\n            yield child\n\n    def ancestors(self):\n        if self.parent is not None:\n            yield self.parent\n            for ancestor in self.parent.ancestors():\n                yield ancestor\n\n    def leaves(self):\n        if self.is_leaf:\n            yield self\n        else:\n            for n in self.descendants():\n                if n.is_leaf:\n                    yield n\n"
  },
  {
    "path": "wa/framework/entrypoint.py",
    "content": "#    Copyright 2013-2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\n\nimport sys\nimport argparse\nimport locale\nimport logging\nimport os\nimport warnings\n\nimport devlib\ntry:\n    from devlib.utils.version import version as installed_devlib_version\nexcept ImportError:\n    installed_devlib_version = None\n\nfrom wa.framework import pluginloader\nfrom wa.framework.command import init_argument_parser\nfrom wa.framework.configuration import settings\nfrom wa.framework.configuration.execution import ConfigManager\nfrom wa.framework.host import init_user_directory, init_config\nfrom wa.framework.exception import ConfigError, HostError\nfrom wa.framework.version import (get_wa_version_with_commit, format_version,\n                                  required_devlib_version)\nfrom wa.utils import log\nfrom wa.utils.doc import format_body\n\nwarnings.filterwarnings(action='ignore', category=UserWarning, module='zope')\n\n# Disable this to avoid false positive from dynamically-created attributes.\n# pylint: disable=no-member\n\nlogger = logging.getLogger('command_line')\n\n\ndef load_commands(subparsers):\n    commands = {}\n    for command in pluginloader.list_commands():\n        commands[command.name] = pluginloader.get_command(command.name,\n                                                          subparsers=subparsers)\n    return commands\n\n\n# ArgumentParser.parse_known_args() does not correctly deal with 
concatenated\n# single character options. See https://bugs.python.org/issue16142 for the\n# description of the issue (with a fix attached since 2013!). To get around\n# this problem, this will pre-process sys.argv to detect such joined options\n# and split them.\ndef split_joined_options(argv):\n    output = []\n    for part in argv:\n        if len(part) > 1 and part[0] == '-' and part[1] != '-':\n            for c in part[1:]:\n                output.append('-' + c)\n        else:\n            output.append(part)\n    return output\n\n\n# Instead of presenting an obscure error due to a version mismatch explicitly warn the user.\ndef check_devlib_version():\n    if not installed_devlib_version or installed_devlib_version[:-1] <= required_devlib_version[:-1]:\n        # Check the 'dev' field separately to account for comparing with release versions.\n        if installed_devlib_version.dev and installed_devlib_version.dev < required_devlib_version.dev:\n            msg = 'WA requires Devlib version >={}. 
Please update the currently installed version {}'\n            raise HostError(msg.format(format_version(required_devlib_version), devlib.__version__))\n\n\n# If the default encoding is not UTF-8 warn the user as this may cause compatibility issues\n# when parsing files.\ndef check_system_encoding():\n    system_encoding = locale.getpreferredencoding()\n    msg = 'System Encoding: {}'.format(system_encoding)\n    if 'UTF-8' not in system_encoding:\n        logger.warning(msg)\n        logger.warning('To prevent encoding issues please use a locale setting which supports UTF-8')\n    else:\n        logger.debug(msg)\n\n\ndef main():\n    if not os.path.exists(settings.user_directory):\n        init_user_directory()\n    if not os.path.exists(os.path.join(settings.user_directory, 'config.yaml')):\n        init_config()\n\n    try:\n\n        description = (\"Execute automated workloads on a remote device and process \"\n                       \"the resulting output.\\n\\nUse \\\"wa <subcommand> -h\\\" to see \"\n                       \"help for individual subcommands.\")\n        parser = argparse.ArgumentParser(description=format_body(description, 80),\n                                         prog='wa',\n                                         formatter_class=argparse.RawDescriptionHelpFormatter,\n                                         )\n        init_argument_parser(parser)\n\n        # load_commands will trigger plugin enumeration, and we want logging\n        # to be enabled for that, which requires the verbosity setting; however\n        # full argument parsing cannot be completed until the commands are loaded; so\n        # parse just the base args for now so we can get verbosity.\n        argv = split_joined_options(sys.argv[1:])\n\n        # 'Parse_known_args' automatically displays the default help and exits\n        # if '-h' or '--help' is detected, we want our custom help messages so\n        # ensure these are never passed as parameters.\n        
filtered_argv = list(argv)\n        if '-h' in filtered_argv:\n            filtered_argv.remove('-h')\n        elif '--help' in filtered_argv:\n            filtered_argv.remove('--help')\n\n        args, _ = parser.parse_known_args(filtered_argv)\n        settings.set(\"verbosity\", args.verbose)\n        log.init(settings.verbosity)\n        logger.debug('Version: {}'.format(get_wa_version_with_commit()))\n        logger.debug('devlib version: {}'.format(devlib.__full_version__))\n        logger.debug('Command Line: {}'.format(' '.join(sys.argv)))\n        check_devlib_version()\n        check_system_encoding()\n\n        # each command will add its own subparser\n        subparsers = parser.add_subparsers(dest='command')\n        subparsers.required = True\n        commands = load_commands(subparsers)\n        args = parser.parse_args(argv)\n\n        config = ConfigManager()\n        config.load_config_file(settings.user_config_file)\n        for config_file in args.config:\n            if not os.path.exists(config_file):\n                raise ConfigError(\"Config file {} not found\".format(config_file))\n            config.load_config_file(config_file)\n\n        command = commands[args.command]\n        sys.exit(command.execute(config, args))\n\n    except KeyboardInterrupt as e:\n        log.log_error(e, logger)\n        sys.exit(3)\n    except Exception as e:  # pylint: disable=broad-except\n        log.log_error(e, logger)\n        sys.exit(2)\n"
  },
  {
    "path": "wa/framework/exception.py",
    "content": "#    Copyright 2013-2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n# pylint: disable=unused-import\nfrom devlib.exception import (DevlibError, HostError, TimeoutError,  # pylint: disable=redefined-builtin\n                              TargetError, TargetNotRespondingError)\n\nfrom wa.utils.misc import get_traceback\n\n\nclass WAError(Exception):\n    \"\"\"Base class for all Workload Automation exceptions.\"\"\"\n    @property\n    def message(self):\n        if self.args:\n            return self.args[0]\n        return ''\n\n\nclass NotFoundError(WAError):\n    \"\"\"Raised when the specified item is not found.\"\"\"\n\n\nclass ValidationError(WAError):\n    \"\"\"Raised on failure to validate an extension.\"\"\"\n\n\nclass ExecutionError(WAError):\n    \"\"\"Error encountered by the execution framework.\"\"\"\n\n\nclass WorkloadError(WAError):\n    \"\"\"General Workload error.\"\"\"\n\n\nclass JobError(WAError):\n    \"\"\"Job execution error.\"\"\"\n\n\nclass InstrumentError(WAError):\n    \"\"\"General Instrument error.\"\"\"\n\n\nclass OutputProcessorError(WAError):\n    \"\"\"General OutputProcessor error.\"\"\"\n\n\nclass ResourceError(WAError):\n    \"\"\"General Resolver error.\"\"\"\n\n\nclass CommandError(WAError):\n    \"\"\"Raised by commands when they have encountered an error condition\n    during execution.\"\"\"\n\n\nclass ToolError(WAError):\n    \"\"\"Raised by tools when they have encountered an error 
condition\n    during execution.\"\"\"\n\n\nclass ConfigError(WAError):\n    \"\"\"Raised when configuration provided is invalid. This error suggests that\n    the user should modify their config and try again.\"\"\"\n\n\nclass SerializerSyntaxError(Exception):\n    \"\"\"\n    Error loading a serialized structure from/to a file handle.\n    \"\"\"\n    @property\n    def message(self):\n        if self.args:\n            return self.args[0]\n        return ''\n\n    def __init__(self, message, line=None, column=None):\n        super(SerializerSyntaxError, self).__init__(message)\n        self.line = line\n        self.column = column\n\n    def __str__(self):\n        linestring = ' on line {}'.format(self.line) if self.line else ''\n        colstring = ' in column {}'.format(self.column) if self.column else ''\n        message = 'Syntax Error{}: {}'\n        return message.format(''.join([linestring, colstring]), self.message)\n\n\nclass PluginLoaderError(WAError):\n    \"\"\"Raised when there is an error loading an extension or\n    an external resource. 
Apart form the usual message, the __init__\n    takes an exc_info parameter which should be the result of\n    sys.exc_info() for the original exception (if any) that\n    caused the error.\"\"\"\n\n    def __init__(self, message, exc_info=None):\n        super(PluginLoaderError, self).__init__(message)\n        self.exc_info = exc_info\n\n    def __str__(self):\n        if self.exc_info:\n            orig = self.exc_info[1]\n            orig_name = type(orig).__name__\n            if isinstance(orig, WAError):\n                reason = 'because of:\\n{}: {}'.format(orig_name, orig)\n            else:\n                text = 'because of:\\n{}\\n{}: {}'\n                reason = text.format(get_traceback(self.exc_info), orig_name, orig)\n            return '\\n'.join([self.message, reason])\n        else:\n            return self.message\n\n\nclass WorkerThreadError(WAError):\n    \"\"\"\n    This should get raised  in the main thread if a non-WAError-derived\n    exception occurs on a worker/background thread. If a WAError-derived\n    exception is raised in the worker, then it that exception should be\n    re-raised on the main thread directly -- the main point of this is to\n    preserve the backtrace in the output, and backtrace doesn't get output for\n    WAErrors.\n\n    \"\"\"\n\n    def __init__(self, thread, exc_info):\n        self.thread = thread\n        self.exc_info = exc_info\n        orig = self.exc_info[1]\n        orig_name = type(orig).__name__\n        text = 'Exception of type {} occured on thread {}:\\n{}\\n{}: {}'\n        message = text.format(orig_name, thread, get_traceback(self.exc_info),\n                              orig_name, orig)\n        super(WorkerThreadError, self).__init__(message)\n"
  },
  {
    "path": "wa/framework/execution.py",
    "content": "#    Copyright 2013-2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\n# pylint: disable=no-member\n\nimport hashlib\nimport logging\nimport os\nimport shutil\nfrom copy import copy\nfrom datetime import datetime\n\nimport wa.framework.signal as signal\nfrom wa.framework import instrument as instrumentation\nfrom wa.framework.configuration.core import Status\nfrom wa.framework.exception import TargetError, HostError, WorkloadError, ExecutionError\nfrom wa.framework.exception import TargetNotRespondingError, TimeoutError  # pylint: disable=redefined-builtin\nfrom wa.framework.job import Job\nfrom wa.framework.output import init_job_output\nfrom wa.framework.output_processor import ProcessorManager\nfrom wa.framework.resource import ResourceResolver\nfrom wa.framework.target.manager import TargetManager\nfrom wa.utils import log\nfrom wa.utils.misc import merge_config_values, format_duration\n\n\nclass ExecutionContext(object):\n\n    @property\n    def previous_job(self):\n        if not self.completed_jobs:\n            return None\n        return self.completed_jobs[-1]\n\n    @property\n    def next_job(self):\n        if not self.job_queue:\n            return None\n        return self.job_queue[0]\n\n    @property\n    def spec_changed(self):\n        if self.previous_job is None and self.current_job is not None:  # Start of run\n            return True\n        if self.previous_job is not None and self.current_job is 
None:  # End of run\n            return True\n        return self.current_job.spec.id != self.previous_job.spec.id\n\n    @property\n    def spec_will_change(self):\n        if self.current_job is None and self.next_job is not None:  # Start of run\n            return True\n        if self.current_job is not None and self.next_job is None:  # End of run\n            return True\n        return self.current_job.spec.id != self.next_job.spec.id\n\n    @property\n    def workload(self):\n        if self.current_job:\n            return self.current_job.workload\n\n    @property\n    def job_output(self):\n        if self.current_job:\n            return self.current_job.output\n\n    @property\n    def output(self):\n        if self.current_job:\n            return self.job_output\n        return self.run_output\n\n    @property\n    def output_directory(self):\n        return self.output.basepath\n\n    @property\n    def reboot_policy(self):\n        return self.cm.run_config.reboot_policy\n\n    @property\n    def target_info(self):\n        return self.run_output.target_info\n\n    def __init__(self, cm, tm, output):\n        self.logger = logging.getLogger('context')\n        self.cm = cm\n        self.tm = tm\n        self.run_output = output\n        self.run_state = output.state\n        self.job_queue = None\n        self.completed_jobs = None\n        self.current_job = None\n        self.successful_jobs = 0\n        self.failed_jobs = 0\n        self.run_interrupted = False\n        self._load_resource_getters()\n\n    def start_run(self):\n        self.output.info.start_time = datetime.utcnow()\n        self.output.write_info()\n        self.job_queue = copy(self.cm.jobs)\n        self.completed_jobs = []\n        self.run_state.status = Status.STARTED\n        self.output.status = Status.STARTED\n        self.output.write_state()\n\n    def end_run(self):\n        if self.successful_jobs:\n            if self.failed_jobs:\n                status = 
Status.PARTIAL\n            else:\n                status = Status.OK\n        else:\n            status = Status.FAILED\n        self.run_state.status = status\n        self.run_output.status = status\n        self.run_output.info.end_time = datetime.utcnow()\n        self.run_output.info.duration = (self.run_output.info.end_time\n                                         - self.run_output.info.start_time)\n        self.write_output()\n\n    def finalize(self):\n        self.tm.finalize()\n\n    def start_job(self):\n        if not self.job_queue:\n            raise RuntimeError('No jobs to run')\n        self.current_job = self.job_queue.pop(0)\n        job_output = init_job_output(self.run_output, self.current_job)\n        self.current_job.set_output(job_output)\n        return self.current_job\n\n    def end_job(self):\n        if not self.current_job:\n            raise RuntimeError('No jobs in progress')\n        self.completed_jobs.append(self.current_job)\n        self.output.write_result()\n        self.current_job = None\n\n    def set_status(self, status, force=False, write=True):\n        if not self.current_job:\n            raise RuntimeError('No jobs in progress')\n        self.set_job_status(self.current_job, status, force, write)\n\n    def set_job_status(self, job, status, force=False, write=True):\n        job.set_status(status, force)\n        if write:\n            self.run_output.write_state()\n\n    def extract_results(self):\n        self.tm.extract_results(self)\n\n    def move_failed(self, job):\n        self.run_output.move_failed(job.output)\n\n    def skip_job(self, job):\n        self.set_job_status(job, Status.SKIPPED, force=True)\n        self.completed_jobs.append(job)\n\n    def skip_remaining_jobs(self):\n        while self.job_queue:\n            job = self.job_queue.pop(0)\n            self.skip_job(job)\n        self.write_state()\n\n    def write_config(self):\n        self.run_output.write_config(self.cm.get_config())\n\n    
def write_state(self):\n        self.run_output.write_state()\n\n    def write_output(self):\n        self.run_output.write_info()\n        self.run_output.write_state()\n        self.run_output.write_result()\n\n    def write_job_specs(self):\n        self.run_output.write_job_specs(self.cm.job_specs)\n\n    def add_augmentation(self, aug):\n        self.cm.run_config.add_augmentation(aug)\n\n    def get_resource(self, resource, strict=True):\n        result = self.resolver.get(resource, strict)\n        if result is None:\n            return result\n        if os.path.isfile(result):\n            with open(result, 'rb') as fh:\n                md5hash = hashlib.md5(fh.read())\n                key = '{}/{}'.format(resource.owner, os.path.basename(result))\n                self.update_metadata('hashes', key, md5hash.hexdigest())\n        return result\n\n    get = get_resource  # alias to allow a context to act as a resolver\n\n    def get_metric(self, name):\n        try:\n            return self.output.get_metric(name)\n        except HostError:\n            if not self.current_job:\n                raise\n            return self.run_output.get_metric(name)\n\n    def add_metric(self, name, value, units=None, lower_is_better=False,\n                   classifiers=None):\n        if self.current_job:\n            classifiers = merge_config_values(self.current_job.classifiers,\n                                              classifiers)\n        self.output.add_metric(name, value, units, lower_is_better, classifiers)\n\n    def get_artifact(self, name):\n        try:\n            return self.output.get_artifact(name)\n        except HostError:\n            if not self.current_job:\n                raise\n            return self.run_output.get_artifact(name)\n\n    def get_artifact_path(self, name):\n        try:\n            return self.output.get_artifact_path(name)\n        except HostError:\n            if not self.current_job:\n                raise\n            
return self.run_output.get_artifact_path(name)\n\n    def add_artifact(self, name, path, kind, description=None, classifiers=None):\n        self.output.add_artifact(name, path, kind, description, classifiers)\n\n    def add_run_artifact(self, name, path, kind, description=None,\n                         classifiers=None):\n        self.run_output.add_artifact(name, path, kind, description, classifiers)\n\n    def add_event(self, message):\n        self.output.add_event(message)\n\n    def add_classifier(self, name, value, overwrite=False):\n        self.output.add_classifier(name, value, overwrite)\n        if self.current_job:\n            self.current_job.add_classifier(name, value, overwrite)\n\n    def add_metadata(self, key, *args, **kwargs):\n        self.output.add_metadata(key, *args, **kwargs)\n\n    def update_metadata(self, key, *args):\n        self.output.update_metadata(key, *args)\n\n    def take_screenshot(self, filename):\n        filepath = self._get_unique_filepath(filename)\n        self.tm.target.capture_screen(filepath)\n        if os.path.isfile(filepath):\n            self.add_artifact('screenshot', filepath, kind='log')\n\n    def take_uiautomator_dump(self, filename):\n        filepath = self._get_unique_filepath(filename)\n        self.tm.target.capture_ui_hierarchy(filepath)\n        self.add_artifact('uitree', filepath, kind='log')\n\n    def record_ui_state(self, basename):\n        self.logger.info('Recording screen state...')\n        self.take_screenshot('{}.png'.format(basename))\n        target = self.tm.target\n        if target.os == 'android' or\\\n           (target.os == 'chromeos' and target.has('android_container')):\n            self.take_uiautomator_dump('{}.uix'.format(basename))\n\n    def initialize_jobs(self):\n        new_queue = []\n        failed_ids = []\n        for job in self.job_queue:\n            if job.id in failed_ids:\n                # Don't try to initialize a job if another job with the same ID\n      
          # (i.e. same job spec) has failed - we can assume it will fail\n                # too.\n                self.skip_job(job)\n                continue\n\n            try:\n                job.initialize(self)\n            except WorkloadError as e:\n                self.set_job_status(job, Status.FAILED, write=False)\n                log.log_error(e, self.logger)\n                failed_ids.append(job.id)\n\n                if self.cm.run_config.bail_on_init_failure:\n                    raise\n            else:\n                new_queue.append(job)\n\n        self.job_queue = new_queue\n        self.write_state()\n\n    def _load_resource_getters(self):\n        self.logger.debug('Loading resource discoverers')\n        self.resolver = ResourceResolver(self.cm.plugin_cache)\n        self.resolver.load()\n        for getter in self.resolver.getters:\n            self.cm.run_config.add_resource_getter(getter)\n\n    def _get_unique_filepath(self, filename):\n        filepath = os.path.join(self.output_directory, filename)\n        rest, ext = os.path.splitext(filepath)\n        i = 1\n        new_filepath = '{}-{}{}'.format(rest, i, ext)\n\n        if not os.path.exists(filepath) and not os.path.exists(new_filepath):\n            return filepath\n        elif not os.path.exists(new_filepath):\n            # new_filepath does not exist, therefore filepath must exist.\n            # this is the first collision\n            shutil.move(filepath, new_filepath)\n\n        while os.path.exists(new_filepath):\n            i += 1\n            new_filepath = '{}-{}{}'.format(rest, i, ext)\n        return new_filepath\n\n\nclass Executor(object):\n    \"\"\"\n    The ``Executor``'s job is to set up the execution context and pass to a\n    ``Runner`` along with a loaded run specification. 
Once the ``Runner`` has\n    done its thing, the ``Executor`` performs some final reporting before\n    returning.\n\n    The initial context set up involves combining configuration from various\n    sources, loading of required workloads, loading and installation of\n    instruments and output processors, etc. Static validation of the combined\n    configuration is also performed.\n\n    \"\"\"\n    # pylint: disable=R0915\n\n    def __init__(self):\n        self.logger = logging.getLogger('executor')\n        self.error_logged = False\n        self.warning_logged = False\n        self.target_manager = None\n\n    def execute(self, config_manager, output):\n        \"\"\"\n        Execute the run specified by an agenda. Optionally, selectors may be\n        used to only execute a subset of the specified agenda.\n\n        Params::\n\n            :state: a ``ConfigManager`` containing processed configuration\n            :output: an initialized ``RunOutput`` that will be used to\n                     store the results.\n\n        \"\"\"\n        signal.connect(self._error_signalled_callback, signal.ERROR_LOGGED)\n        signal.connect(self._warning_signalled_callback, signal.WARNING_LOGGED)\n\n        self.logger.info('Initializing run')\n        self.logger.debug('Finalizing run configuration.')\n        config = config_manager.finalize()\n        output.write_config(config)\n\n        self.target_manager = TargetManager(config.run_config.device,\n                                            config.run_config.device_config,\n                                            output.basepath)\n\n        self.logger.info('Initializing execution context')\n        context = ExecutionContext(config_manager, self.target_manager, output)\n\n        try:\n            self.do_execute(context)\n        except KeyboardInterrupt as e:\n            context.run_output.status = Status.ABORTED\n            log.log_error(e, self.logger)\n            context.write_output()\n            
raise\n        except Exception as e:\n            context.run_output.status = Status.FAILED\n            log.log_error(e, self.logger)\n            context.write_output()\n            raise\n        finally:\n            context.finalize()\n            self.execute_postamble(context, output)\n            signal.send(signal.RUN_COMPLETED, self, context)\n\n    def do_execute(self, context):\n        self.logger.info('Connecting to target')\n        context.tm.initialize()\n\n        if context.cm.run_config.reboot_policy.perform_initial_reboot:\n            self.logger.info('Performing initial reboot.')\n            attempts = context.cm.run_config.max_retries\n            while attempts:\n                try:\n                    self.target_manager.reboot(context)\n                except TargetError as e:\n                    if attempts:\n                        attempts -= 1\n                    else:\n                        raise e\n                else:\n                    break\n\n        context.output.set_target_info(self.target_manager.get_target_info())\n\n        self.logger.info('Generating jobs')\n        context.cm.generate_jobs(context)\n        context.write_job_specs()\n        context.output.write_state()\n\n        self.logger.info('Installing instruments')\n        for instrument in context.cm.get_instruments(self.target_manager.target):\n            instrumentation.install(instrument, context)\n        instrumentation.validate()\n\n        self.logger.info('Installing output processors')\n        pm = ProcessorManager()\n        for proc in context.cm.get_processors():\n            pm.install(proc, context)\n        pm.validate()\n\n        context.write_config()\n\n        self.logger.info('Starting run')\n        runner = Runner(context, pm)\n        signal.send(signal.RUN_STARTED, self, context)\n        runner.run()\n\n    def execute_postamble(self, context, output):\n        self.logger.info('Done.')\n        duration = 
format_duration(output.info.duration)\n        self.logger.info('Run duration: {}'.format(duration))\n        num_ran = context.run_state.num_completed_jobs\n        status_summary = 'Ran a total of {} iterations: '.format(num_ran)\n\n        counter = context.run_state.get_status_counts()\n        parts = []\n        for status in reversed(Status.levels):\n            if status in counter:\n                parts.append('{} {}'.format(counter[status], status))\n        self.logger.info('{}{}'.format(status_summary, ', '.join(parts)))\n\n        self.logger.info('Results can be found in {}'.format(output.basepath))\n\n        if self.error_logged:\n            self.logger.warning('There were errors during execution.')\n            self.logger.warning('Please see {}'.format(output.logfile))\n        elif self.warning_logged:\n            self.logger.warning('There were warnings during execution.')\n            self.logger.warning('Please see {}'.format(output.logfile))\n\n    def _error_signalled_callback(self, _):\n        self.error_logged = True\n        signal.disconnect(self._error_signalled_callback, signal.ERROR_LOGGED)\n\n    def _warning_signalled_callback(self, _):\n        self.warning_logged = True\n        signal.disconnect(self._warning_signalled_callback, signal.WARNING_LOGGED)\n\n    def __str__(self):\n        return 'executor'\n\n    __repr__ = __str__\n\n\nclass Runner(object):\n    \"\"\"\n    Triggers running jobs and processing results\n\n    Takes pre-initialized ExecutionContext and ProcessorManager. 
Handles\n    actually running the jobs, and triggers the ProcessorManager to handle\n    processing job and run results.\n    \"\"\"\n\n    def __init__(self, context, pm):\n        self.logger = logging.getLogger('runner')\n        self.context = context\n        self.pm = pm\n        self.output = self.context.output\n        self.config = self.context.cm\n\n    def run(self):\n        try:\n            self.initialize_run()\n            self.send(signal.RUN_INITIALIZED)\n\n            with signal.wrap('JOB_QUEUE_EXECUTION', self, self.context):\n                while self.context.job_queue:\n                    if self.context.run_interrupted:\n                        raise KeyboardInterrupt()\n                    self.run_next_job(self.context)\n\n        except KeyboardInterrupt as e:\n            log.log_error(e, self.logger)\n            self.logger.info('Skipping remaining jobs.')\n            self.context.skip_remaining_jobs()\n        except Exception as e:\n            message = e.args[0] if e.args else str(e)\n            log.log_error(e, self.logger)\n            self.logger.error('Skipping remaining jobs due to \"{}\".'.format(message))\n            self.context.skip_remaining_jobs()\n            raise e\n        finally:\n            self.finalize_run()\n            self.send(signal.RUN_FINALIZED)\n\n    def initialize_run(self):\n        self.logger.info('Initializing run')\n        signal.connect(self._error_signalled_callback, signal.ERROR_LOGGED)\n        signal.connect(self._warning_signalled_callback, signal.WARNING_LOGGED)\n        self.context.start_run()\n        self.pm.initialize(self.context)\n        with log.indentcontext():\n            self.context.initialize_jobs()\n        self.context.write_state()\n\n    def finalize_run(self):\n        self.logger.info('Run completed')\n        with log.indentcontext():\n            for job in self.context.completed_jobs:\n                job.finalize(self.context)\n        
self.logger.info('Finalizing run')\n        self.context.end_run()\n        instrumentation.enable_all()\n        self.pm.enable_all()\n        with signal.wrap('RUN_OUTPUT_PROCESSED', self):\n            self.pm.process_run_output(self.context)\n            self.pm.export_run_output(self.context)\n        self.pm.finalize(self.context)\n        if self.context.reboot_policy.reboot_on_run_completion:\n            self.logger.info('Rebooting target on run completion.')\n            self.context.tm.reboot(self.context)\n        signal.disconnect(self._error_signalled_callback, signal.ERROR_LOGGED)\n        signal.disconnect(self._warning_signalled_callback, signal.WARNING_LOGGED)\n\n    def run_next_job(self, context):\n        job = context.start_job()\n        self.logger.info('Running job {}'.format(job.id))\n\n        try:\n            log.indent()\n            if self.context.reboot_policy.reboot_on_each_job:\n                self.logger.info('Rebooting on new job.')\n                self.context.tm.reboot(context)\n            elif self.context.reboot_policy.reboot_on_each_spec and context.spec_changed:\n                self.logger.info('Rebooting on new spec.')\n                self.context.tm.reboot(context)\n\n            with signal.wrap('JOB', self, context):\n                context.tm.start()\n                self.do_run_job(job, context)\n                context.set_job_status(job, Status.OK)\n        except (Exception, KeyboardInterrupt) as e:  # pylint: disable=broad-except\n            log.log_error(e, self.logger)\n            if isinstance(e, KeyboardInterrupt):\n                context.run_interrupted = True\n                context.set_job_status(job, Status.ABORTED)\n                raise e\n            else:\n                context.set_job_status(job, Status.FAILED)\n            if isinstance(e, TargetNotRespondingError):\n                raise e\n            elif isinstance(e, TargetError):\n                
context.tm.verify_target_responsive(context)\n        finally:\n            self.logger.info('Completing job {}'.format(job.id))\n            self.send(signal.JOB_COMPLETED)\n            context.tm.stop()\n            context.end_job()\n\n            log.dedent()\n            self.check_job(job)\n\n    def do_run_job(self, job, context):\n        # pylint: disable=too-many-branches,too-many-statements\n        rc = self.context.cm.run_config\n        if job.workload.phones_home and not rc.allow_phone_home:\n            self.logger.warning('Skipping job {} ({}) due to allow_phone_home=False'\n                                .format(job.id, job.workload.name))\n            self.context.skip_job(job)\n            return\n\n        context.set_job_status(job, Status.RUNNING)\n        self.send(signal.JOB_STARTED)\n\n        job.configure_augmentations(context, self.pm)\n\n        with signal.wrap('JOB_TARGET_CONFIG', self, context):\n            job.configure_target(context)\n\n        try:\n            job.setup(context)\n        except Exception as e:\n            context.set_job_status(job, Status.FAILED)\n            log.log_error(e, self.logger)\n            if isinstance(e, (TargetError, TimeoutError)):\n                context.tm.verify_target_responsive(context)\n            self.context.record_ui_state('setup-error')\n            raise e\n\n        try:\n\n            try:\n                job.run(context)\n            except KeyboardInterrupt:\n                context.run_interrupted = True\n                context.set_job_status(job, Status.ABORTED)\n                raise\n            except Exception as e:\n                context.set_job_status(job, Status.FAILED)\n                log.log_error(e, self.logger)\n                if isinstance(e, (TargetError, TimeoutError)):\n                    context.tm.verify_target_responsive(context)\n                self.context.record_ui_state('run-error')\n                raise e\n            finally:\n              
  try:\n                    with signal.wrap('JOB_OUTPUT_PROCESSED', self, context):\n                        job.process_output(context)\n                        self.pm.process_job_output(context)\n                    self.pm.export_job_output(context)\n                except Exception as e:\n                    context.set_job_status(job, Status.PARTIAL)\n                    if isinstance(e, (TargetError, TimeoutError)):\n                        context.tm.verify_target_responsive(context)\n                    self.context.record_ui_state('output-error')\n                    raise\n\n        except KeyboardInterrupt:\n            context.run_interrupted = True\n            context.set_status(Status.ABORTED)\n            raise\n        finally:\n            # If setup was successfully completed, teardown must\n            # run even if the job failed\n            job.teardown(context)\n\n    def check_job(self, job):\n        rc = self.context.cm.run_config\n        if job.status in rc.retry_on_status:\n            if job.retries < rc.max_retries:\n                msg = 'Job {} iteration {} completed with status {}. retrying...'\n                self.logger.error(msg.format(job.id, job.iteration, job.status))\n                self.retry_job(job)\n                self.context.move_failed(job)\n                self.context.write_state()\n            else:\n                msg = 'Job {} iteration {} completed with status {}. 
'\\\n                      'Max retries exceeded.'\n                self.logger.error(msg.format(job.id, job.iteration, job.status))\n                self.context.failed_jobs += 1\n                self.send(signal.JOB_FAILED)\n                if rc.bail_on_job_failure:\n                    raise ExecutionError('Job {} failed, bailing.'.format(job.id))\n\n        else:  # status not in retry_on_status\n            self.logger.info('Job completed with status {}'.format(job.status))\n            if job.status != 'ABORTED':\n                self.context.successful_jobs += 1\n            else:\n                self.context.failed_jobs += 1\n                self.send(signal.JOB_ABORTED)\n\n    def retry_job(self, job):\n        retry_job = Job(job.spec, job.iteration, self.context)\n        retry_job.workload = job.workload\n        retry_job.state = job.state\n        retry_job.retries = job.retries + 1\n        self.context.set_job_status(retry_job, Status.PENDING, force=True)\n        self.context.job_queue.insert(0, retry_job)\n        self.send(signal.JOB_RESTARTED)\n\n    def send(self, s):\n        signal.send(s, self, self.context)\n\n    def _error_signalled_callback(self, record):\n        self.context.add_event(record.getMessage())\n\n    def _warning_signalled_callback(self, record):\n        self.context.add_event(record.getMessage())\n\n    def __str__(self):\n        return 'runner'\n"
  },
  {
    "path": "wa/framework/getters.py",
    "content": "#    Copyright 2013-2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\n\n\"\"\"\nThis module contains the standard set of resource getters used by Workload Automation.\n\n\"\"\"\nimport http.client\nimport json\nimport logging\nimport os\nimport shutil\nimport sys\n\nimport requests\n\n\nfrom wa import Parameter, settings, __file__ as _base_filepath\nfrom wa.framework.resource import ResourceGetter, SourcePriority, NO_ONE\nfrom wa.framework.exception import ResourceError\nfrom wa.utils.misc import (ensure_directory_exists as _d, atomic_write_path,\n                           ensure_file_directory_exists as _f, sha256, urljoin)\nfrom wa.utils.types import boolean, caseless_string\n\n# Because of use of Enum (dynamic attrs)\n# pylint: disable=no-member\n\nlogging.getLogger(\"requests\").setLevel(logging.WARNING)\nlogging.getLogger(\"urllib3\").setLevel(logging.WARNING)\n\nlogger = logging.getLogger('resource')\n\n\ndef get_by_extension(path, ext):\n    if not ext.startswith('.'):\n        ext = '.' 
+ ext\n    ext = caseless_string(ext)\n\n    found = []\n    for entry in os.listdir(path):\n        entry_ext = os.path.splitext(entry)[1]\n        if entry_ext == ext:\n            found.append(os.path.join(path, entry))\n    return found\n\n\ndef get_generic_resource(resource, files):\n    matches = []\n    for f in files:\n        if resource.match(f):\n            matches.append(f)\n    if not matches:\n        return None\n    if len(matches) > 1:\n        msg = 'Multiple matches for {}: {}'\n        raise ResourceError(msg.format(resource, matches))\n    return matches[0]\n\n\ndef get_path_matches(resource, files):\n    matches = []\n    for f in files:\n        if resource.match_path(f):\n            matches.append(f)\n    return matches\n\n\n# pylint: disable=too-many-return-statements\ndef get_from_location(basepath, resource):\n    if resource.kind == 'file':\n        path = os.path.join(basepath, resource.path)\n        if os.path.exists(path):\n            return path\n    elif resource.kind == 'executable':\n        bin_dir = os.path.join(basepath, 'bin', resource.abi)\n        if not os.path.exists(bin_dir):\n            return None\n        for entry in os.listdir(bin_dir):\n            path = os.path.join(bin_dir, entry)\n            if resource.match(path):\n                return path\n    elif resource.kind == 'revent':\n        path = os.path.join(basepath, 'revent_files')\n        if os.path.exists(path):\n            files = get_by_extension(path, resource.kind)\n            found_resource = get_generic_resource(resource, files)\n            if found_resource:\n                return found_resource\n        files = get_by_extension(basepath, resource.kind)\n        return get_generic_resource(resource, files)\n    elif resource.kind in ['apk', 'jar']:\n        files = get_by_extension(basepath, resource.kind)\n        return get_generic_resource(resource, files)\n\n    return None\n\n\nclass Package(ResourceGetter):\n\n    name = 
'package'\n\n    def register(self, resolver):\n        resolver.register(self.get, SourcePriority.package)\n\n    # pylint: disable=no-self-use\n    def get(self, resource):\n        if resource.owner == NO_ONE:\n            basepath = os.path.join(os.path.dirname(_base_filepath), 'assets')\n        else:\n            modname = resource.owner.__module__\n            basepath = os.path.dirname(sys.modules[modname].__file__)\n        return get_from_location(basepath, resource)\n\n\nclass UserDirectory(ResourceGetter):\n\n    name = 'user'\n\n    def register(self, resolver):\n        resolver.register(self.get, SourcePriority.local)\n\n    # pylint: disable=no-self-use\n    def get(self, resource):\n        basepath = settings.dependencies_directory\n        directory = _d(os.path.join(basepath, resource.owner.name))\n        return get_from_location(directory, resource)\n\n\nclass Http(ResourceGetter):\n\n    name = 'http'\n    description = \"\"\"\n    Downloads resources from a server based on an index fetched from the\n    specified URL.\n\n    Given a URL, this will try to fetch ``<URL>/index.json``. The index file\n    maps extension names to a list of corresponding asset descriptions. Each\n    asset description contains a path (relative to the base URL) of the\n    resource and a SHA256 hash, so that this Getter can verify whether the\n    resource on the remote has changed.\n\n    For example, let's assume we want to get the APK file for workload \"foo\",\n    and that assets are hosted at ``http://example.com/assets``. This Getter\n    will first try to download ``http://example.com/assets/index.json``. 
The\n    index file may contain something like ::\n\n        {\n            \"foo\": [\n                {\n                    \"path\": \"foo-app.apk\",\n                    \"sha256\": \"b14530bb47e04ed655ac5e80e69beaa61c2020450e18638f54384332dffebe86\"\n                },\n                {\n                    \"path\": \"subdir/some-other-asset.file\",\n                    \"sha256\": \"48d9050e9802246d820625717b72f1c2ba431904b8484ca39befd68d1dbedfff\"\n                }\n            ]\n        }\n\n    This Getter will look through the list of assets for \"foo\" (in this case,\n    two) check the paths until it finds one matching the resource (in this\n    case, \"foo-app.apk\").  Finally, it will try to download that file relative\n    to the base URL and extension name (in this case,\n    \"http://example.com/assets/foo/foo-app.apk\"). The downloaded version will\n    be cached locally, so that in the future, the getter will check the SHA256\n    hash of the local file against the one advertised inside index.json, and\n    provided that hasn't changed, it won't try to download the file again.\n\n    \"\"\"\n    parameters = [\n        Parameter('url', global_alias='remote_assets_url',\n                  description=\"\"\"\n                  URL of the index file for assets on an HTTP server.\n                  \"\"\"),\n        Parameter('username',\n                  description=\"\"\"\n                  User name for authenticating with assets URL\n                  \"\"\"),\n        Parameter('password',\n                  description=\"\"\"\n                  Password for authenticating with assets URL\n                  \"\"\"),\n        Parameter('always_fetch', kind=boolean, default=False,\n                  global_alias='always_fetch_remote_assets',\n                  description=\"\"\"\n                  If ``True``, will always attempt to fetch assets from the\n                  remote, even if a local cached copy is available.\n                  
\"\"\"),\n        Parameter('chunk_size', kind=int, default=1024,\n                  description=\"\"\"\n                  Chunk size for streaming large assets.\n                  \"\"\"),\n    ]\n\n    def __init__(self, **kwargs):\n        super(Http, self).__init__(**kwargs)\n        self.logger = logger\n        self.index = {}\n\n    def register(self, resolver):\n        resolver.register(self.get, SourcePriority.remote)\n\n    def get(self, resource):\n        if not resource.owner:\n            return  # TODO: add support for unowned resources\n        if not self.index:\n            try:\n                self.index = self.fetch_index()\n            except requests.exceptions.RequestException as e:\n                msg = 'Skipping HTTP getter due to connection error: {}'\n                self.logger.debug(msg.format(e.message))\n                return\n        if resource.kind == 'apk':\n            # APKs must always be downloaded to run ApkInfo for version\n            # information.\n            return self.resolve_apk(resource)\n        else:\n            asset = self.resolve_resource(resource)\n            if not asset:\n                return\n            return self.download_asset(asset, resource.owner.name)\n\n    def fetch_index(self):\n        if not self.url:\n            return {}\n        index_url = urljoin(self.url, 'index.json')\n        response = self.geturl(index_url)\n        if response.status_code != http.client.OK:\n            message = 'Could not fetch \"{}\"; received \"{} {}\"'\n            self.logger.error(message.format(index_url,\n                                             response.status_code,\n                                             response.reason))\n            return {}\n        content = response.content.decode('utf-8')\n        return json.loads(content)\n\n    def download_asset(self, asset, owner_name):\n        url = urljoin(self.url, owner_name, asset['path'])\n        local_path = 
_f(os.path.join(settings.dependencies_directory, '__remote',\n                                     owner_name, asset['path'].replace('/', os.sep)))\n\n        if os.path.exists(local_path) and not self.always_fetch:\n            local_sha = sha256(local_path)\n            if local_sha == asset['sha256']:\n                self.logger.debug('Local SHA256 matches; not re-downloading')\n                return local_path\n        self.logger.debug('Downloading {}'.format(url))\n        response = self.geturl(url, stream=True)\n        if response.status_code != http.client.OK:\n            message = 'Could not download asset \"{}\"; received \"{} {}\"'\n            self.logger.warning(message.format(url,\n                                               response.status_code,\n                                               response.reason))\n            return\n        with atomic_write_path(local_path) as at_path:\n            with open(at_path, 'wb') as wfh:\n                for chunk in response.iter_content(chunk_size=self.chunk_size):\n                    wfh.write(chunk)\n        return local_path\n\n    def geturl(self, url, stream=False):\n        if self.username:\n            auth = (self.username, self.password)\n        else:\n            auth = None\n        return requests.get(url, auth=auth, stream=stream)\n\n    def resolve_apk(self, resource):\n        assets = self.index.get(resource.owner.name, {})\n        if not assets:\n            return None\n        asset_map = {a['path']: a for a in assets}\n        paths = get_path_matches(resource, list(asset_map.keys()))\n        local_paths = []\n        for path in paths:\n            local_paths.append(self.download_asset(asset_map[path],\n                                                   resource.owner.name))\n        for path in local_paths:\n            if resource.match(path):\n                return path\n\n    def resolve_resource(self, resource):\n        # pylint: 
disable=too-many-branches,too-many-locals\n        assets = self.index.get(resource.owner.name, {})\n        if not assets:\n            return {}\n\n        asset_map = {a['path']: a for a in assets}\n        if resource.kind in ['jar', 'revent']:\n            path = get_generic_resource(resource, list(asset_map.keys()))\n            if path:\n                return asset_map[path]\n        elif resource.kind == 'executable':\n            path = '/'.join(['bin', resource.abi, resource.filename])\n            for asset in assets:\n                if asset['path'].lower() == path.lower():\n                    return asset\n        else:  # file\n            for asset in assets:\n                if asset['path'].lower() == resource.path.lower():\n                    return asset\n\n\nclass Filer(ResourceGetter):\n\n    name = 'filer'\n    description = \"\"\"\n    Finds resources on a (locally mounted) remote filer and caches them\n    locally.\n\n    This assumes that the filer is mounted on the local machine (e.g. 
as a\n    samba share).\n\n    \"\"\"\n    parameters = [\n        Parameter('remote_path', global_alias='remote_assets_path',\n                  default=settings.assets_repository,\n                  description=\"\"\"\n                  Path, on the local system, where the assets are located.\n                  \"\"\"),\n        Parameter('always_fetch', kind=boolean, default=False,\n                  global_alias='always_fetch_remote_assets',\n                  description=\"\"\"\n                  If ``True``, will always attempt to fetch assets from the\n                  remote, even if a local cached copy is available.\n                  \"\"\"),\n    ]\n\n    def register(self, resolver):\n        resolver.register(self.get, SourcePriority.lan)\n\n    def get(self, resource):\n        if resource.owner:\n            remote_path = os.path.join(self.remote_path, resource.owner.name)\n            local_path = os.path.join(settings.dependencies_directory, '__filer',\n                                      resource.owner.dependencies_directory)\n            return self.try_get_resource(resource, remote_path, local_path)\n        else:  # No owner\n            result = None\n            for entry in os.listdir(remote_path):\n                remote_path = os.path.join(self.remote_path, entry)\n                local_path = os.path.join(settings.dependencies_directory, '__filer',\n                                          settings.dependencies_directory, entry)\n                result = self.try_get_resource(resource, remote_path, local_path)\n                if result:\n                    break\n            return result\n\n    def try_get_resource(self, resource, remote_path, local_path):\n        if not self.always_fetch:\n            result = get_from_location(local_path, resource)\n            if result:\n                return result\n        if not os.path.exists(local_path):\n            return None\n        if os.path.exists(remote_path):\n            # 
Didn't find it cached locally; now check the remote\n            result = get_from_location(remote_path, resource)\n            if not result:\n                return result\n        else:  # remote path does not exist\n            return None\n        # Found it remotely, cache locally, then return it\n        local_full_path = os.path.join(_d(local_path), os.path.basename(result))\n        self.logger.debug('cp {} {}'.format(result, local_full_path))\n        shutil.copy(result, local_full_path)\n        return result\n"
  },
  {
    "path": "wa/framework/host.py",
    "content": "#    Copyright 2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\nimport os\nimport shutil\n\nfrom wa.framework import pluginloader\nfrom wa.framework.configuration.core import (settings, ConfigurationPoint,\n                                             MetaConfiguration, RunConfiguration)\nfrom wa.framework.configuration.default import (generate_default_config,\n                                                write_param_yaml)\nfrom wa.framework.configuration.plugin_cache import PluginCache\nfrom wa.utils.misc import load_struct_from_python\nfrom wa.utils.serializer import yaml\nfrom wa.utils.types import identifier\n\n\n# Have to disable this due to dynamic attributes\n# pylint: disable=no-member\n\ndef init_user_directory(overwrite_existing=False):  # pylint: disable=R0914\n    \"\"\"\n    Initialise a fresh user directory.\n    \"\"\"\n    if os.path.exists(settings.user_directory):\n        if not overwrite_existing:\n            raise RuntimeError('Environment {} already exists.'.format(settings.user_directory))\n        shutil.rmtree(settings.user_directory)\n\n    os.makedirs(settings.user_directory)\n    os.makedirs(settings.dependencies_directory)\n    os.makedirs(settings.plugins_directory)\n    os.makedirs(settings.cache_directory)\n\n    generate_default_config(os.path.join(settings.user_directory, 'config.yaml'))\n\n    if os.getenv('USER') == 'root':\n        # If running with sudo on POSIX, change the 
ownership to the real user.\n        real_user = os.getenv('SUDO_USER')\n        if real_user:\n            # pylint: disable=import-outside-toplevel\n            import pwd  # done here as module won't import on win32\n            user_entry = pwd.getpwnam(real_user)\n            uid, gid = user_entry.pw_uid, user_entry.pw_gid\n            os.chown(settings.user_directory, uid, gid)\n            # why, oh why isn't there a recusive=True option for os.chown?\n            for root, dirs, files in os.walk(settings.user_directory):\n                for d in dirs:\n                    os.chown(os.path.join(root, d), uid, gid)\n                for f in files:\n                    os.chown(os.path.join(root, f), uid, gid)\n\n\ndef init_config():\n    \"\"\"\n    If configuration file is missing try to convert WA2 config if present\n    otherwise initialize fresh config file\n    \"\"\"\n    wa2_config_file = os.path.join(settings.user_directory, 'config.py')\n    wa3_config_file = os.path.join(settings.user_directory, 'config.yaml')\n    if os.path.exists(wa2_config_file):\n        convert_wa2_agenda(wa2_config_file, wa3_config_file)\n    else:\n        generate_default_config(wa3_config_file)\n\n\ndef convert_wa2_agenda(filepath, output_path):\n    \"\"\"\n    Convert WA2 .py config file to a WA3 .yaml config file.\n    \"\"\"\n\n    orig_agenda = load_struct_from_python(filepath)\n    new_agenda = {'augmentations': []}\n    config_points = MetaConfiguration.config_points + RunConfiguration.config_points\n\n    # Add additional config points to extract from config file.\n    # Also allows for aliasing of renamed parameters\n    config_points.extend([\n        ConfigurationPoint(\n            'augmentations',\n            aliases=[\"instruments\", \"processors\", \"instrumentation\",\n                     \"output_processors\", \"augment\", \"result_processors\"],\n            description='''\n                The augmentations enabled by default.\n                This 
combines the \"instrumentation\"\n                and \"result_processors\" from previous\n                versions of WA (the old entries are\n                now aliases for this).\n            '''),\n        ConfigurationPoint(\n            'device_config',\n            description='''Generic configuration for device.''',\n            default={}),\n        ConfigurationPoint(\n            'cleanup_assets',\n            aliases=['clean_up'],\n            description='''Specify whether to clean up assets\n                            deployed to the target''',\n            default=True),\n    ])\n\n    for param in list(orig_agenda.keys()):\n        for cfg_point in config_points:\n            if param == cfg_point.name or param in cfg_point.aliases:\n                if cfg_point.name == 'augmentations':\n                    new_agenda['augmentations'].extend(orig_agenda.pop(param))\n                else:\n                    new_agenda[cfg_point.name] = format_parameter(orig_agenda.pop(param))\n\n    with open(output_path, 'w') as output:\n        for param in config_points:\n            entry = {param.name: new_agenda.get(param.name, param.default)}\n            write_param_yaml(entry, param, output)\n\n        # Convert plugin configuration\n        output.write(\"# Plugin Configuration\\n\")\n        for param in list(orig_agenda.keys()):\n            if pluginloader.has_plugin(param):\n                entry = {param: orig_agenda.pop(param)}\n                yaml.dump(format_parameter(entry), output, default_flow_style=False)\n                output.write(\"\\n\")\n\n        # Write any additional aliased parameters into new config\n        plugin_cache = PluginCache()\n        output.write(\"# Additional global aliases\\n\")\n        for param in list(orig_agenda.keys()):\n            if plugin_cache.is_global_alias(param):\n                entry = {param: orig_agenda.pop(param)}\n                yaml.dump(format_parameter(entry), output, 
default_flow_style=False)\n                output.write(\"\\n\")\n\n\ndef format_parameter(param):\n    if isinstance(param, dict):\n        return {identifier(k): v for k, v in param.items()}\n    else:\n        return param\n"
  },
  {
    "path": "wa/framework/instrument.py",
    "content": "#    Copyright 2013-2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\n\n\"\"\"\nAdding New Instrument\n=====================\n\nAny new instrument should be a subclass of Instrument and it must have a name.\nWhen a new instrument is added to Workload Automation, the methods of the new\ninstrument will be found automatically and hooked up to the supported signals.\nOnce a signal is broadcasted, the corresponding registered method is invoked.\n\nEach method in Instrument must take two arguments, which are self and context.\nSupported signals can be found in [... link to signals ...] To make\nimplementations easier and common, the basic steps to add new instrument is\nsimilar to the steps to add new workload.\n\nHence, the following methods are sufficient to implement to add new instrument:\n\n    - setup: This method is invoked after the workload is setup. All the\n       necessary setups should go inside this method. Setup, includes operations\n       like, pushing the files to the target device, install them, clear logs,\n       etc.\n    - start: It is invoked just before the workload start execution. Here is\n       where instrument measures start being registered/taken.\n    - stop: It is invoked just after the workload execution stops. 
The measures\n       should stop being taken/registered.\n    - update_output: It is invoked after the workload updated its result.\n       update_output is where the taken measures are added to the output so it\n       can be processed by Workload Automation.\n    - teardown is invoked after the workload is teared down. It is a good place\n       to clean any logs generated by the instrument.\n\nFor example, to add an instrument which will trace device errors, we subclass\nInstrument and overwrite the variable name.::\n\n        #BINARY_FILE = os.path.join(os.path.dirname(__file__), 'trace')\n        class TraceErrorsInstrument(Instrument):\n\n            name = 'trace-errors'\n\n            def __init__(self, device):\n                super(TraceErrorsInstrument, self).__init__(device)\n                self.trace_on_device = os.path.join(self.device.working_directory, 'trace')\n\nWe then declare and implement the aforementioned methods. For the setup method,\nwe want to push the file to the target device and then change the file mode to\n755 ::\n\n    def setup(self, context):\n        self.device.push(BINARY_FILE, self.device.working_directory)\n        self.device.execute('chmod 755 {}'.format(self.trace_on_device))\n\nThen we implemented the start method, which will simply run the file to start\ntracing. ::\n\n    def start(self, context):\n        self.device.execute('{} start'.format(self.trace_on_device))\n\nLastly, we need to stop tracing once the workload stops and this happens in the\nstop method::\n\n    def stop(self, context):\n        self.device.execute('{} stop'.format(self.trace_on_device))\n\nThe generated output can be updated inside update_output, or if it is trace, we\njust pull the file to the host device. context has an output variable which\nhas add_metric method. It can be used to add the instruments results metrics\nto the final result for the workload. 
The method can be passed 4 params, which\nare metric key, value, unit and lower_is_better, which is a boolean. ::\n\n    def update_output(self, context):\n        # pull the trace file to the device\n        result = os.path.join(self.device.working_directory, 'trace.txt')\n        self.device.pull(result, context.working_directory)\n\n        # parse the file if needs to be parsed, or add result to\n        # context.result\n\nAt the end, we might want to delete any files generated by the instruments\nand the code to clear these file goes in teardown method. ::\n\n    def teardown(self, context):\n        self.device.remove(os.path.join(self.device.working_directory, 'trace.txt'))\n\n\"\"\"\n\nimport logging\nimport inspect\nfrom collections import OrderedDict\n\nfrom wa.framework import signal\nfrom wa.framework.plugin import TargetedPlugin\nfrom wa.framework.exception import (TargetNotRespondingError, TimeoutError,  # pylint: disable=redefined-builtin\n                                    WorkloadError, TargetError)\nfrom wa.utils.log import log_error\nfrom wa.utils.misc import isiterable\nfrom wa.utils.types import identifier, level\n\n\nlogger = logging.getLogger('instruments')\n\n\n# Maps method names onto signals the should be registered to.\n# Note: the begin/end signals are paired -- if a begin_ signal is sent,\n#       then the corresponding end_ signal is guaranteed to also be sent.\n# Note: using OrderedDict to preserve logical ordering for the table generated\n#       in the documentation\nSIGNAL_MAP = OrderedDict([\n    # Below are \"aliases\" for some of the more common signals to allow\n    # instruments to have similar structure to workloads\n    ('initialize', signal.RUN_INITIALIZED),\n    ('setup', signal.BEFORE_WORKLOAD_SETUP),\n    ('start', signal.BEFORE_WORKLOAD_EXECUTION),\n    ('stop', signal.AFTER_WORKLOAD_EXECUTION),\n    ('process_workload_output', signal.SUCCESSFUL_WORKLOAD_OUTPUT_UPDATE),\n    ('update_output', 
signal.AFTER_WORKLOAD_OUTPUT_UPDATE),\n    ('teardown', signal.AFTER_WORKLOAD_TEARDOWN),\n    ('finalize', signal.RUN_FINALIZED),\n\n    ('on_run_start', signal.RUN_STARTED),\n    ('on_run_end', signal.RUN_COMPLETED),\n\n    ('on_job_start', signal.JOB_STARTED),\n    ('on_job_restart', signal.JOB_RESTARTED),\n    ('on_job_end', signal.JOB_COMPLETED),\n    ('on_job_failure', signal.JOB_FAILED),\n    ('on_job_abort', signal.JOB_ABORTED),\n\n    ('before_job_queue_execution', signal.BEFORE_JOB_QUEUE_EXECUTION),\n    ('on_successful_job_queue_exection', signal.SUCCESSFUL_JOB_QUEUE_EXECUTION),\n    ('after_job_queue_execution', signal.AFTER_JOB_QUEUE_EXECUTION),\n\n    ('before_job', signal.BEFORE_JOB),\n    ('on_successful_job', signal.SUCCESSFUL_JOB),\n    ('after_job', signal.AFTER_JOB),\n    ('before_processing_job_output', signal.BEFORE_JOB_OUTPUT_PROCESSED),\n    ('on_successfully_processing_job', signal.SUCCESSFUL_JOB_OUTPUT_PROCESSED),\n    ('after_processing_job_output', signal.AFTER_JOB_OUTPUT_PROCESSED),\n\n    ('before_reboot', signal.BEFORE_REBOOT),\n    ('on_successful_reboot', signal.SUCCESSFUL_REBOOT),\n    ('after_reboot', signal.AFTER_REBOOT),\n\n    ('on_error', signal.ERROR_LOGGED),\n    ('on_warning', signal.WARNING_LOGGED),\n])\n\n\ndef get_priority(func):\n    return getattr(getattr(func, 'im_func', func),\n                   'priority', signal.CallbackPriority.normal)\n\n\ndef priority(priority):  # pylint: disable=redefined-outer-name\n    def decorate(func):\n        def wrapper(*args, **kwargs):\n            return func(*args, **kwargs)\n        wrapper.__name__ = func.__name__\n        if priority in signal.CallbackPriority.levels:\n            wrapper.priority = signal.CallbackPriority(priority)\n        else:\n            if not isinstance(priority, int):\n                msg = 'Invalid priorty \"{}\"; must be an int or one of {}'\n                raise ValueError(msg.format(priority, signal.CallbackPriority.values))\n            
wrapper.priority = level('custom', priority)\n        return wrapper\n    return decorate\n\n\nextremely_slow = priority(signal.CallbackPriority.extremely_low)\nvery_slow = priority(signal.CallbackPriority.very_low)\nslow = priority(signal.CallbackPriority.low)\nnormal = priority(signal.CallbackPriority.normal)\nfast = priority(signal.CallbackPriority.high)\nvery_fast = priority(signal.CallbackPriority.very_high)\nextremely_fast = priority(signal.CallbackPriority.extremely_high)\n\n\ndef hostside(func):\n    \"\"\"\n    Used as a hint that the callback only performs actions on the\n    host and does not rely on an active connection to the target.\n    This means the callback will be invoked even if the target is\n    thought to be unresponsive.\n\n    \"\"\"\n    func.is_hostside = True\n    return func\n\n\ndef is_hostside(func):\n    return getattr(func, 'is_hostside', False)\n\n\ninstalled = []\n\n\ndef is_installed(instrument):\n    if isinstance(instrument, Instrument):\n        if instrument in installed:\n            return True\n        if instrument.name in [i.name for i in installed]:\n            return True\n    elif isinstance(instrument, type):\n        if instrument in [i.__class__ for i in installed]:\n            return True\n    else:  # assume string\n        if identifier(instrument) in [identifier(i.name) for i in installed]:\n            return True\n    return False\n\n\ndef is_enabled(instrument):\n    if isinstance(instrument, (Instrument, type)):\n        name = instrument.name\n    else:  # assume string\n        name = instrument\n    try:\n        installed_instrument = get_instrument(name)\n        return installed_instrument.is_enabled\n    except ValueError:\n        return False\n\n\nfailures_detected = False\n\n\ndef reset_failures():\n    global failures_detected  # pylint: disable=W0603\n    failures_detected = False\n\n\ndef check_failures():\n    result = failures_detected\n    reset_failures()\n    return result\n\n\nclass 
ManagedCallback(object):\n    \"\"\"\n    This wraps instruments' callbacks to ensure that errors do not interfer\n    with run execution.\n\n    \"\"\"\n\n    def __init__(self, instrument, callback):\n        self.instrument = instrument\n        self.callback = callback\n        self.is_hostside = is_hostside(callback)\n\n    def __call__(self, context):\n        if self.instrument.is_enabled:\n            try:\n                if not context.tm.is_responsive and not self.is_hostside:\n                    logger.debug(\"Target unresponsive; skipping callback {}\".format(self.callback))\n                    return\n                self.callback(context)\n            except (KeyboardInterrupt, TargetNotRespondingError, TimeoutError):  # pylint: disable=W0703\n                raise\n            except Exception as e:  # pylint: disable=W0703\n                logger.error('Error in instrument {}'.format(self.instrument.name))\n                global failures_detected  # pylint: disable=W0603\n                failures_detected = True\n                log_error(e, logger)\n                context.add_event(e.args[0] if e.args else str(e))\n                if isinstance(e, WorkloadError):\n                    context.set_status('FAILED')\n                elif isinstance(e, (TargetError, TimeoutError)):\n                    context.tm.verify_target_responsive(context)\n                else:\n                    if context.current_job:\n                        context.set_status('PARTIAL')\n                    else:\n                        raise\n\n    def __repr__(self):\n        text = 'ManagedCallback({}, {})'\n        return text.format(self.instrument.name, self.callback.__func__.__name__)\n\n    __str__ = __repr__\n\n\n# Need this to keep track of callbacks, because the dispatcher only keeps\n# weak references, so if the callbacks aren't referenced elsewhere, they will\n# be deallocated before they've had a chance to be invoked.\n_callbacks = []\n\n\ndef 
install(instrument, context):\n    \"\"\"\n    This will look for methods (or any callable members) with specific names\n    in the instrument and hook them up to the corresponding signals.\n\n    :param instrument: Instrument instance to install.\n\n    \"\"\"\n    # pylint: disable=redefined-outer-name\n    logger.debug('Installing instrument %s.', instrument)\n\n    if is_installed(instrument):\n        msg = 'Instrument {} is already installed.'\n        raise ValueError(msg.format(instrument.name))\n\n    for attr_name in dir(instrument):\n        if attr_name not in SIGNAL_MAP:\n            continue\n\n        attr = getattr(instrument, attr_name)\n\n        if not callable(attr):\n            msg = 'Attribute {} not callable in {}.'\n            raise ValueError(msg.format(attr_name, instrument))\n        argspec = inspect.getfullargspec(attr)\n        arg_num = len(argspec.args)\n        # Instrument callbacks will be passed exactly two arguments: self\n        # (the instrument instance to which the callback is bound) and\n        # context. 
However, we also allow callbacks to capture the context\n        # in variable arguments (declared as \"*args\" in the definition).\n        if arg_num > 2 or (arg_num < 2 and argspec.varargs is None):\n            message = '{} must take exactly 2 positional arguments; {} given.'\n            raise ValueError(message.format(attr_name, arg_num))\n\n        priority = get_priority(attr)\n        hostside = ' [hostside]' if is_hostside(attr) else ''\n        logger.debug('\\tConnecting %s to %s with priority %s(%d)%s', attr.__name__,\n                     SIGNAL_MAP[attr_name], priority.name, priority.value, hostside)\n\n        mc = ManagedCallback(instrument, attr)\n        _callbacks.append(mc)\n        signal.connect(mc, SIGNAL_MAP[attr_name], priority=priority.value)\n\n    instrument.logger.context = context\n    installed.append(instrument)\n    context.add_augmentation(instrument)\n\n\ndef uninstall(instrument):\n    instrument = get_instrument(instrument)\n    installed.remove(instrument)\n\n\ndef validate():\n    for instrument in installed:\n        instrument.validate()\n\n\ndef get_instrument(inst):\n    if isinstance(inst, Instrument):\n        return inst\n    for installed_inst in installed:\n        if identifier(installed_inst.name) == identifier(inst):\n            return installed_inst\n    raise ValueError('Instrument {} is not installed'.format(inst))\n\n\ndef disable_all():\n    for instrument in installed:\n        _disable_instrument(instrument)\n\n\ndef enable_all():\n    for instrument in installed:\n        _enable_instrument(instrument)\n\n\ndef enable(to_enable):\n    if isiterable(to_enable):\n        for inst in to_enable:\n            _enable_instrument(inst)\n    else:\n        _enable_instrument(to_enable)\n\n\ndef disable(to_disable):\n    if isiterable(to_disable):\n        for inst in to_disable:\n            _disable_instrument(inst)\n    else:\n        _disable_instrument(to_disable)\n\n\ndef _enable_instrument(inst):\n    inst 
= get_instrument(inst)\n    if not inst.is_broken:\n        logger.debug('Enabling instrument {}'.format(inst.name))\n        inst.is_enabled = True\n    else:\n        logger.debug('Not enabling broken instrument {}'.format(inst.name))\n\n\ndef _disable_instrument(inst):\n    inst = get_instrument(inst)\n    if inst.is_enabled:\n        logger.debug('Disabling instrument {}'.format(inst.name))\n        inst.is_enabled = False\n\n\ndef get_enabled():\n    return [i for i in installed if i.is_enabled]\n\n\ndef get_disabled():\n    return [i for i in installed if not i.is_enabled]\n\n\nclass Instrument(TargetedPlugin):\n    \"\"\"\n    Base class for instrument implementations.\n    \"\"\"\n    kind = \"instrument\"\n\n    def __init__(self, *args, **kwargs):\n        super(Instrument, self).__init__(*args, **kwargs)\n        self.is_enabled = True\n        self.is_broken = False\n"
  },
  {
    "path": "wa/framework/job.py",
    "content": "#    Copyright 2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\n# Because of use of Enum (dynamic attrs)\n# pylint: disable=no-member\n\nimport logging\nfrom copy import copy\nfrom datetime import datetime\n\nfrom wa.framework import pluginloader, signal, instrument\nfrom wa.framework.configuration.core import Status\nfrom wa.utils.log import indentcontext\nfrom wa.framework.run import JobState\n\n\nclass Job(object):\n\n    _workload_cache = {}\n\n    @property\n    def id(self):\n        return self.spec.id\n\n    @property\n    def label(self):\n        return self.spec.label\n\n    @property\n    def status(self):\n        return self.state.status\n\n    @property\n    def has_been_initialized(self):\n        return self._has_been_initialized\n\n    @property\n    def retries(self):\n        return self.state.retries\n\n    @status.setter\n    def status(self, value):\n        self.state.status = value\n        self.state.timestamp = datetime.utcnow()\n        if self.output:\n            self.output.status = value\n\n    @retries.setter\n    def retries(self, value):\n        self.state.retries = value\n\n    def __init__(self, spec, iteration, context):\n        self.logger = logging.getLogger('job')\n        self.spec = spec\n        self.iteration = iteration\n        self.context = context\n        self.workload = None\n        self.output = None\n        self.run_time = None\n        self.classifiers = 
copy(self.spec.classifiers)\n        self._has_been_initialized = False\n        self.state = JobState(self.id, self.label, self.iteration, Status.NEW)\n\n    def load(self, target, loader=pluginloader):\n        self.logger.info('Loading job {}'.format(self))\n        if self.id not in self._workload_cache:\n            self.workload = loader.get_workload(self.spec.workload_name,\n                                                target,\n                                                **self.spec.workload_parameters)\n            self.workload.init_resources(self.context)\n            self.workload.validate()\n            self._workload_cache[self.id] = self.workload\n        else:\n            self.workload = self._workload_cache[self.id]\n\n    def set_output(self, output):\n        output.classifiers = copy(self.classifiers)\n        self.output = output\n\n    def initialize(self, context):\n        self.logger.info('Initializing job {}'.format(self))\n        with indentcontext():\n            with signal.wrap('WORKLOAD_INITIALIZED', self, context):\n                self.workload.logger.context = context\n                self.workload.initialize(context)\n            self.set_status(Status.PENDING)\n            self._has_been_initialized = True\n\n    def configure_augmentations(self, context, pm):\n        self.logger.info('Configuring augmentations')\n        with indentcontext():\n            instruments_to_enable = set()\n            output_processors_to_enable = set()\n            enabled_instruments = set(i.name for i in instrument.get_enabled())\n            enabled_output_processors = set(p.name for p in pm.get_enabled())\n\n            for augmentation in list(self.spec.augmentations.values()):\n                augmentation_cls = context.cm.plugin_cache.get_plugin_class(augmentation)\n                if augmentation_cls.kind == 'instrument':\n                    instruments_to_enable.add(augmentation)\n                elif augmentation_cls.kind == 
'output_processor':\n                    output_processors_to_enable.add(augmentation)\n\n            # Disable unrequired instruments\n            for instrument_name in enabled_instruments.difference(instruments_to_enable):\n                instrument.disable(instrument_name)\n            # Enable additional instruments\n            for instrument_name in instruments_to_enable.difference(enabled_instruments):\n                instrument.enable(instrument_name)\n\n            # Disable unrequired output_processors\n            for processor in enabled_output_processors.difference(output_processors_to_enable):\n                pm.disable(processor)\n            # Enable additional output_processors\n            for processor in output_processors_to_enable.difference(enabled_output_processors):\n                pm.enable(processor)\n\n    def configure_target(self, context):\n        self.logger.info('Configuring target for job {}'.format(self))\n        with indentcontext():\n            context.tm.commit_runtime_parameters(self.spec.runtime_parameters)\n\n    def setup(self, context):\n        self.logger.info('Setting up job {}'.format(self))\n        with indentcontext():\n            with signal.wrap('WORKLOAD_SETUP', self, context):\n                self.workload.setup(context)\n\n    def run(self, context):\n        self.logger.info('Running job {}'.format(self))\n        with indentcontext():\n            with signal.wrap('WORKLOAD_EXECUTION', self, context):\n                start_time = datetime.utcnow()\n                try:\n                    self.workload.run(context)\n                finally:\n                    self.run_time = datetime.utcnow() - start_time\n\n    def process_output(self, context):\n        if not context.tm.is_responsive:\n            self.logger.info('Target unresponsive; not processing job output.')\n            return\n        self.logger.info('Processing output for job {}'.format(self))\n        with indentcontext():\n         
   if self.status != Status.FAILED:\n                with signal.wrap('WORKLOAD_RESULT_EXTRACTION', self, context):\n                    self.workload.extract_results(context)\n                    context.extract_results()\n                with signal.wrap('WORKLOAD_OUTPUT_UPDATE', self, context):\n                    self.workload.update_output(context)\n\n    def teardown(self, context):\n        if not context.tm.is_responsive:\n            self.logger.info('Target unresponsive; not tearing down.')\n            return\n        self.logger.info('Tearing down job {}'.format(self))\n        with indentcontext():\n            with signal.wrap('WORKLOAD_TEARDOWN', self, context):\n                self.workload.teardown(context)\n\n    def finalize(self, context):\n        if not self._has_been_initialized:\n            return\n        if not context.tm.is_responsive:\n            self.logger.info('Target unresponsive; not finalizing.')\n            return\n        self.logger.info('Finalizing job {} '.format(self))\n        with indentcontext():\n            with signal.wrap('WORKLOAD_FINALIZED', self, context):\n                self.workload.finalize(context)\n\n    def set_status(self, status, force=False):\n        status = Status(status)\n        if force or self.status < status:\n            self.status = status\n\n    def add_classifier(self, name, value, overwrite=False):\n        if name in self.classifiers and not overwrite:\n            raise ValueError('Cannot overwrite \"{}\" classifier.'.format(name))\n        self.classifiers[name] = value\n\n    def __str__(self):\n        return '{} ({}) [{}]'.format(self.id, self.label, self.iteration)\n\n    def __repr__(self):\n        return 'Job({})'.format(self)\n"
  },
  {
    "path": "wa/framework/output.py",
    "content": "#    Copyright 2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\ntry:\n    import psycopg2\n    from psycopg2 import Error as Psycopg2Error\nexcept ImportError:\n    psycopg2 = None\n    Psycopg2Error = None\n\nimport logging\nimport os\nimport shutil\nimport tarfile\nimport tempfile\nfrom collections import OrderedDict, defaultdict\nfrom copy import copy, deepcopy\nfrom datetime import datetime\nfrom io import StringIO\n\nimport devlib\n\nfrom wa.framework.configuration.core import JobSpec, Status\nfrom wa.framework.configuration.execution import CombinedConfig\nfrom wa.framework.exception import HostError, SerializerSyntaxError, ConfigError\nfrom wa.framework.run import RunState, RunInfo\nfrom wa.framework.target.info import TargetInfo\nfrom wa.framework.version import get_wa_version_with_commit\nfrom wa.utils.doc import format_simple_table\nfrom wa.utils.misc import (touch, ensure_directory_exists, isiterable,\n                           format_ordered_dict, safe_extract)\nfrom wa.utils.postgres import get_schema_versions\nfrom wa.utils.serializer import write_pod, read_pod, Podable, json\nfrom wa.utils.types import enum, numeric\n\n\nlogger = logging.getLogger('output')\n\n\nclass Output(object):\n\n    kind = None\n\n    @property\n    def resultfile(self):\n        return os.path.join(self.basepath, 'result.json')\n\n    @property\n    def event_summary(self):\n        num_events = len(self.events)\n        if 
num_events:\n            lines = self.events[0].message.split('\\n')\n            message = '({} event(s)): {}'\n            if num_events > 1 or len(lines) > 1:\n                message += '[...]'\n            return message.format(num_events, lines[0])\n        return ''\n\n    @property\n    def status(self):\n        if self.result is None:\n            return None\n        return self.result.status\n\n    @status.setter\n    def status(self, value):\n        self.result.status = value\n\n    @property\n    def metrics(self):\n        if self.result is None:\n            return []\n        return self.result.metrics\n\n    @property\n    def artifacts(self):\n        if self.result is None:\n            return []\n        return self.result.artifacts\n\n    @property\n    def classifiers(self):\n        if self.result is None:\n            return OrderedDict()\n        return self.result.classifiers\n\n    @classifiers.setter\n    def classifiers(self, value):\n        if self.result is None:\n            msg = 'Attempting to set classifiers before output has been set'\n            raise RuntimeError(msg)\n        self.result.classifiers = value\n\n    @property\n    def events(self):\n        if self.result is None:\n            return []\n        return self.result.events\n\n    @property\n    def metadata(self):\n        if self.result is None:\n            return {}\n        return self.result.metadata\n\n    def __init__(self, path):\n        self.basepath = path\n        self.result = None\n\n    def reload(self):\n        try:\n            if os.path.isdir(self.basepath):\n                pod = read_pod(self.resultfile)\n                self.result = Result.from_pod(pod)\n            else:\n                self.result = Result()\n                self.result.status = Status.PENDING\n        except Exception as e:  # pylint: disable=broad-except\n            self.result = Result()\n            self.result.status = Status.UNKNOWN\n            
self.add_event(str(e))\n\n    def write_result(self):\n        write_pod(self.result.to_pod(), self.resultfile)\n\n    def get_path(self, subpath):\n        return os.path.join(self.basepath, subpath.strip(os.sep))\n\n    def add_metric(self, name, value, units=None, lower_is_better=False,\n                   classifiers=None):\n        self.result.add_metric(name, value, units, lower_is_better, classifiers)\n\n    def add_artifact(self, name, path, kind, description=None, classifiers=None):\n        if not os.path.exists(path):\n            path = self.get_path(path)\n        if not os.path.exists(path):\n            msg = 'Attempting to add non-existing artifact: {}'\n            raise HostError(msg.format(path))\n        is_dir = os.path.isdir(path)\n        path = os.path.relpath(path, self.basepath)\n\n        self.result.add_artifact(name, path, kind, description, classifiers, is_dir)\n\n    def add_event(self, message):\n        self.result.add_event(message)\n\n    def get_metric(self, name):\n        return self.result.get_metric(name)\n\n    def get_artifact(self, name):\n        return self.result.get_artifact(name)\n\n    def get_artifact_path(self, name):\n        artifact = self.get_artifact(name)\n        return self.get_path(artifact.path)\n\n    def add_classifier(self, name, value, overwrite=False):\n        self.result.add_classifier(name, value, overwrite)\n\n    def add_metadata(self, key, *args, **kwargs):\n        self.result.add_metadata(key, *args, **kwargs)\n\n    def update_metadata(self, key, *args):\n        self.result.update_metadata(key, *args)\n\n    def __repr__(self):\n        return '<{} {}>'.format(self.__class__.__name__,\n                                os.path.basename(self.basepath))\n\n    def __str__(self):\n        return os.path.basename(self.basepath)\n\n\nclass RunOutputCommon(object):\n    ''' Split out common functionality to form a second base of\n        the RunOutput classes\n    '''\n    @property\n    def 
run_config(self):\n        if self._combined_config:\n            return self._combined_config.run_config\n\n    @property\n    def settings(self):\n        if self._combined_config:\n            return self._combined_config.settings\n\n    def get_job_spec(self, spec_id):\n        for spec in self.job_specs:\n            if spec.id == spec_id:\n                return spec\n        return None\n\n    def list_workloads(self):\n        workloads = []\n        for job in self.jobs:\n            if job.label not in workloads:\n                workloads.append(job.label)\n        return workloads\n\n\nclass RunOutput(Output, RunOutputCommon):\n\n    kind = 'run'\n\n    @property\n    def logfile(self):\n        return os.path.join(self.basepath, 'run.log')\n\n    @property\n    def metadir(self):\n        return os.path.join(self.basepath, '__meta')\n\n    @property\n    def infofile(self):\n        return os.path.join(self.metadir, 'run_info.json')\n\n    @property\n    def statefile(self):\n        return os.path.join(self.basepath, '.run_state.json')\n\n    @property\n    def configfile(self):\n        return os.path.join(self.metadir, 'config.json')\n\n    @property\n    def targetfile(self):\n        return os.path.join(self.metadir, 'target_info.json')\n\n    @property\n    def jobsfile(self):\n        return os.path.join(self.metadir, 'jobs.json')\n\n    @property\n    def raw_config_dir(self):\n        return os.path.join(self.metadir, 'raw_config')\n\n    @property\n    def failed_dir(self):\n        path = os.path.join(self.basepath, '__failed')\n        return ensure_directory_exists(path)\n\n    @property\n    def augmentations(self):\n        run_augs = set([])\n        for job in self.jobs:\n            for aug in job.spec.augmentations:\n                run_augs.add(aug)\n        return list(run_augs)\n\n    def __init__(self, path):\n        super(RunOutput, self).__init__(path)\n        self.info = None\n        self.state = None\n        self.result = 
None\n        self.target_info = None\n        self._combined_config = None\n        self.jobs = []\n        self.job_specs = []\n        if (not os.path.isfile(self.statefile)\n                or not os.path.isfile(self.infofile)):\n            msg = '\"{}\" does not exist or is not a valid WA output directory.'\n            raise ValueError(msg.format(self.basepath))\n        self.reload()\n\n    def reload(self):\n        super(RunOutput, self).reload()\n        self.info = RunInfo.from_pod(read_pod(self.infofile))\n        self.state = RunState.from_pod(read_pod(self.statefile))\n        if os.path.isfile(self.configfile):\n            self._combined_config = CombinedConfig.from_pod(read_pod(self.configfile))\n        if os.path.isfile(self.targetfile):\n            self.target_info = TargetInfo.from_pod(read_pod(self.targetfile))\n        if os.path.isfile(self.jobsfile):\n            self.job_specs = self.read_job_specs()\n\n        for job_state in self.state.jobs.values():\n            job_path = os.path.join(self.basepath, job_state.output_name)\n            job = JobOutput(job_path, job_state.id,\n                            job_state.label, job_state.iteration,\n                            job_state.retries)\n            job.status = job_state.status\n            job.spec = self.get_job_spec(job.id)\n            if job.spec is None:\n                logger.warning('Could not find spec for job {}'.format(job.id))\n            self.jobs.append(job)\n\n    def write_info(self):\n        write_pod(self.info.to_pod(), self.infofile)\n\n    def write_state(self):\n        write_pod(self.state.to_pod(), self.statefile)\n\n    def write_config(self, config):\n        self._combined_config = config\n        write_pod(config.to_pod(), self.configfile)\n\n    def read_config(self):\n        if not os.path.isfile(self.configfile):\n            return None\n        return CombinedConfig.from_pod(read_pod(self.configfile))\n\n    def set_target_info(self, ti):\n       
 self.target_info = ti\n        write_pod(ti.to_pod(), self.targetfile)\n\n    def write_job_specs(self, job_specs):\n        job_specs[0].to_pod()\n        js_pod = {'jobs': [js.to_pod() for js in job_specs]}\n        write_pod(js_pod, self.jobsfile)\n\n    def read_job_specs(self):\n        if not os.path.isfile(self.jobsfile):\n            return None\n        pod = read_pod(self.jobsfile)\n        return [JobSpec.from_pod(jp) for jp in pod['jobs']]\n\n    def move_failed(self, job_output):\n        name = os.path.basename(job_output.basepath)\n        attempt = job_output.retry + 1\n        failed_name = '{}-attempt{:02}'.format(name, attempt)\n        failed_path = os.path.join(self.failed_dir, failed_name)\n        if os.path.exists(failed_path):\n            raise ValueError('Path {} already exists'.format(failed_path))\n        shutil.move(job_output.basepath, failed_path)\n        job_output.basepath = failed_path\n\n\nclass JobOutput(Output):\n\n    kind = 'job'\n\n    # pylint: disable=redefined-builtin\n    def __init__(self, path, id, label, iteration, retry):\n        super(JobOutput, self).__init__(path)\n        self.id = id\n        self.label = label\n        self.iteration = iteration\n        self.retry = retry\n        self.result = None\n        self.spec = None\n        self.reload()\n\n    @property\n    def augmentations(self):\n        job_augs = set([])\n        for aug in self.spec.augmentations:\n            job_augs.add(aug)\n        return list(job_augs)\n\n\nclass Result(Podable):\n\n    _pod_serialization_version = 1\n\n    @staticmethod\n    def from_pod(pod):\n        instance = super(Result, Result).from_pod(pod)\n        instance.status = Status.from_pod(pod['status'])\n        instance.metrics = [Metric.from_pod(m) for m in pod['metrics']]\n        instance.artifacts = [Artifact.from_pod(a) for a in pod['artifacts']]\n        instance.events = [Event.from_pod(e) for e in pod['events']]\n        instance.classifiers = 
pod.get('classifiers', OrderedDict())\n        instance.metadata = pod.get('metadata', OrderedDict())\n        return instance\n\n    def __init__(self):\n        # pylint: disable=no-member\n        super(Result, self).__init__()\n        self.status = Status.NEW\n        self.metrics = []\n        self.artifacts = []\n        self.events = []\n        self.classifiers = OrderedDict()\n        self.metadata = OrderedDict()\n\n    def add_metric(self, name, value, units=None, lower_is_better=False,\n                   classifiers=None):\n        metric = Metric(name, value, units, lower_is_better, classifiers)\n        logger.debug('Adding metric: {}'.format(metric))\n        self.metrics.append(metric)\n\n    def add_artifact(self, name, path, kind, description=None, classifiers=None,\n                     is_dir=False):\n        artifact = Artifact(name, path, kind, description=description,\n                            classifiers=classifiers, is_dir=is_dir)\n        logger.debug('Adding artifact: {}'.format(artifact))\n        self.artifacts.append(artifact)\n\n    def add_event(self, message):\n        self.events.append(Event(message))\n\n    def get_metric(self, name):\n        for metric in self.metrics:\n            if metric.name == name:\n                return metric\n        return None\n\n    def get_artifact(self, name):\n        for artifact in self.artifacts:\n            if artifact.name == name:\n                return artifact\n        raise HostError('Artifact \"{}\" not found'.format(name))\n\n    def add_classifier(self, name, value, overwrite=False):\n        if name in self.classifiers and not overwrite:\n            raise ValueError('Cannot overwrite \"{}\" classifier.'.format(name))\n        self.classifiers[name] = value\n\n        for metric in self.metrics:\n            if name in metric.classifiers and not overwrite:\n                raise ValueError('Cannot overwrite \"{}\" classifier; clashes with {}.'.format(name, metric))\n         
   metric.classifiers[name] = value\n\n        for artifact in self.artifacts:\n            if name in artifact.classifiers and not overwrite:\n                raise ValueError('Cannot overwrite \"{}\" classifier; clashes with {}.'.format(name, artifact))\n            artifact.classifiers[name] = value\n\n    def add_metadata(self, key, *args, **kwargs):\n        force = kwargs.pop('force', False)\n        if kwargs:\n            msg = 'Unexpected keyword arguments: {}'\n            raise ValueError(msg.format(kwargs))\n\n        if key in self.metadata and not force:\n            msg = 'Metadata with key \"{}\" already exists.'\n            raise ValueError(msg.format(key))\n\n        if len(args) == 1:\n            value = args[0]\n        elif len(args) == 2:\n            value = {args[0]: args[1]}\n        elif not args:\n            value = None\n        else:\n            raise ValueError(\"Unexpected arguments: {}\".format(args))\n\n        self.metadata[key] = value\n\n    def update_metadata(self, key, *args):\n        if not args:\n            del self.metadata[key]\n            return\n\n        if key not in self.metadata:\n            return self.add_metadata(key, *args)\n\n        if hasattr(self.metadata[key], 'items'):\n            if len(args) == 2:\n                self.metadata[key][args[0]] = args[1]\n            elif len(args) > 2:  # assume list of key-value pairs\n                for k, v in args:\n                    self.metadata[key][k] = v\n            elif hasattr(args[0], 'items'):\n                for k, v in args[0].items():\n                    self.metadata[key][k] = v\n            else:\n                raise ValueError('Invalid value for key \"{}\": {}'.format(key, args))\n\n        elif isiterable(self.metadata[key]):\n            self.metadata[key].extend(args)\n        else:   # scalar\n            if len(args) > 1:\n                raise ValueError('Invalid value for key \"{}\": {}'.format(key, args))\n            
self.metadata[key] = args[0]\n\n    def to_pod(self):\n        pod = super(Result, self).to_pod()\n        pod['status'] = self.status.to_pod()\n        pod['metrics'] = [m.to_pod() for m in self.metrics]\n        pod['artifacts'] = [a.to_pod() for a in self.artifacts]\n        pod['events'] = [e.to_pod() for e in self.events]\n        pod['classifiers'] = copy(self.classifiers)\n        pod['metadata'] = deepcopy(self.metadata)\n        return pod\n\n    @staticmethod\n    def _pod_upgrade_v1(pod):\n        pod['_pod_version'] = pod.get('_pod_version', 1)\n        pod['status'] = Status(pod['status']).to_pod()\n        return pod\n\n\nARTIFACT_TYPES = ['log', 'meta', 'data', 'export', 'raw']\nArtifactType = enum(ARTIFACT_TYPES)\n\n\nclass Artifact(Podable):\n    \"\"\"\n    This is an artifact generated during execution/post-processing of a\n    workload.  Unlike metrics, this represents an actual artifact, such as a\n    file, generated.  This may be \"output\", such as trace, or it could be \"meta\n    data\" such as logs.  These are distinguished using the ``kind`` attribute,\n    which also helps WA decide how it should be handled. Currently supported\n    kinds are:\n\n        :log: A log file. Not part of the \"output\" as such but contains\n              information about the run/workload execution that may be useful for\n              diagnostics/meta analysis.\n        :meta: A file containing metadata. This is not part of the \"output\", but\n               contains information that may be necessary to reproduce the\n               results (contrast with ``log`` artifacts which are *not*\n               necessary).\n        :data: This file contains new data, not available otherwise and should\n               be considered part of the \"output\" generated by WA. Most traces\n               would fall into this category.\n        :export: Exported version of results or some other artifact. 
This\n                 signifies that this artifact does not contain any new data\n                 that is not available elsewhere and that it may be safely\n                 discarded without losing information.\n        :raw: Signifies that this is a raw dump/log that is normally processed\n              to extract useful information and is then discarded. In a sense,\n              it is the opposite of ``export``, but in general may also be\n              discarded.\n\n              .. note:: whether a file is marked as ``log``/``data`` or ``raw``\n                        depends on how important it is to preserve this file,\n                        e.g. when archiving, vs how much space it takes up.\n                        Unlike ``export`` artifacts which are (almost) always\n                        ignored by other exporters as that would never result\n                        in data loss, ``raw`` files *may* be processed by\n                        exporters if they decided that the risk of losing\n                        potentially (though unlikely) useful data is greater\n                        than the time/space cost of handling the artifact (e.g.\n                        a database uploader may choose to ignore ``raw``\n                        artifacts, where as a network filer archiver may choose\n                        to archive them).\n\n        .. 
note: The kind parameter is intended to represent the logical\n                 function of a particular artifact, not its intended means of\n                 processing -- this is left entirely up to the output\n                 processors.\n\n    \"\"\"\n\n    _pod_serialization_version = 2\n\n    @staticmethod\n    def from_pod(pod):\n        pod = Artifact._upgrade_pod(pod)\n        pod_version = pod.pop('_pod_version')\n        pod['kind'] = ArtifactType(pod['kind'])\n        instance = Artifact(**pod)\n        instance._pod_version = pod_version  # pylint: disable =protected-access\n        instance.is_dir = pod.pop('is_dir')\n        return instance\n\n    def __init__(self, name, path, kind, description=None, classifiers=None,\n                 is_dir=False):\n        \"\"\"\n        :param name: Name that uniquely identifies this artifact.\n        :param path: The *relative* path of the artifact. Depending on the\n                     ``level`` must be either relative to the run or iteration\n                     output directory.  Note: this path *must* be delimited\n                     using ``/`` irrespective of the\n                     operating system.\n        :param kind: The type of the artifact this is (e.g. log file, result,\n                     etc.) this will be used as a hint to output processors. This\n                     must be one of ``'log'``, ``'meta'``, ``'data'``,\n                     ``'export'``, ``'raw'``.\n        :param description: A free-form description of what this artifact is.\n        :param classifiers: A set of key-value pairs to further classify this\n                            metric beyond current iteration (e.g. 
this can be\n                            used to identify sub-tests).\n        \"\"\"\n        super(Artifact, self).__init__()\n        self.name = name\n        self.path = path.replace('/', os.sep) if path is not None else path\n        try:\n            self.kind = ArtifactType(kind)\n        except ValueError:\n            msg = 'Invalid Artifact kind: {}; must be in {}'\n            raise ValueError(msg.format(kind, ARTIFACT_TYPES))\n        self.description = description\n        self.classifiers = classifiers or {}\n        self.is_dir = is_dir\n\n    def to_pod(self):\n        pod = super(Artifact, self).to_pod()\n        pod.update(self.__dict__)\n        pod['kind'] = str(self.kind)\n        pod['is_dir'] = self.is_dir\n        return pod\n\n    @staticmethod\n    def _pod_upgrade_v1(pod):\n        pod['_pod_version'] = pod.get('_pod_version', 1)\n        return pod\n\n    @staticmethod\n    def _pod_upgrade_v2(pod):\n        pod['is_dir'] = pod.get('is_dir', False)\n        return pod\n\n    def __str__(self):\n        return self.path\n\n    def __repr__(self):\n        ft = 'dir' if self.is_dir else 'file'\n        return '{} ({}) ({}): {}'.format(self.name, ft, self.kind, self.path)\n\n\nclass Metric(Podable):\n    \"\"\"\n    This is a single metric collected from executing a workload.\n\n    :param name: the name of the metric. Uniquely identifies the metric\n                 within the results.\n    :param value: The numerical value of the metric for this execution of a\n                  workload. This can be either an int or a float.\n    :param units: Units for the collected value. Can be None if the value\n                  has no units (e.g. it's a count or a standardised score).\n    :param lower_is_better: Boolean flag indicating where lower values are\n                            better than higher ones. 
Defaults to False.\n    :param classifiers: A set of key-value pairs to further classify this\n                        metric beyond current iteration (e.g. this can be used\n                        to identify sub-tests).\n\n    \"\"\"\n    __slots__ = ['name', 'value', 'units', 'lower_is_better', 'classifiers']\n    _pod_serialization_version = 1\n\n    @staticmethod\n    def from_pod(pod):\n        pod = Metric._upgrade_pod(pod)\n        pod_version = pod.pop('_pod_version')\n        instance = Metric(**pod)\n        instance._pod_version = pod_version  # pylint: disable =protected-access\n        return instance\n\n    @property\n    def label(self):\n        parts = ['{}={}'.format(n, v) for n, v in self.classifiers.items()]\n        parts.insert(0, self.name)\n        return '/'.join(parts)\n\n    def __init__(self, name, value, units=None, lower_is_better=False,\n                 classifiers=None):\n        super(Metric, self).__init__()\n        self.name = name\n        self.value = numeric(value)\n        self.units = units\n        self.lower_is_better = lower_is_better\n        self.classifiers = classifiers or {}\n\n    def to_pod(self):\n        pod = super(Metric, self).to_pod()\n        pod['name'] = self.name\n        pod['value'] = self.value\n        pod['units'] = self.units\n        pod['lower_is_better'] = self.lower_is_better\n        pod['classifiers'] = self.classifiers\n        return pod\n\n    @staticmethod\n    def _pod_upgrade_v1(pod):\n        pod['_pod_version'] = pod.get('_pod_version', 1)\n        return pod\n\n    def __str__(self):\n        result = '{}: {}'.format(self.name, self.value)\n        if self.units:\n            result += ' ' + self.units\n        result += ' ({})'.format('-' if self.lower_is_better else '+')\n        return result\n\n    def __repr__(self):\n        text = self.__str__()\n        if self.classifiers:\n            return '<{} {}>'.format(text, format_ordered_dict(self.classifiers))\n        else:\n    
        return '<{}>'.format(text)\n\n\nclass Event(Podable):\n    \"\"\"\n    An event that occurred during a run.\n\n    \"\"\"\n\n    __slots__ = ['timestamp', 'message']\n    _pod_serialization_version = 1\n\n    @staticmethod\n    def from_pod(pod):\n        pod = Event._upgrade_pod(pod)\n        pod_version = pod.pop('_pod_version')\n        instance = Event(pod['message'])\n        instance.timestamp = pod['timestamp']\n        instance._pod_version = pod_version  # pylint: disable =protected-access\n        return instance\n\n    @property\n    def summary(self):\n        lines = self.message.split('\\n')\n        result = lines[0]\n        if len(lines) > 1:\n            result += '[...]'\n        return result\n\n    def __init__(self, message):\n        super(Event, self).__init__()\n        self.timestamp = datetime.utcnow()\n        self.message = str(message)\n\n    def to_pod(self):\n        pod = super(Event, self).to_pod()\n        pod['timestamp'] = self.timestamp\n        pod['message'] = self.message\n        return pod\n\n    @staticmethod\n    def _pod_upgrade_v1(pod):\n        pod['_pod_version'] = pod.get('_pod_version', 1)\n        return pod\n\n    def __str__(self):\n        return '[{}] {}'.format(self.timestamp, self.message)\n\n    __repr__ = __str__\n\n\ndef init_run_output(path, wa_state, force=False):\n    if os.path.exists(path):\n        if force:\n            logger.info('Removing existing output directory.')\n            shutil.rmtree(os.path.abspath(path))\n        else:\n            raise RuntimeError('path exists: {}'.format(path))\n\n    logger.info('Creating output directory.')\n    os.makedirs(path)\n    meta_dir = os.path.join(path, '__meta')\n    os.makedirs(meta_dir)\n    _save_raw_config(meta_dir, wa_state)\n    touch(os.path.join(path, 'run.log'))\n\n    info = RunInfo(\n        run_name=wa_state.run_config.run_name,\n        project=wa_state.run_config.project,\n        
project_stage=wa_state.run_config.project_stage,\n    )\n    write_pod(info.to_pod(), os.path.join(meta_dir, 'run_info.json'))\n    write_pod(RunState().to_pod(), os.path.join(path, '.run_state.json'))\n    write_pod(Result().to_pod(), os.path.join(path, 'result.json'))\n\n    ro = RunOutput(path)\n    ro.update_metadata('versions', 'wa', get_wa_version_with_commit())\n    ro.update_metadata('versions', 'devlib', devlib.__full_version__)\n\n    return ro\n\n\ndef init_job_output(run_output, job):\n    output_name = '{}-{}-{}'.format(job.id, job.spec.label, job.iteration)\n    path = os.path.join(run_output.basepath, output_name)\n    ensure_directory_exists(path)\n    write_pod(Result().to_pod(), os.path.join(path, 'result.json'))\n    job_output = JobOutput(path, job.id, job.label, job.iteration, job.retries)\n    job_output.spec = job.spec\n    job_output.status = job.status\n    run_output.jobs.append(job_output)\n    return job_output\n\n\ndef discover_wa_outputs(path):\n    # Use topdown=True to allow pruning dirs\n    for root, dirs, _ in os.walk(path, topdown=True):\n        if '__meta' in dirs:\n            yield RunOutput(root)\n            # Avoid recursing into the artifact as it can be very lengthy if a\n            # large number of file is present (sysfs dump)\n            dirs.clear()\n\n\ndef _save_raw_config(meta_dir, state):\n    raw_config_dir = os.path.join(meta_dir, 'raw_config')\n    os.makedirs(raw_config_dir)\n\n    for i, source in enumerate(state.loaded_config_sources):\n        if not os.path.isfile(source):\n            continue\n        basename = os.path.basename(source)\n        dest_path = os.path.join(raw_config_dir, 'cfg{}-{}'.format(i, basename))\n        shutil.copy(source, dest_path)\n\n\nclass DatabaseOutput(Output):\n\n    kind = None\n\n    @property\n    def resultfile(self):\n        if self.conn is None or self.oid is None:\n            return {}\n        pod = self._get_pod_version()\n        pod['metrics'] = 
self._get_metrics()\n        pod['status'] = self._get_status()\n        pod['classifiers'] = self._get_classifiers(self.oid, 'run')\n        pod['events'] = self._get_events()\n        pod['artifacts'] = self._get_artifacts()\n        return pod\n\n    @staticmethod\n    def _build_command(columns, tables, conditions=None, joins=None):\n        cmd = '''SELECT\\n\\t{}\\nFROM\\n\\t{}'''.format(',\\n\\t'.join(columns), ',\\n\\t'.join(tables))\n        if joins:\n            for join in joins:\n                cmd += '''\\nLEFT JOIN {} ON {}'''.format(join[0], join[1])\n        if conditions:\n            cmd += '''\\nWHERE\\n\\t{}'''.format('\\nAND\\n\\t'.join(conditions))\n        return cmd + ';'\n\n    def __init__(self, conn, oid=None, reload=True):  # pylint: disable=super-init-not-called\n        self.conn = conn\n        self.oid = oid\n        self.result = None\n        if reload:\n            self.reload()\n\n    def __repr__(self):\n        return '<{} {}>'.format(self.__class__.__name__, self.oid)\n\n    def __str__(self):\n        return self.oid\n\n    def reload(self):\n        try:\n            self.result = Result.from_pod(self.resultfile)\n        except Exception as e:  # pylint: disable=broad-except\n            self.result = Result()\n            self.result.status = Status.UNKNOWN\n            self.add_event(str(e))\n\n    def get_artifact_path(self, name):\n        artifact = self.get_artifact(name)\n        if artifact.is_dir:\n            return self._read_dir_artifact(artifact)\n        else:\n            return self._read_file_artifact(artifact)\n\n    def _read_dir_artifact(self, artifact):\n        artifact_path = tempfile.mkdtemp(prefix='wa_')\n        with tarfile.open(fileobj=self.conn.lobject(int(artifact.path), mode='b'), mode='r|gz') as tar_file:\n            safe_extract(tar_file, artifact_path)\n        self.conn.commit()\n        return artifact_path\n\n    def _read_file_artifact(self, artifact):\n        artifact = 
StringIO(self.conn.lobject(int(artifact.path)).read())\n        self.conn.commit()\n        return artifact\n\n    # pylint: disable=too-many-locals\n    def _read_db(self, columns, tables, conditions=None, join=None, as_dict=True):\n        # Automatically remove table name from column when using column names as keys or\n        # allow for column names to be aliases when retrieving the data,\n        # (db_column_name, alias)\n        db_columns = []\n        aliases_colunms = []\n        for column in columns:\n            if isinstance(column, tuple):\n                db_columns.append(column[0])\n                aliases_colunms.append(column[1])\n            else:\n                db_columns.append(column)\n                aliases_colunms.append(column.rsplit('.', 1)[-1])\n\n        cmd = self._build_command(db_columns, tables, conditions, join)\n\n        logger.debug(cmd)\n        with self.conn.cursor() as cursor:\n            cursor.execute(cmd)\n            results = cursor.fetchall()\n        self.conn.commit()\n\n        if not as_dict:\n            return results\n\n        # Format the output dict using column names as keys\n        output = []\n        for result in results:\n            entry = {}\n            for k, v in zip(aliases_colunms, result):\n                entry[k] = v\n            output.append(entry)\n        return output\n\n    def _get_pod_version(self):\n        columns = ['_pod_version', '_pod_serialization_version']\n        tables = ['{}s'.format(self.kind)]\n        conditions = ['{}s.oid = \\'{}\\''.format(self.kind, self.oid)]\n        results = self._read_db(columns, tables, conditions)\n        if results:\n            return results[0]\n        else:\n            return None\n\n    def _populate_classifers(self, pod, kind):\n        for entry in pod:\n            oid = entry.pop('oid')\n            entry['classifiers'] = self._get_classifiers(oid, kind)\n        return pod\n\n    def _get_classifiers(self, oid, kind):\n    
    columns = ['classifiers.key', 'classifiers.value']\n        tables = ['classifiers']\n        conditions = ['{}_oid = \\'{}\\''.format(kind, oid)]\n        results = self._read_db(columns, tables, conditions, as_dict=False)\n        classifiers = {}\n        for (k, v) in results:\n            classifiers[k] = v\n        return classifiers\n\n    def _get_metrics(self):\n        columns = ['metrics.name', 'metrics.value', 'metrics.units',\n                   'metrics.lower_is_better',\n                   'metrics.oid', 'metrics._pod_version',\n                   'metrics._pod_serialization_version']\n        tables = ['metrics']\n        joins = [('classifiers', 'classifiers.metric_oid = metrics.oid')]\n        conditions = ['metrics.{}_oid  = \\'{}\\''.format(self.kind, self.oid)]\n        pod = self._read_db(columns, tables, conditions, joins)\n        return self._populate_classifers(pod, 'metric')\n\n    def _get_status(self):\n        columns = ['{}s.status'.format(self.kind)]\n        tables = ['{}s'.format(self.kind)]\n        conditions = ['{}s.oid = \\'{}\\''.format(self.kind, self.oid)]\n        results = self._read_db(columns, tables, conditions, as_dict=False)\n        if results:\n            return results[0][0]\n        else:\n            return None\n\n    def _get_artifacts(self):\n        columns = ['artifacts.name', 'artifacts.description', 'artifacts.kind',\n                   ('largeobjects.lo_oid', 'path'), 'artifacts.oid', 'artifacts.is_dir',\n                   'artifacts._pod_version', 'artifacts._pod_serialization_version']\n        tables = ['largeobjects', 'artifacts']\n        joins = [('classifiers', 'classifiers.artifact_oid = artifacts.oid')]\n        conditions = ['artifacts.{}_oid = \\'{}\\''.format(self.kind, self.oid),\n                      'artifacts.large_object_uuid = largeobjects.oid']\n        # If retrieving run level artifacts we want those that don't also belong to a job\n        if self.kind == 'run':\n            
conditions.append('artifacts.job_oid IS NULL')\n        pod = self._read_db(columns, tables, conditions, joins)\n        for artifact in pod:\n            artifact['path'] = str(artifact['path'])\n        return self._populate_classifers(pod, 'metric')\n\n    def _get_events(self):\n        columns = ['events.message', 'events.timestamp']\n        tables = ['events']\n        conditions = ['events.{}_oid = \\'{}\\''.format(self.kind, self.oid)]\n        return self._read_db(columns, tables, conditions)\n\n\ndef kernel_config_from_db(raw):\n    kernel_config = {}\n    if raw:\n        for k, v in zip(raw[0], raw[1]):\n            kernel_config[k] = v\n    return kernel_config\n\n\nclass RunDatabaseOutput(DatabaseOutput, RunOutputCommon):\n\n    kind = 'run'\n\n    @property\n    def basepath(self):\n        return 'db:({})-{}@{}:{}'.format(self.dbname, self.user,\n                                         self.host, self.port)\n\n    @property\n    def augmentations(self):\n        columns = ['augmentations.name']\n        tables = ['augmentations']\n        conditions = ['augmentations.run_oid = \\'{}\\''.format(self.oid)]\n        results = self._read_db(columns, tables, conditions, as_dict=False)\n        return [a for augs in results for a in augs]\n\n    @property\n    def _db_infofile(self):\n        columns = ['start_time', 'project', ('run_uuid', 'uuid'), 'end_time',\n                   'run_name', 'duration', '_pod_version', '_pod_serialization_version']\n        tables = ['runs']\n        conditions = ['runs.run_uuid = \\'{}\\''.format(self.run_uuid)]\n        pod = self._read_db(columns, tables, conditions)\n        if not pod:\n            return {}\n        return pod[0]\n\n    @property\n    def _db_targetfile(self):\n        columns = ['os', 'is_rooted', 'target', 'modules', 'abi', 'cpus', 'os_version',\n                   'hostid', 'hostname', 'kernel_version', 'kernel_release',\n                   'kernel_sha1', 'kernel_config', 'sched_features', 
'page_size_kb',\n                   'system_id', 'screen_resolution', 'prop', 'android_id',\n                   '_pod_version', '_pod_serialization_version']\n        tables = ['targets']\n        conditions = ['targets.run_oid = \\'{}\\''.format(self.oid)]\n        pod = self._read_db(columns, tables, conditions)\n        if not pod:\n            return {}\n        pod = pod[0]\n        try:\n            pod['cpus'] = [json.loads(cpu) for cpu in pod.pop('cpus')]\n        except SerializerSyntaxError:\n            pod['cpus'] = []\n            logger.debug('Failed to deserialize target cpu information')\n        pod['kernel_config'] = kernel_config_from_db(pod['kernel_config'])\n        return pod\n\n    @property\n    def _db_statefile(self):\n        # Read overall run information\n        columns = ['runs.state']\n        tables = ['runs']\n        conditions = ['runs.run_uuid = \\'{}\\''.format(self.run_uuid)]\n        pod = self._read_db(columns, tables, conditions)\n        pod = pod[0].get('state')\n        if not pod:\n            return {}\n\n        # Read job information\n        columns = ['jobs.job_id', 'jobs.oid']\n        tables = ['jobs']\n        conditions = ['jobs.run_oid = \\'{}\\''.format(self.oid)]\n        job_oids = self._read_db(columns, tables, conditions)\n\n        # Match job oid with jobs from state file\n        for job in pod.get('jobs', []):\n            for job_oid in job_oids:\n                if job['id'] == job_oid['job_id']:\n                    job['oid'] = job_oid['oid']\n                    break\n        return pod\n\n    @property\n    def _db_jobsfile(self):\n        workload_params = self._get_parameters('workload')\n        runtime_params = self._get_parameters('runtime')\n\n        columns = [('jobs.job_id', 'id'), 'jobs.label', 'jobs.workload_name',\n                   'jobs.oid', 'jobs._pod_version', 'jobs._pod_serialization_version']\n        tables = ['jobs']\n        conditions = ['jobs.run_oid = 
\\'{}\\''.format(self.oid)]\n        jobs = self._read_db(columns, tables, conditions)\n\n        for job in jobs:\n            job['augmentations'] = self._get_job_augmentations(job['oid'])\n            job['workload_parameters'] = workload_params.pop(job['oid'], {})\n            job['runtime_parameters'] = runtime_params.pop(job['oid'], {})\n            job.pop('oid')\n        return jobs\n\n    @property\n    def _db_run_config(self):\n        pod = defaultdict(dict)\n        parameter_types = ['augmentation', 'resource_getter']\n        for parameter_type in parameter_types:\n            columns = ['parameters.name', 'parameters.value',\n                       'parameters.value_type',\n                       ('{}s.name'.format(parameter_type), '{}'.format(parameter_type))]\n            tables = ['parameters', '{}s'.format(parameter_type)]\n            conditions = ['parameters.run_oid = \\'{}\\''.format(self.oid),\n                          'parameters.type = \\'{}\\''.format(parameter_type),\n                          'parameters.{0}_oid = {0}s.oid'.format(parameter_type)]\n            configs = self._read_db(columns, tables, conditions)\n            for config in configs:\n                entry = {config['name']: json.loads(config['value'])}\n                pod['{}s'.format(parameter_type)][config.pop(parameter_type)] = entry\n\n        # run config\n        columns = ['runs.max_retries', 'runs.allow_phone_home',\n                   'runs.bail_on_init_failure', 'runs.retry_on_status']\n        tables = ['runs']\n        conditions = ['runs.oid = \\'{}\\''.format(self.oid)]\n        config = self._read_db(columns, tables, conditions)\n        if not config:\n            return {}\n\n        config = config[0]\n        # Convert back into a string representation of an enum list\n        config['retry_on_status'] = config['retry_on_status'][1:-1].split(',')\n        pod.update(config)\n        return pod\n\n    def __init__(self,\n                 
password=None,\n                 dbname='wa',\n                 host='localhost',\n                 port='5432',\n                 user='postgres',\n                 run_uuid=None,\n                 list_runs=False):\n\n        if psycopg2 is None:\n            msg = 'Please install the psycopg2 in order to connect to postgres databases'\n            raise HostError(msg)\n\n        self.dbname = dbname\n        self.host = host\n        self.port = port\n        self.user = user\n        self.password = password\n        self.run_uuid = run_uuid\n        self.conn = None\n\n        self.info = None\n        self.state = None\n        self.result = None\n        self.target_info = None\n        self._combined_config = None\n        self.jobs = []\n        self.job_specs = []\n\n        self.connect()\n        super(RunDatabaseOutput, self).__init__(conn=self.conn, reload=False)\n\n        local_schema_version, db_schema_version = get_schema_versions(self.conn)\n        if local_schema_version != db_schema_version:\n            self.disconnect()\n            msg = 'The current database schema is v{} however the local ' \\\n                  'schema version is v{}. 
Please update your database ' \\\n                  'with the create command'\n            raise HostError(msg.format(db_schema_version, local_schema_version))\n\n        if list_runs:\n            print('Available runs are:')\n            self._list_runs()\n            self.disconnect()\n            return\n        if not self.run_uuid:\n            print('Please specify \"Run uuid\"')\n            self._list_runs()\n            self.disconnect()\n            return\n\n        if not self.oid:\n            self.oid = self._get_oid()\n        self.reload()\n\n    def read_job_specs(self):\n        job_specs = []\n        for job in self._db_jobsfile:\n            job_specs.append(JobSpec.from_pod(job))\n        return job_specs\n\n    def connect(self):\n        if self.conn and not self.conn.closed:\n            return\n        try:\n            self.conn = psycopg2.connect(dbname=self.dbname,\n                                         user=self.user,\n                                         host=self.host,\n                                         password=self.password,\n                                         port=self.port)\n        except Psycopg2Error as e:\n            raise HostError('Unable to connect to the Database: \"{}'.format(e.args[0]))\n\n    def disconnect(self):\n        self.conn.commit()\n        self.conn.close()\n\n    def reload(self):\n        super(RunDatabaseOutput, self).reload()\n        info_pod = self._db_infofile\n        state_pod = self._db_statefile\n        if not info_pod or not state_pod:\n            msg = '\"{}\" does not appear to be a valid WA Database Output.'\n            raise ValueError(msg.format(self.oid))\n\n        self.info = RunInfo.from_pod(info_pod)\n        self.state = RunState.from_pod(state_pod)\n        self._combined_config = CombinedConfig.from_pod({'run_config': self._db_run_config})\n        self.target_info = TargetInfo.from_pod(self._db_targetfile)\n        self.job_specs = self.read_job_specs()\n\n  
      for job_state in self._db_statefile['jobs']:\n            job = JobDatabaseOutput(self.conn, job_state.get('oid'), job_state['id'],\n                                    job_state['label'], job_state['iteration'],\n                                    job_state['retries'])\n            job.status = job_state['status']\n            job.spec = self.get_job_spec(job.id)\n            if job.spec is None:\n                logger.warning('Could not find spec for job {}'.format(job.id))\n            self.jobs.append(job)\n\n    def _get_oid(self):\n        columns = ['{}s.oid'.format(self.kind)]\n        tables = ['{}s'.format(self.kind)]\n        conditions = ['runs.run_uuid = \\'{}\\''.format(self.run_uuid)]\n        oid = self._read_db(columns, tables, conditions, as_dict=False)\n        if not oid:\n            raise ConfigError('No matching run entries found for run_uuid {}'.format(self.run_uuid))\n        if len(oid) > 1:\n            raise ConfigError('Multiple entries found for run_uuid: {}'.format(self.run_uuid))\n        return oid[0][0]\n\n    def _get_parameters(self, param_type):\n        columns = ['parameters.job_oid', 'parameters.name', 'parameters.value']\n        tables = ['parameters']\n        conditions = ['parameters.type = \\'{}\\''.format(param_type),\n                      'parameters.run_oid = \\'{}\\''.format(self.oid)]\n        params = self._read_db(columns, tables, conditions, as_dict=False)\n        parm_dict = defaultdict(dict)\n        for (job_oid, k, v) in params:\n            try:\n                parm_dict[job_oid][k] = json.loads(v)\n            except SerializerSyntaxError:\n                logger.debug('Failed to deserialize job_oid:{}-\"{}\":\"{}\"'.format(job_oid, k, v))\n        return parm_dict\n\n    def _get_job_augmentations(self, job_oid):\n        columns = ['jobs_augs.augmentation_oid', 'augmentations.name',\n                   'augmentations.oid', 'jobs_augs.job_oid']\n        tables = ['jobs_augs', 'augmentations']\n 
       conditions = ['jobs_augs.job_oid = \\'{}\\''.format(job_oid),\n                      'jobs_augs.augmentation_oid = augmentations.oid']\n        augmentations = self._read_db(columns, tables, conditions)\n        return [aug['name'] for aug in augmentations]\n\n    def _list_runs(self):\n        columns = ['runs.run_uuid', 'runs.run_name', 'runs.project',\n                   'runs.project_stage', 'runs.status', 'runs.start_time', 'runs.end_time']\n        tables = ['runs']\n        pod = self._read_db(columns, tables)\n        if pod:\n            headers = ['Run Name', 'Project', 'Project Stage', 'Start Time', 'End Time',\n                       'run_uuid']\n            run_list = []\n            for entry in pod:\n                # Format times to display better\n                start_time = entry['start_time']\n                end_time = entry['end_time']\n                if start_time:\n                    start_time = start_time.strftime(\"%Y-%m-%d %H:%M:%S\")\n                if end_time:\n                    end_time = end_time.strftime(\"%Y-%m-%d %H:%M:%S\")\n\n                run_list.append([\n                                entry['run_name'],\n                                entry['project'],\n                                entry['project_stage'],\n                                start_time,\n                                end_time,\n                                entry['run_uuid']])\n\n            print(format_simple_table(run_list, headers))\n        else:\n            print('No Runs Found')\n\n\nclass JobDatabaseOutput(DatabaseOutput):\n\n    kind = 'job'\n\n    def __init__(self, conn, oid, job_id, label, iteration, retry):\n        super(JobDatabaseOutput, self).__init__(conn, oid=oid)\n        self.id = job_id\n        self.label = label\n        self.iteration = iteration\n        self.retry = retry\n        self.result = None\n        self.spec = None\n        self.reload()\n\n    def __repr__(self):\n        return '<{} 
{}-{}-{}>'.format(self.__class__.__name__,\n                                      self.id, self.label, self.iteration)\n\n    def __str__(self):\n        return '{}-{}-{}'.format(self.id, self.label, self.iteration)\n\n    @property\n    def augmentations(self):\n        job_augs = set([])\n        if self.spec:\n            for aug in self.spec.augmentations:\n                job_augs.add(aug)\n        return list(job_augs)\n"
  },
  {
    "path": "wa/framework/output_processor.py",
    "content": "#    Copyright 2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\nimport logging\n\nfrom wa.framework import pluginloader\nfrom wa.framework.exception import ConfigError\nfrom wa.framework.instrument import is_installed\nfrom wa.framework.plugin import Plugin\nfrom wa.utils.log import log_error, indentcontext\nfrom wa.utils.misc import isiterable\nfrom wa.utils.types import identifier\n\n\nclass OutputProcessor(Plugin):\n\n    kind = 'output_processor'\n    requires = []\n\n    def __init__(self, **kwargs):\n        super(OutputProcessor, self).__init__(**kwargs)\n        self.is_enabled = True\n\n    def validate(self):\n        super(OutputProcessor, self).validate()\n        for instrument in self.requires:\n            if not is_installed(instrument):\n                msg = 'Instrument \"{}\" is required by {}, but is not installed.'\n                raise ConfigError(msg.format(instrument, self.name))\n\n    def initialize(self, context):\n        pass\n\n    def finalize(self, context):\n        pass\n\n\nclass ProcessorManager(object):\n\n    def __init__(self, loader=pluginloader):\n        self.loader = loader\n        self.logger = logging.getLogger('processor')\n        self.processors = []\n\n    def install(self, processor, context):\n        if not isinstance(processor, OutputProcessor):\n            processor = self.loader.get_output_processor(processor)\n        self.logger.debug('Installing 
{}'.format(processor.name))\n        processor.logger.context = context\n        self.processors.append(processor)\n        context.add_augmentation(processor)\n\n    def disable_all(self):\n        for output_processor in self.processors:\n            self._disable_output_processor(output_processor)\n\n    def enable_all(self):\n        for output_processor in self.processors:\n            self._enable_output_processor(output_processor)\n\n    def enable(self, to_enable):\n        if isiterable(to_enable):\n            for inst in to_enable:\n                self._enable_output_processor(inst)\n        else:\n            self._enable_output_processor(to_enable)\n\n    def disable(self, to_disable):\n        if isiterable(to_disable):\n            for inst in to_disable:\n                self._disable_output_processor(inst)\n        else:\n            self._disable_output_processor(to_disable)\n\n    def get_output_processor(self, processor):\n        if isinstance(processor, OutputProcessor):\n            return processor\n\n        processor = identifier(processor)\n        for p in self.processors:\n            if processor == p.name:\n                return p\n        raise ValueError('Output processor {} is not installed'.format(processor))\n\n    def get_enabled(self):\n        return [p for p in self.processors if p.is_enabled]\n\n    def get_disabled(self):\n        return [p for p in self.processors if not p.is_enabled]\n\n    def validate(self):\n        for proc in self.processors:\n            proc.validate()\n\n    def initialize(self, context):\n        for proc in self.processors:\n            proc.initialize(context)\n\n    def finalize(self, context):\n        for proc in self.processors:\n            proc.finalize(context)\n\n    def process_job_output(self, context):\n        self.do_for_each_proc('process_job_output', 'Processing using \"{}\"',\n                              context.job_output, context.target_info,\n                              
context.run_output)\n\n    def export_job_output(self, context):\n        self.do_for_each_proc('export_job_output', 'Exporting using \"{}\"',\n                              context.job_output, context.target_info,\n                              context.run_output)\n\n    def process_run_output(self, context):\n        self.do_for_each_proc('process_run_output', 'Processing using \"{}\"',\n                              context.run_output, context.target_info)\n\n    def export_run_output(self, context):\n        self.do_for_each_proc('export_run_output', 'Exporting using \"{}\"',\n                              context.run_output, context.target_info)\n\n    def do_for_each_proc(self, method_name, message, *args):\n        with indentcontext():\n            for proc in self.processors:\n                if proc.is_enabled:\n                    proc_func = getattr(proc, method_name, None)\n                    if proc_func is None:\n                        continue\n                    try:\n                        self.logger.info(message.format(proc.name))\n                        proc_func(*args)\n                    except Exception as e:  # pylint: disable=broad-except\n                        if isinstance(e, KeyboardInterrupt):\n                            raise\n                        log_error(e, self.logger)\n\n    def _enable_output_processor(self, inst):\n        inst = self.get_output_processor(inst)\n        self.logger.debug('Enabling output processor {}'.format(inst.name))\n        if not inst.is_enabled:\n            inst.is_enabled = True\n\n    def _disable_output_processor(self, inst):\n        inst = self.get_output_processor(inst)\n        self.logger.debug('Disabling output processor {}'.format(inst.name))\n        if inst.is_enabled:\n            inst.is_enabled = False\n"
  },
  {
    "path": "wa/framework/plugin.py",
    "content": "#    Copyright 2013-2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\n\n# pylint: disable=E1101\nimport os\nimport sys\nimport inspect\nimport logging\nfrom collections import OrderedDict, defaultdict\nfrom itertools import chain\nfrom copy import copy\n\nfrom future.utils import with_metaclass\n\nfrom wa.framework.configuration.core import settings, ConfigurationPoint as Parameter\nfrom wa.framework.exception import (NotFoundError, PluginLoaderError, TargetError,\n                                    ValidationError, ConfigError, HostError)\nfrom wa.utils import log\nfrom wa.utils.misc import (ensure_directory_exists as _d, walk_modules, load_class,\n                           merge_dicts_simple, get_article, import_path)\nfrom wa.utils.types import identifier\n\n\nclass AttributeCollection(object):\n    \"\"\"\n    Accumulator for plugin attribute objects (such as Parameters or Artifacts).\n\n    This will replace any class member list accumulating such attributes\n    through the magic of metaprogramming\\ [*]_.\n\n    .. 
[*] which is totally safe and not going backfire in any way...\n\n    \"\"\"\n\n    @property\n    def values(self):\n        return list(self._attrs.values())\n\n    def __init__(self, attrcls):\n        self._attrcls = attrcls\n        self._attrs = OrderedDict()\n\n    def add(self, p):\n        p = self._to_attrcls(p)\n        if p.name in self._attrs:\n            if p.override:\n                newp = copy(self._attrs[p.name])\n                for a, v in p.__dict__.items():\n                    if v is not None:\n                        setattr(newp, a, v)\n                if not hasattr(newp, \"_overridden\"):\n                    # pylint: disable=protected-access\n                    newp._overridden = p._owner\n                self._attrs[p.name] = newp\n            else:\n                # Duplicate attribute condition is check elsewhere.\n                pass\n        else:\n            self._attrs[p.name] = p\n\n    append = add\n\n    def __str__(self):\n        return 'AC({})'.format(list(map(str, list(self._attrs.values()))))\n\n    __repr__ = __str__\n\n    def _to_attrcls(self, p):\n        if not isinstance(p, self._attrcls):\n            raise ValueError('Invalid attribute value: {}; must be a {}'.format(p, self._attrcls))\n        if p.name in self._attrs and not p.override:\n            raise ValueError('Attribute {} has already been defined.'.format(p.name))\n        return p\n\n    def __iadd__(self, other):\n        for p in other:\n            self.add(p)\n        return self\n\n    def __iter__(self):\n        return iter(self.values)\n\n    def __contains__(self, p):\n        return p in self._attrs\n\n    def __getitem__(self, i):\n        return self._attrs[i]\n\n    def __len__(self):\n        return len(self._attrs)\n\n\nclass AliasCollection(AttributeCollection):\n\n    def __init__(self):\n        super(AliasCollection, self).__init__(Alias)\n\n    def _to_attrcls(self, p):\n        if isinstance(p, (list, tuple)):\n            # 
must be in the form (name, {param: value, ...})\n            # pylint: disable=protected-access\n            p = self._attrcls(p[1], **p[1])\n        elif not isinstance(p, self._attrcls):\n            raise ValueError('Invalid parameter value: {}'.format(p))\n        if p.name in self._attrs:\n            raise ValueError('Attribute {} has already been defined.'.format(p.name))\n        return p\n\n\nclass ListCollection(list):\n\n    def __init__(self, attrcls):  # pylint: disable=unused-argument\n        super(ListCollection, self).__init__()\n\n\nclass Alias(object):\n    \"\"\"\n    This represents a configuration alias for an plugin, mapping an alternative\n    name to a set of parameter values, effectively providing an alternative set\n    of default values.\n\n    \"\"\"\n\n    def __init__(self, name, **kwargs):\n        self.name = name\n        self.params = kwargs\n        self.plugin_name = None  # gets set by the MetaClass\n\n    def validate(self, ext):\n        ext_params = set(p.name for p in ext.parameters)\n        for param in self.params:\n            if param not in ext_params:\n                # Raising config error because aliases might have come through\n                # the config.\n                msg = 'Parameter {} (defined in alias {}) is invalid for {}'\n                raise ConfigError(msg.format(param, self.name, ext.name))\n\n\n# pylint: disable=bad-mcs-classmethod-argument\nclass PluginMeta(type):\n    \"\"\"\n    This basically adds some magic to plugins to make implementing new plugins,\n    such as workloads less complicated.\n\n    It ensures that certain class attributes (specified by the ``to_propagate``\n    attribute of the metaclass) get propagated down the inheritance hierarchy.\n    The assumption is that the values of the attributes specified in the class\n    are iterable; if that is not met, Bad Things (tm) will happen.\n\n    \"\"\"\n\n    to_propagate = [\n        ('parameters', Parameter, AttributeCollection),\n 
   ]\n\n    def __new__(mcs, clsname, bases, attrs):\n        mcs._propagate_attributes(bases, attrs, clsname)\n        cls = type.__new__(mcs, clsname, bases, attrs)\n        mcs._setup_aliases(cls)\n        return cls\n\n    @classmethod\n    def _propagate_attributes(mcs, bases, attrs, clsname):  # pylint: disable=too-many-locals\n        # pylint: disable=protected-access\n        \"\"\"\n        For attributes specified by to_propagate, their values will be a union of\n        that specified for cls and its bases (cls values overriding those of bases\n        in case of conflicts).\n\n        \"\"\"\n        for prop_attr, attr_cls, attr_collector_cls in mcs.to_propagate:\n            should_propagate = False\n            propagated = attr_collector_cls(attr_cls)\n            for base in bases:\n                if hasattr(base, prop_attr):\n                    propagated += getattr(base, prop_attr) or []\n                    should_propagate = True\n            if prop_attr in attrs:\n                pattrs = attrs[prop_attr] or []\n                for pa in pattrs:\n                    if not isinstance(pa, attr_cls):\n                        msg = 'Invalid value \"{}\" for attribute \"{}\"; must be a {}'\n                        raise ValueError(msg.format(pa, prop_attr, attr_cls))\n                    pa._owner = clsname\n                propagated += pattrs\n                should_propagate = True\n            if should_propagate:\n                for p in propagated:\n                    override = bool(getattr(p, \"override\", None))\n                    overridden = bool(getattr(p, \"_overridden\", None))\n                    if override != overridden:\n                        msg = \"Overriding non existing parameter '{}' inside '{}'\"\n                        raise ValueError(msg.format(p.name, p._owner))\n                attrs[prop_attr] = propagated\n\n    @classmethod\n    def _setup_aliases(mcs, cls):\n        if hasattr(cls, 'aliases'):\n         
   aliases, cls.aliases = cls.aliases, AliasCollection()\n            for alias in aliases:\n                if isinstance(alias, str):\n                    alias = Alias(alias)\n                alias.validate(cls)\n                alias.plugin_name = cls.name\n                cls.aliases.add(alias)\n\n\nclass Plugin(with_metaclass(PluginMeta, object)):\n    \"\"\"\n    Base class for all WA plugins. An plugin is basically a plug-in.  It\n    extends the functionality of WA in some way. Plugins are discovered and\n    loaded dynamically by the plugin loader upon invocation of WA scripts.\n    Adding an plugin is a matter of placing a class that implements an\n    appropriate interface somewhere it would be discovered by the loader. That\n    \"somewhere\" is typically one of the plugin subdirectories under\n    ``~/.workload_automation/``.\n\n    \"\"\"\n\n    kind = None\n    name = None\n    parameters = []\n    artifacts = []\n    aliases = []\n    core_modules = []\n\n    @classmethod\n    def get_default_config(cls):\n        return {p.name: p.default for p in cls.parameters if not p.deprecated}\n\n    @property\n    def dependencies_directory(self):\n        return _d(os.path.join(settings.dependencies_directory, self.name))\n\n    @property\n    def _classname(self):\n        return self.__class__.__name__\n\n    def __init__(self, **kwargs):\n        self.logger = logging.getLogger(self.name)\n        self._modules = []\n        self.capabilities = getattr(self.__class__, 'capabilities', [])\n        for param in self.parameters:\n            param.set_value(self, kwargs.get(param.name))\n        for key in kwargs:\n            if key not in self.parameters:\n                message = 'Unexpected parameter \"{}\" for {}'\n                raise ConfigError(message.format(key, self.name))\n\n    def get_config(self):\n        \"\"\"\n        Returns current configuration (i.e. 
parameter values) of this plugin.\n\n        \"\"\"\n        config = {}\n        for param in self.parameters:\n            config[param.name] = getattr(self, param.name, None)\n        return config\n\n    def validate(self):\n        \"\"\"\n        Perform basic validation to ensure that this plugin is capable of\n        running.  This is intended as an early check to ensure the plugin has\n        not been mis-configured, rather than a comprehensive check (that may,\n        e.g., require access to the execution context).\n\n        This method may also be used to enforce (i.e. set as well as check)\n        inter-parameter constraints for the plugin (e.g. if valid values for\n        parameter A depend on the value of parameter B -- something that is not\n        possible to enfroce using ``Parameter``\\ 's ``constraint`` attribute.\n\n        \"\"\"\n        if self.name is None:\n            raise ValidationError('Name not set for {}'.format(self._classname))\n        for param in self.parameters:\n            param.validate(self)\n\n    def __getattr__(self, name):\n        if name == '_modules':\n            raise ValueError('_modules accessed too early!')\n        for module in self._modules:\n            if hasattr(module, name):\n                return getattr(module, name)\n        raise AttributeError(name)\n\n    def load_modules(self, loader):\n        \"\"\"\n        Load the modules specified by the \"modules\" Parameter using the\n        provided loader. A loader can be any object that has an atribute called\n        \"get_module\" that implements the following signature::\n\n            get_module(name, owner, **kwargs)\n\n        and returns an instance of :class:`wa.core.plugin.Module`. 
If the\n        module with the specified name is not found, the loader must raise an\n        appropriate exception.\n\n        \"\"\"\n        modules = list(reversed(self.core_modules))\n        modules += list(reversed(self.modules or []))\n        if not modules:\n            return\n        for module_spec in modules:\n            if not module_spec:\n                continue\n            module = self._load_module(loader, module_spec)\n            self._install_module(module)\n\n    def has(self, capability):\n        \"\"\"\n        Check if this plugin has the specified capability. The alternative\n        method ``can`` is identical to this. Which to use is up to the caller\n        depending on what makes semantic sense in the context of the\n        capability, e.g. ``can('hard_reset')`` vs  ``has('active_cooling')``.\n\n        \"\"\"\n        return capability in self.capabilities\n\n    can = has\n\n    def _load_module(self, loader, module_spec):\n        if isinstance(module_spec, str):\n            name = module_spec\n            params = {}\n        elif isinstance(module_spec, dict):\n            if len(module_spec) != 1:\n                msg = 'Invalid module spec: {}; dict must have exctly one key -- '\\\n                      'the module name.'\n                raise ValueError(msg.format(module_spec))\n            name, params = list(module_spec.items())[0]\n        else:\n            message = 'Invalid module spec: {}; must be a string or a one-key dict.'\n            raise ValueError(message.format(module_spec))\n\n        if not isinstance(params, dict):\n            message = 'Invalid module spec: {}; dict value must also be a dict.'\n            raise ValueError(message.format(module_spec))\n\n        module = loader.get_module(name, owner=self, **params)\n        module.initialize(None)\n        return module\n\n    def _install_module(self, module):\n        for capability in module.capabilities:\n            if capability not in 
self.capabilities:\n                self.capabilities.append(capability)\n        self._modules.append(module)\n\n    def __str__(self):\n        return str(self.name)\n\n    def __repr__(self):\n        params = []\n        for param in self.parameters:\n            params.append('{}={}'.format(param.name,\n                                         getattr(self, param.name, None)))\n        return '{}({})'.format(self.name, ', '.join(params))\n\n\nclass TargetedPlugin(Plugin):\n    \"\"\"\n    A plugin that interacts with a target device.\n\n    \"\"\"\n\n    supported_targets = []\n    parameters = [\n        Parameter('cleanup_assets', kind=bool,\n                  global_alias='cleanup_assets',\n                  aliases=['clean_up'],\n                  default=True,\n                  description=\"\"\"\n                  If ``True``, assets that are deployed or created by the\n                  plugin will be removed again from the device.\n                  \"\"\"),\n    ]\n\n    @classmethod\n    def check_compatible(cls, target):\n        if cls.supported_targets:\n            if target.os not in cls.supported_targets:\n                msg = 'Incompatible target OS \"{}\" for {}'\n                raise TargetError(msg.format(target.os, cls.name))\n\n    def __init__(self, target, **kwargs):\n        super(TargetedPlugin, self).__init__(**kwargs)\n        self.check_compatible(target)\n        self.target = target\n\n\nclass PluginLoaderItem(object):\n\n    def __init__(self, ext_tuple):\n        self.name = ext_tuple.name\n        self.default_package = ext_tuple.default_package\n        self.default_path = ext_tuple.default_path\n        self.cls = load_class(ext_tuple.cls)\n\n\nclass PluginLoader(object):\n    \"\"\"\n    Discovers, enumerates and loads available devices, configs, etc.\n    The loader will attempt to discover things on construction by looking\n    in predetermined set of locations defined by default_paths. 
Optionally,\n    additional locations may specified through paths parameter that must\n    be a list of additional Python module paths (i.e. dot-delimited).\n\n    \"\"\"\n\n    def __init__(self, packages=None, paths=None, ignore_paths=None,\n                 keep_going=False):\n        \"\"\"\n        params::\n\n            :packages: List of packages to load plugins from.\n            :paths: List of paths to be searched for Python modules containing\n                    WA plugins.\n            :ignore_paths: List of paths to ignore when search for WA plugins\n                           (these would typically be subdirectories of one or\n                           more locations listed in ``paths`` parameter.\n            :keep_going: Specifies whether to keep going if an error occurs while\n                         loading plugins.\n        \"\"\"\n        self.logger = logging.getLogger('pluginloader')\n        self.keep_going = keep_going\n        self.packages = packages or []\n        self.paths = paths or []\n        self.ignore_paths = ignore_paths or []\n        self.plugins = {}\n        self.kind_map = defaultdict(dict)\n        self.aliases = {}\n        self.global_param_aliases = {}\n        self._discover_from_packages(self.packages)\n        self._discover_from_paths(self.paths, self.ignore_paths)\n\n    def update(self, packages=None, paths=None, ignore_paths=None):\n        \"\"\" Load plugins from the specified paths/packages\n        without clearing or reloading existing plugin. 
\"\"\"\n        msg = 'Updating from: packages={} paths={}'\n        self.logger.debug(msg.format(packages, paths))\n        if packages:\n            self.packages.extend(packages)\n            self._discover_from_packages(packages)\n        if paths:\n            self.paths.extend(paths)\n            self.ignore_paths.extend(ignore_paths or [])\n            self._discover_from_paths(paths, ignore_paths or [])\n\n    def clear(self):\n        \"\"\" Clear all discovered items. \"\"\"\n        self.plugins = {}\n        self.kind_map.clear()\n        self.aliases.clear()\n        self.global_param_aliases.clear()\n\n    def reload(self):\n        \"\"\" Clear all discovered items and re-run the discovery. \"\"\"\n        self.logger.debug('Reloading')\n        self.clear()\n        self._discover_from_packages(self.packages)\n        self._discover_from_paths(self.paths, self.ignore_paths)\n\n    def get_plugin_class(self, name, kind=None):\n        \"\"\"\n        Return the class for the specified plugin if found or raises ``ValueError``.\n\n        \"\"\"\n        name, _ = self.resolve_alias(name)\n        if kind is None:\n            try:\n                return self.plugins[name]\n            except KeyError:\n                raise NotFoundError('plugins {} not found.'.format(name))\n        if kind not in self.kind_map:\n            raise ValueError('Unknown plugin type: {}'.format(kind))\n        store = self.kind_map[kind]\n        if name not in store:\n            msg = 'plugins {} is not {} {}.'\n            raise NotFoundError(msg.format(name, get_article(kind), kind))\n        return store[name]\n\n    def get_plugin(self, name=None, kind=None, *args, **kwargs):  # pylint: disable=keyword-arg-before-vararg\n        \"\"\"\n        Return plugin of the specified kind with the specified name. 
Any\n        additional parameters will be passed to the plugin's __init__.\n\n        \"\"\"\n        name, base_kwargs = self.resolve_alias(name)\n        kwargs = OrderedDict(chain(iter(base_kwargs.items()), iter(kwargs.items())))\n        cls = self.get_plugin_class(name, kind)\n        plugin = cls(*args, **kwargs)\n        return plugin\n\n    def get_default_config(self, name):\n        \"\"\"\n        Returns the default configuration for the specified plugin name. The\n        name may be an alias, in which case, the returned config will be\n        augmented with appropriate alias overrides.\n\n        \"\"\"\n        real_name, alias_config = self.resolve_alias(name)\n        base_default_config = self.get_plugin_class(real_name).get_default_config()\n        return merge_dicts_simple(base_default_config, alias_config)\n\n    def list_plugins(self, kind=None):\n        \"\"\"\n        List discovered plugin classes. Optionally, only list plugins of a\n        particular type.\n\n        \"\"\"\n        if kind is None:\n            return list(self.plugins.values())\n        if kind not in self.kind_map:\n            raise ValueError('Unknown plugin type: {}'.format(kind))\n        return list(self.kind_map[kind].values())\n\n    def has_plugin(self, name, kind=None):\n        \"\"\"\n        Returns ``True`` if an plugins with the specified ``name`` has been\n        discovered by the loader. If ``kind`` was specified, only returns ``True``\n        if the plugin has been found, *and* it is of the specified kind.\n\n        \"\"\"\n        try:\n            self.get_plugin_class(name, kind)\n            return True\n        except NotFoundError:\n            return False\n\n    def resolve_alias(self, alias_name):\n        \"\"\"\n        Try to resolve the specified name as an plugin alias. Returns a\n        two-tuple, the first value of which is actual plugin name, and the\n        iisecond is a dict of parameter values for this alias. 
If the name passed\n        is already an plugin name, then the result is ``(alias_name, {})``.\n\n        \"\"\"\n        alias_name = identifier(alias_name.lower())\n        if alias_name in self.plugins:\n            return (alias_name, {})\n        if alias_name in self.aliases:\n            alias = self.aliases[alias_name]\n            return (alias.plugin_name, copy(alias.params))\n        raise NotFoundError('Could not find plugin or alias \"{}\"'.format(alias_name))\n\n    # Internal methods.\n\n    def __getattr__(self, name):\n        \"\"\"\n        This resolves methods for specific plugins types based on corresponding\n        generic plugin methods. So it's possible to say things like ::\n\n            loader.get_device('foo')\n\n        instead of ::\n\n            loader.get_plugin('foo', kind='device')\n\n        \"\"\"\n        error_msg = 'No plugins of type \"{}\" discovered'\n        if name.startswith('get_'):\n            name = name.replace('get_', '', 1)\n            if name in self.kind_map:\n                def __wrapper(pname, *args, **kwargs):\n                    return self.get_plugin(pname, name, *args, **kwargs)\n                return __wrapper\n            raise NotFoundError(error_msg.format(name))\n        if name.startswith('list_'):\n            name = name.replace('list_', '', 1).rstrip('s')\n            if name in self.kind_map:\n                def __wrapper(*args, **kwargs):  # pylint: disable=E0102\n                    return self.list_plugins(name, *args, **kwargs)\n                return __wrapper\n            raise NotFoundError(error_msg.format(name))\n        if name.startswith('has_'):\n            name = name.replace('has_', '', 1)\n            if name in self.kind_map:\n                def __wrapper(pname, *args, **kwargs):  # pylint: disable=E0102\n                    return self.has_plugin(pname, name, *args, **kwargs)\n                return __wrapper\n            raise NotFoundError(error_msg.format(name))\n   
     raise AttributeError(name)\n\n    def _discover_from_packages(self, packages):\n        self.logger.debug('Discovering plugins in packages')\n        try:\n            for package in packages:\n                for module in walk_modules(package):\n                    self._discover_in_module(module)\n        except HostError as e:\n            message = 'Problem loading plugins from {}: {}'\n            raise PluginLoaderError(message.format(e.module, str(e.orig_exc)),\n                                    e.exc_info)\n\n    def _discover_from_paths(self, paths, ignore_paths):\n        paths = paths or []\n        ignore_paths = ignore_paths or []\n\n        self.logger.debug('Discovering plugins in paths')\n        for path in paths:\n            self.logger.debug('Checking path %s', path)\n            if os.path.isfile(path):\n                self._discover_from_file(path)\n            elif os.path.exists(path):\n                for root, _, files in os.walk(path, followlinks=True):\n                    should_skip = False\n                    for igpath in ignore_paths:\n                        if root.startswith(igpath):\n                            should_skip = True\n                            break\n                    if should_skip:\n                        continue\n                    for fname in files:\n                        if os.path.splitext(fname)[1].lower() != '.py':\n                            continue\n                        filepath = os.path.join(root, fname)\n                        self._discover_from_file(filepath)\n            elif not os.path.isabs(path):\n                try:\n                    for module in walk_modules(path):\n                        self._discover_in_module(module)\n                except Exception: # NOQA pylint: disable=broad-except\n                    pass\n\n    def _discover_from_file(self, filepath):\n        try:\n            module = import_path(filepath)\n            
self._discover_in_module(module)\n        except (SystemExit, ImportError) as e:\n            if self.keep_going:\n                self.logger.warning('Failed to load {}'.format(filepath))\n                self.logger.warning('Got: {}'.format(e))\n            else:\n                msg = 'Failed to load {}'\n                raise PluginLoaderError(msg.format(filepath), sys.exc_info())\n        except Exception as e:\n            message = 'Problem loading plugins from {}: {}'\n            raise PluginLoaderError(message.format(filepath, e))\n\n    def _discover_in_module(self, module):  # NOQA pylint: disable=too-many-branches\n        self.logger.debug('Checking module %s', module.__name__)\n        with log.indentcontext():\n            for obj in vars(module).values():\n                if inspect.isclass(obj):\n                    if not issubclass(obj, Plugin):\n                        continue\n                    if obj.__module__ != module.__name__:\n                        continue\n                    if not obj.kind:\n                        message = 'Skipping plugin {} as it does not define a kind'\n                        self.logger.debug(message.format(obj.__name__))\n                        continue\n                    if not obj.name:\n                        message = 'Skipping {} {} as it does not define a name'\n                        self.logger.debug(message.format(obj.kind, obj.__name__))\n                        continue\n                    try:\n                        self._add_found_plugin(obj)\n                    except PluginLoaderError as e:\n                        if self.keep_going:\n                            self.logger.warning(e)\n                        else:\n                            raise e\n\n    def _add_found_plugin(self, obj):\n        \"\"\"\n            :obj: Found plugin class\n            :ext: matching plugin item.\n        \"\"\"\n        self.logger.debug('Adding %s %s', obj.kind, obj.name)\n        key = 
identifier(obj.name.lower())\n        if key in self.plugins or key in self.aliases:\n            msg = '{} \"{}\" already exists.'\n            raise PluginLoaderError(msg.format(obj.kind, obj.name))\n        # plugins are tracked both, in a common plugins\n        # dict, and in per-plugin kind dict (as retrieving\n        # plugins by kind is a common use case.\n        self.plugins[key] = obj\n        self.kind_map[obj.kind][key] = obj\n\n        for alias in obj.aliases:\n            alias_id = identifier(alias.name.lower())\n            if alias_id in self.plugins or alias_id in self.aliases:\n                msg = '{} \"{}\" already exists.'\n                raise PluginLoaderError(msg.format(obj.kind, obj.name))\n            self.aliases[alias_id] = alias\n"
  },
  {
    "path": "wa/framework/pluginloader.py",
    "content": "#    Copyright 2013-2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\nimport sys\n\n\nclass __LoaderWrapper(object):\n\n    @property\n    def kinds(self):\n        if not self._loader:\n            self.reset()\n        return list(self._loader.kind_map.keys())\n\n    @property\n    def kind_map(self):\n        if not self._loader:\n            self.reset()\n        return self._loader.kind_map\n\n    def __init__(self):\n        self._loader = None\n\n    def reset(self):\n        # These imports cannot be done at top level, because of\n        # sys.modules manipulation below\n        # pylint: disable=import-outside-toplevel\n        from wa.framework.plugin import PluginLoader\n        from wa.framework.configuration.core import settings\n        self._loader = PluginLoader(settings.plugin_packages,\n                                    settings.plugin_paths, [])\n\n    def update(self, packages=None, paths=None, ignore_paths=None):\n        if not self._loader:\n            self.reset()\n        self._loader.update(packages, paths, ignore_paths)\n\n    def reload(self):\n        if not self._loader:\n            self.reset()\n        self._loader.reload()\n\n    def list_plugins(self, kind=None):\n        if not self._loader:\n            self.reset()\n        return self._loader.list_plugins(kind)\n\n    def has_plugin(self, name, kind=None):\n        if not self._loader:\n            self.reset()\n        return 
self._loader.has_plugin(name, kind)\n\n    def get_plugin_class(self, name, kind=None):\n        if not self._loader:\n            self.reset()\n        return self._loader.get_plugin_class(name, kind)\n\n    def get_plugin(self, name=None, kind=None, *args, **kwargs):  # pylint: disable=keyword-arg-before-vararg\n        if not self._loader:\n            self.reset()\n        return self._loader.get_plugin(name=name, kind=kind, *args, **kwargs)\n\n    def get_default_config(self, name):\n        if not self._loader:\n            self.reset()\n        return self._loader.get_default_config(name)\n\n    def resolve_alias(self, name):\n        if not self._loader:\n            self.reset()\n        return self._loader.resolve_alias(name)\n\n    def __getattr__(self, name):\n        if not self._loader:\n            self.reset()\n        return getattr(self._loader, name)\n\n\nsys.modules[__name__] = __LoaderWrapper()\n"
  },
  {
    "path": "wa/framework/resource.py",
    "content": "#    Copyright 2013-2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\nimport logging\nimport os\nimport re\n\nfrom wa.framework import pluginloader\nfrom wa.framework.plugin import Plugin\nfrom wa.framework.exception import ResourceError\nfrom wa.framework.configuration import settings\nfrom wa.utils import log\nfrom wa.utils.android import get_cacheable_apk_info\nfrom wa.utils.misc import get_object_name\nfrom wa.utils.types import enum, list_or_string, prioritylist, version_tuple\n\n\nSourcePriority = enum(['package', 'remote', 'lan', 'local',\n                       'perferred'], start=0, step=10)\n\n\nclass __NullOwner(object):\n    \"\"\"Represents an owner for a resource not owned by anyone.\"\"\"\n\n    name = 'noone'\n    dependencies_directory = settings.dependencies_directory\n\n    def __getattr__(self, name):\n        return None\n\n    def __str__(self):\n        return 'no-one'\n\n    __repr__ = __str__\n\n\nNO_ONE = __NullOwner()\n\n\nclass Resource(object):\n    \"\"\"\n    Represents a resource that needs to be resolved. This can be pretty much\n    anything: a file, environment variable, a Python object, etc. The only\n    thing a resource *has* to have is an owner (which would normally be the\n    Workload/Instrument/Device/etc object that needs the resource). 
In\n    addition, a resource have any number of attributes to identify, but all of\n    them are resource type specific.\n\n    \"\"\"\n\n    kind = None\n\n    def __init__(self, owner=NO_ONE):\n        self.owner = owner\n\n    def match(self, path):\n        return self.match_path(path)\n\n    def match_path(self, path):\n        raise NotImplementedError()\n\n    def __str__(self):\n        return '<{}\\'s {}>'.format(self.owner, self.kind)\n\n\nclass File(Resource):\n\n    kind = 'file'\n\n    def __init__(self, owner, path):\n        super(File, self).__init__(owner)\n        self.path = path\n\n    def match_path(self, path):\n        return self.path == path\n\n    def __str__(self):\n        return '<{}\\'s {} {} file>'.format(self.owner, self.kind, self.path)\n\n\nclass Executable(Resource):\n\n    kind = 'executable'\n\n    def __init__(self, owner, abi, filename):\n        super(Executable, self).__init__(owner)\n        self.abi = abi\n        self.filename = filename\n\n    def match_path(self, path):\n        return self.filename == os.path.basename(path)\n\n    def __str__(self):\n        return '<{}\\'s {} {} executable>'.format(self.owner, self.abi, self.filename)\n\n\nclass ReventFile(Resource):\n\n    kind = 'revent'\n\n    def __init__(self, owner, stage, target):\n        super(ReventFile, self).__init__(owner)\n        self.stage = stage\n        self.target = target\n\n    def match_path(self, path):\n        filename = os.path.basename(path)\n        parts = filename.split('.')\n        if len(parts) > 2:\n            target, stage = parts[:2]\n            return target == self.target and stage == self.stage\n        else:\n            stage = parts[0]\n            return stage == self.stage\n\n\nclass JarFile(Resource):\n\n    kind = 'jar'\n\n    def match_path(self, path):\n        # An owner always  has at most one jar file, so\n        # always match\n        return True\n\n\nclass ApkFile(Resource):\n\n    kind = 'apk'\n\n    def 
__init__(self, owner, variant=None, version=None,\n                 package=None, uiauto=False, exact_abi=False,\n                 supported_abi=None, min_version=None, max_version=None):\n        super(ApkFile, self).__init__(owner)\n        self.variant = variant\n        self.version = version\n        self.max_version = max_version\n        self.min_version = min_version\n        self.package = package\n        self.uiauto = uiauto\n        self.exact_abi = exact_abi\n        self.supported_abi = supported_abi\n\n    def match_path(self, path):\n        ext = os.path.splitext(path)[1].lower()\n        return ext == '.apk'\n\n    def match(self, path):\n        name_matches = True\n        version_matches = True\n        version_range_matches = True\n        package_matches = True\n        abi_matches = True\n        uiauto_matches = uiauto_test_matches(path, self.uiauto)\n        if self.version:\n            version_matches = apk_version_matches(path, self.version)\n        if self.max_version or self.min_version:\n            version_range_matches = apk_version_matches_range(path, self.min_version,\n                                                              self.max_version)\n        if self.variant:\n            name_matches = file_name_matches(path, self.variant)\n        if self.package:\n            package_matches = package_name_matches(path, self.package)\n        if self.supported_abi:\n            abi_matches = apk_abi_matches(path, self.supported_abi,\n                                          self.exact_abi)\n        return name_matches and version_matches and \\\n            version_range_matches and uiauto_matches \\\n            and package_matches and abi_matches\n\n    def __str__(self):\n        text = '<{}\\'s apk'.format(self.owner)\n        if self.variant:\n            text += ' {}'.format(self.variant)\n        if self.version:\n            text += ' {}'.format(self.version)\n        if self.uiauto:\n            text += 'uiautomator 
test'\n        text += '>'\n        return text\n\n\nclass ResourceGetter(Plugin):\n    \"\"\"\n    Base class for implementing resolvers. Defines resolver\n    interface. Resolvers are responsible for discovering resources (such as\n    particular kinds of files) they know about based on the parameters that are\n    passed to them. Each resolver also has a dict of attributes that describe\n    it's operation, and may be used to determine which get invoked.  There is\n    no pre-defined set of attributes and resolvers may define their own.\n\n    Class attributes:\n\n    :name: Name that uniquely identifies this getter. Must be set by any\n           concrete subclass.\n    :priority: Priority with which this getter will be invoked. This should\n               be one of the standard priorities specified in\n               ``GetterPriority`` enumeration. If not set, this will default\n               to ``GetterPriority.environment``.\n\n    \"\"\"\n\n    name = None\n    kind = 'resource_getter'\n\n    def register(self, resolver):\n        raise NotImplementedError()\n\n    def initialize(self):\n        pass\n\n    def __str__(self):\n        return '<ResourceGetter {}>'.format(self.name)\n\n\nclass ResourceResolver(object):\n    \"\"\"\n    Discovers and registers getters, and then handles requests for\n    resources using registered getters.\n\n    \"\"\"\n\n    def __init__(self, loader=pluginloader):\n        self.loader = loader\n        self.logger = logging.getLogger('resolver')\n        self.getters = []\n        self.sources = prioritylist()\n\n    def load(self):\n        for gettercls in self.loader.list_plugins('resource_getter'):\n            self.logger.debug('Loading getter {}'.format(gettercls.name))\n            getter = self.loader.get_plugin(name=gettercls.name,\n                                            kind=\"resource_getter\")\n            with log.indentcontext():\n                getter.initialize()\n                
getter.register(self)\n            self.getters.append(getter)\n\n    def register(self, source, priority=SourcePriority.local):\n        msg = 'Registering \"{}\" with priority \"{}\"'\n        self.logger.debug(msg.format(get_object_name(source), priority))\n        self.sources.add(source, priority)\n\n    def get(self, resource, strict=True):\n        \"\"\"\n        Uses registered getters to attempt to discover a resource of the specified\n        kind and matching the specified criteria. Returns path to the resource that\n        has been discovered. If a resource has not been discovered, this will raise\n        a ``ResourceError`` or, if ``strict`` has been set to ``False``, will return\n        ``None``.\n\n        \"\"\"\n        self.logger.debug('Resolving {}'.format(resource))\n        for source in self.sources:\n            source_name = get_object_name(source)\n            self.logger.debug('Trying {}'.format(source_name))\n            result = source(resource)\n            if result is not None:\n                msg = 'Resource {} found using {}:'\n                self.logger.debug(msg.format(resource, source_name))\n                self.logger.debug('\\t{}'.format(result))\n                return result\n        if strict:\n            raise ResourceError('{} could not be found'.format(resource))\n        self.logger.debug('Resource {} not found.'.format(resource))\n        return None\n\n\ndef apk_version_matches(path, version):\n    version = list_or_string(version)\n    info = get_cacheable_apk_info(path)\n    for v in version:\n        if v in (info.version_name, info.version_code):\n            return True\n        if loose_version_matching(v, info.version_name):\n            return True\n    return False\n\n\ndef apk_version_matches_range(path, min_version=None, max_version=None):\n    info = get_cacheable_apk_info(path)\n    return range_version_matching(info.version_name, min_version, max_version)\n\n\ndef 
range_version_matching(apk_version, min_version=None, max_version=None):\n    if not apk_version:\n        return False\n    apk_version = version_tuple(apk_version)\n\n    if max_version:\n        max_version = version_tuple(max_version)\n        if apk_version > max_version:\n            return False\n    if min_version:\n        min_version = version_tuple(min_version)\n        if apk_version < min_version:\n            return False\n    return True\n\n\ndef loose_version_matching(config_version, apk_version):\n    config_version = version_tuple(config_version)\n    apk_version = version_tuple(apk_version)\n\n    if len(apk_version) < len(config_version):\n        return False  # More specific version requested than available\n\n    for i in range(len(config_version)):\n        if config_version[i] != apk_version[i]:\n            return False\n    return True\n\n\ndef file_name_matches(path, pattern):\n    filename = os.path.basename(path)\n    if pattern in filename:\n        return True\n    if re.search(pattern, filename):\n        return True\n    return False\n\n\ndef uiauto_test_matches(path, uiauto):\n    info = get_cacheable_apk_info(path)\n    return uiauto == ('com.arm.wa.uiauto' in info.package)\n\n\ndef package_name_matches(path, package):\n    info = get_cacheable_apk_info(path)\n    return info.package == package\n\n\ndef apk_abi_matches(path, supported_abi, exact_abi=False):\n    supported_abi = list_or_string(supported_abi)\n    info = get_cacheable_apk_info(path)\n    # If no native code present, suitable for all devices.\n    if not info.native_code:\n        return True\n\n    if exact_abi:  # Only check primary\n        return supported_abi[0] in info.native_code\n    else:\n        for abi in supported_abi:\n            if abi in info.native_code:\n                return True\n    return False\n"
  },
  {
    "path": "wa/framework/run.py",
    "content": "#    Copyright 2013-2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\n# Because of use of Enum (dynamic attrs)\n# pylint: disable=no-member\n\nimport uuid\nfrom collections import OrderedDict, Counter\nfrom copy import copy\nfrom datetime import datetime, timedelta\n\nfrom wa.framework.configuration.core import Status\nfrom wa.utils.serializer import Podable\n\n\nclass RunInfo(Podable):\n    \"\"\"\n    Information about the current run, such as its unique ID, run\n    time, etc.\n\n    \"\"\"\n    _pod_serialization_version = 1\n\n    @staticmethod\n    def from_pod(pod):\n        pod = RunInfo._upgrade_pod(pod)\n        uid = pod.pop('uuid')\n        _pod_version = pod.pop('_pod_version')\n        duration = pod.pop('duration')\n        if uid is not None:\n            uid = uuid.UUID(uid)\n        instance = RunInfo(**pod)\n        instance._pod_version = _pod_version  # pylint: disable=protected-access\n        instance.uuid = uid\n        instance.duration = duration if duration is None else timedelta(seconds=duration)\n        return instance\n\n    def __init__(self, run_name=None, project=None, project_stage=None,\n                 start_time=None, end_time=None, duration=None):\n        super(RunInfo, self).__init__()\n        self.uuid = uuid.uuid4()\n        self.run_name = run_name\n        self.project = project\n        self.project_stage = project_stage\n        self.start_time = start_time\n        
self.end_time = end_time\n        self.duration = duration\n\n    def to_pod(self):\n        d = super(RunInfo, self).to_pod()\n        d.update(copy(self.__dict__))\n        d['uuid'] = str(self.uuid)\n        if self.duration is None:\n            d['duration'] = self.duration\n        else:\n            d['duration'] = self.duration.total_seconds()\n        return d\n\n    @staticmethod\n    def _pod_upgrade_v1(pod):\n        pod['_pod_version'] = pod.get('_pod_version', 1)\n        return pod\n\n\nclass RunState(Podable):\n    \"\"\"\n    Represents the state of a WA run.\n\n    \"\"\"\n    _pod_serialization_version = 1\n\n    @staticmethod\n    def from_pod(pod):\n        instance = super(RunState, RunState).from_pod(pod)\n        instance.status = Status.from_pod(pod['status'])\n        instance.timestamp = pod['timestamp']\n        jss = [JobState.from_pod(j) for j in pod['jobs']]\n        instance.jobs = OrderedDict(((js.id, js.iteration), js) for js in jss)\n        return instance\n\n    @property\n    def num_completed_jobs(self):\n        return sum(1 for js in self.jobs.values()\n                   if js.status > Status.RUNNING)\n\n    def __init__(self):\n        super(RunState, self).__init__()\n        self.jobs = OrderedDict()\n        self.status = Status.NEW\n        self.timestamp = datetime.utcnow()\n\n    def add_job(self, job):\n        self.jobs[(job.state.id, job.state.iteration)] = job.state\n\n    def get_status_counts(self):\n        counter = Counter()\n        for job_state in self.jobs.values():\n            counter[job_state.status] += 1\n        return counter\n\n    def to_pod(self):\n        pod = super(RunState, self).to_pod()\n        pod['status'] = self.status.to_pod()\n        pod['timestamp'] = self.timestamp\n        pod['jobs'] = [j.to_pod() for j in self.jobs.values()]\n        return pod\n\n    @staticmethod\n    def _pod_upgrade_v1(pod):\n        pod['_pod_version'] = pod.get('_pod_version', 1)\n        pod['status'] = 
Status(pod['status']).to_pod()\n        return pod\n\n\nclass JobState(Podable):\n\n    _pod_serialization_version = 1\n\n    @staticmethod\n    def from_pod(pod):\n        pod = JobState._upgrade_pod(pod)\n        instance = JobState(pod['id'], pod['label'], pod['iteration'],\n                            Status.from_pod(pod['status']))\n        instance.retries = pod['retries']\n        instance.timestamp = pod['timestamp']\n        return instance\n\n    @property\n    def output_name(self):\n        return '{}-{}-{}'.format(self.id, self.label, self.iteration)\n\n    def __init__(self, id, label, iteration, status):\n        # pylint: disable=redefined-builtin\n        super(JobState, self).__init__()\n        self.id = id\n        self.label = label\n        self.iteration = iteration\n        self.status = status\n        self.retries = 0\n        self.timestamp = datetime.utcnow()\n\n    def to_pod(self):\n        pod = super(JobState, self).to_pod()\n        pod['id'] = self.id\n        pod['label'] = self.label\n        pod['iteration'] = self.iteration\n        pod['status'] = self.status.to_pod()\n        pod['retries'] = self.retries\n        pod['timestamp'] = self.timestamp\n        return pod\n\n    @staticmethod\n    def _pod_upgrade_v1(pod):\n        pod['_pod_version'] = pod.get('_pod_version', 1)\n        pod['status'] = Status(pod['status']).to_pod()\n        return pod\n"
  },
  {
    "path": "wa/framework/signal.py",
    "content": "#    Copyright 2013-2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\n\n\"\"\"\nThis module wraps louie signalling mechanism. It relies on modified version of louie\nthat has prioritization added to handler invocation.\n\n\"\"\"\nimport sys\nimport logging\nfrom contextlib import contextmanager\n\nfrom louie import dispatcher, saferef  # pylint: disable=wrong-import-order\nfrom louie.dispatcher import _remove_receiver\nimport wrapt\n\nfrom wa.utils.types import prioritylist, enum\n\n\nlogger = logging.getLogger('signal')\n\n\nclass Signal(object):\n    \"\"\"\n    This class implements the signals to be used for notifying callbacks\n    registered to respond to different states and stages of the execution of workload\n    automation.\n\n    \"\"\"\n\n    def __init__(self, name, description='no description', invert_priority=False):\n        \"\"\"\n        Instantiates a Signal.\n\n            :param name: name is the identifier of the Signal object. Signal instances with\n                        the same name refer to the same execution state/stage.\n            :param invert_priority: boolean parameter that determines whether multiple\n                                    callbacks for the same signal should be\n                                    ordered with ascending or descending\n                                    priorities. 
Typically this flag should be\n                                    set to True if the Signal is triggered\n                                    AFTER a state/stage has been reached.\n                                    That way callbacks with high priorities\n                                    will be called right after the event has\n                                    occurred.\n        \"\"\"\n        self.name = name\n        self.description = description\n        self.invert_priority = invert_priority\n\n    def __str__(self):\n        return self.name\n\n    __repr__ = __str__\n\n    def __hash__(self):\n        return id(self.name)\n\n\n# Signals associated with run-related events\nRUN_STARTED = Signal('run-started', 'sent at the beginning of the run')\nRUN_INITIALIZED = Signal('run-initialized', 'set after the run has been initialized')\nRUN_ABORTED = Signal('run-aborted', 'set when the run has been aborted due to a keyboard interrupt')\nRUN_FAILED = Signal('run-failed', 'set if the run has failed to complete all jobs.')\nRUN_FINALIZED = Signal('run-finalized', 'set after the run has been finalized')\nRUN_COMPLETED = Signal('run-completed', 'set upon completion of the run (regardless of whether or not it has failed')\n\n\n# Signals associated with job-related events\nJOB_STARTED = Signal('job-started', 'set when a a new job has been started')\nJOB_ABORTED = Signal('job-aborted',\n                     description='''\n                     sent if a job has been aborted due to a keyboard interrupt.\n\n                     .. 
note:: While the status of every job that has not had a\n                               chance to run due to being interrupted will be\n                               set to \"ABORTED\", this signal will only be sent\n                               for the job that was actually running at the\n                               time.\n\n                     ''')\nJOB_FAILED = Signal('job-failed', description='set if the job has failed')\nJOB_RESTARTED = Signal('job-restarted')\nJOB_COMPLETED = Signal('job-completed')\n\n\n# Signals associated with particular stages of workload execution\nBEFORE_WORKLOAD_INITIALIZED = Signal('before-workload-initialized',\n                                     invert_priority=True)\nSUCCESSFUL_WORKLOAD_INITIALIZED = Signal('successful-workload-initialized')\nAFTER_WORKLOAD_INITIALIZED = Signal('after-workload-initialized')\n\nBEFORE_WORKLOAD_SETUP = Signal('before-workload-setup', invert_priority=True)\nSUCCESSFUL_WORKLOAD_SETUP = Signal('successful-workload-setup')\nAFTER_WORKLOAD_SETUP = Signal('after-workload-setup')\n\nBEFORE_WORKLOAD_EXECUTION = Signal('before-workload-execution', invert_priority=True)\nSUCCESSFUL_WORKLOAD_EXECUTION = Signal('successful-workload-execution')\nAFTER_WORKLOAD_EXECUTION = Signal('after-workload-execution')\n\nBEFORE_WORKLOAD_RESULT_EXTRACTION = Signal('before-workload-result-extracton', invert_priority=True)\nSUCCESSFUL_WORKLOAD_RESULT_EXTRACTION = Signal('successful-workload-result-extracton')\nAFTER_WORKLOAD_RESULT_EXTRACTION = Signal('after-workload-result-extracton')\n\nBEFORE_WORKLOAD_OUTPUT_UPDATE = Signal('before-workload-output-update',\n                                       invert_priority=True)\nSUCCESSFUL_WORKLOAD_OUTPUT_UPDATE = Signal('successful-workload-output-update')\nAFTER_WORKLOAD_OUTPUT_UPDATE = Signal('after-workload-output-update')\n\nBEFORE_WORKLOAD_TEARDOWN = Signal('before-workload-teardown', invert_priority=True)\nSUCCESSFUL_WORKLOAD_TEARDOWN = 
Signal('successful-workload-teardown')\nAFTER_WORKLOAD_TEARDOWN = Signal('after-workload-teardown')\n\nBEFORE_WORKLOAD_FINALIZED = Signal('before-workload-finalized', invert_priority=True)\nSUCCESSFUL_WORKLOAD_FINALIZED = Signal('successful-workload-finalized')\nAFTER_WORKLOAD_FINALIZED = Signal('after-workload-finalized')\n\n# Signals indicating exceptional conditions\nERROR_LOGGED = Signal('error-logged')\nWARNING_LOGGED = Signal('warning-logged')\n\n# These are paired events -- if the before_event is sent, the after_ signal is\n# guaranteed to also be sent. In particular, the after_ signals will be sent\n# even if there is an error, so you cannot assume in the handler that the\n# device has booted successfully. In most cases, you should instead use the\n# non-paired signals below.\nBEFORE_RUN_INIT = Signal('before-run-init', invert_priority=True)\nSUCCESSFUL_RUN_INIT = Signal('successful-run-init')\nAFTER_RUN_INIT = Signal('after-run-init')\n\nBEFORE_JOB = Signal('before-job', invert_priority=True)\nSUCCESSFUL_JOB = Signal('successful-job')\nAFTER_JOB = Signal('after-job')\n\nBEFORE_JOB_QUEUE_EXECUTION = Signal('before-job-queue-execution', invert_priority=True)\nSUCCESSFUL_JOB_QUEUE_EXECUTION = Signal('successful-job-queue-execution')\nAFTER_JOB_QUEUE_EXECUTION = Signal('after-job-queue-execution')\n\nBEFORE_JOB_TARGET_CONFIG = Signal('before-job-target-config', invert_priority=True)\nSUCCESSFUL_JOB_TARGET_CONFIG = Signal('successful-job-target-config')\nAFTER_JOB_TARGET_CONFIG = Signal('after-job-target-config')\n\nBEFORE_JOB_OUTPUT_PROCESSED = Signal('before-job-output-processed',\n                                     invert_priority=True)\nSUCCESSFUL_JOB_OUTPUT_PROCESSED = Signal('successful-job-output-processed')\nAFTER_JOB_OUTPUT_PROCESSED = Signal('after-job-output-processed')\n\nBEFORE_FLASHING = Signal('before-flashing', invert_priority=True)\nSUCCESSFUL_FLASHING = Signal('successful-flashing')\nAFTER_FLASHING = Signal('after-flashing')\n\nBEFORE_REBOOT 
= Signal('before-reboot', invert_priority=True)\nSUCCESSFUL_REBOOT = Signal('successful-reboot')\nAFTER_REBOOT = Signal('after-reboot')\n\nBEFORE_TARGET_CONNECT = Signal('before-target-connect', invert_priority=True)\nSUCCESSFUL_TARGET_CONNECT = Signal('successful-target-connect')\nAFTER_TARGET_CONNECT = Signal('after-target-connect')\n\nBEFORE_TARGET_DISCONNECT = Signal('before-target-disconnect', invert_priority=True)\nSUCCESSFUL_TARGET_DISCONNECT = Signal('successful-target-disconnect')\nAFTER_TARGET_DISCONNECT = Signal('after-target-disconnect')\n\n\nBEFORE_RUN_OUTPUT_PROCESSED = Signal(\n    'before-run-output-processed', invert_priority=True)\nSUCCESSFUL_RUN_OUTPUT_PROCESSED = Signal(\n    'successful-run-output-processed')\nAFTER_RUN_OUTPUT_PROCESSED = Signal(\n    'after-run-output-processed')\n\n\nCallbackPriority = enum(['extremely_low', 'very_low', 'low', 'normal',\n                         'high', 'very_high', 'extremely_high'], -30, 10)\n\n\nclass _prioritylist_wrapper(prioritylist):\n    \"\"\"\n    This adds a NOP append() method so that when louie invokes it to add the\n    handler to receivers, nothing will happen; the handler is actually added inside\n    the connect() below according to priority, before louie's connect() gets invoked.\n\n    \"\"\"\n\n    def append(self, *args, **kwargs):\n        pass\n\n\ndef connect(handler, signal, sender=dispatcher.Any, priority=0):\n    \"\"\"\n    Connects a callback to a signal, so that the callback will be automatically invoked\n    when that signal is sent.\n\n    Parameters:\n\n        :handler: This can be any callable that that takes the right arguments for\n                  the signal. For most signals this means a single argument that\n                  will be an ``ExecutionContext`` instance. But please see documentation\n                  for individual signals in the :ref:`signals reference <instruments_method_map>`.\n        :signal: The signal to which the handler will be subscribed. 
Please see\n                 :ref:`signals reference <instruments_method_map>` for the list of standard WA\n                 signals.\n\n                 .. note:: There is nothing that prevents instruments from sending their\n                           own signals that are not part of the standard set. However the signal\n                           must always be an :class:`wa.core.signal.Signal` instance.\n\n        :sender: The handler will be invoked only for the signals emitted by this sender. By\n                 default, this is set to :class:`louie.dispatcher.Any`, so the handler will\n                 be invoked for signals from any sender.\n        :priority: An integer (positive or negative) that specifies the priority of the handler.\n                   Handlers with higher priority will be called before handlers with lower\n                   priority. The call order of handlers with the same priority is not specified.\n                   Defaults to 0.\n\n                   .. note:: Priorities for some signals are inverted (so highest priority\n                             handlers get executed last). 
Please see :ref:`signals reference <instruments_method_map>`\n                             for details.\n\n    \"\"\"\n    logger.debug('Connecting {} to {}({}) with priority {}'.format(handler, signal, sender, priority))\n    if getattr(signal, 'invert_priority', False):\n        priority = -priority\n    senderkey = id(sender)\n    if senderkey in dispatcher.connections:\n        signals = dispatcher.connections[senderkey]\n    else:\n        dispatcher.connections[senderkey] = signals = {}\n    if signal in signals:\n        receivers = signals[signal]\n    else:\n        receivers = signals[signal] = _prioritylist_wrapper()\n    dispatcher.connect(handler, signal, sender)\n    receivers.add(saferef.safe_ref(handler, on_delete=_remove_receiver), priority)\n\n\ndef disconnect(handler, signal, sender=dispatcher.Any):\n    \"\"\"\n    Disconnect a previously connected handler from the specified signal, optionally, only\n    for the specified sender.\n\n    Parameters:\n\n        :handler: The callback to be disconnected.\n        :signal: The signal the handler is to be disconnected from. It will\n                 be an :class:`wa.core.signal.Signal` instance.\n        :sender: If specified, the handler will only be disconnected from the signal\n                sent by this sender.\n\n    \"\"\"\n    logger.debug('Disconnecting {} from {}({})'.format(handler, signal, sender))\n    dispatcher.disconnect(handler, signal, sender)\n\n\ndef send(signal, sender=dispatcher.Anonymous, *args, **kwargs):\n    \"\"\"\n    Sends a signal, causing connected handlers to be invoked.\n\n    Parameters:\n\n        :signal: Signal to be sent. This must be an instance of :class:`wa.core.signal.Signal`\n                 or its subclasses.\n        :sender: The sender of the signal (typically, this would be ``self``). 
Some handlers may only\n                 be subscribed to signals from a particular sender.\n\n        The rest of the parameters will be passed on as arguments to the handler.\n\n    \"\"\"\n    logger.debug('Sending {} from {}'.format(signal, sender))\n    return dispatcher.send(signal, sender, *args, **kwargs)\n\n\n# This will normally be set to log_error() by init_logging(); see wa.utils.log\n# Done this way to prevent a circular import dependency.\nlog_error_func = logger.error\n\n\ndef safe_send(signal, sender=dispatcher.Anonymous,\n              propagate=None, *args, **kwargs):\n    \"\"\"\n    Same as ``send``, except this will catch and log all exceptions raised\n    by handlers, except those specified in ``propagate`` argument (defaults\n    to just ``[KeyboardInterrupt]``).\n    \"\"\"\n    if propagate is None:\n        propagate = [KeyboardInterrupt]\n    try:\n        logger.debug('Safe-sending {} from {}'.format(signal, sender))\n        send(signal, sender, *args, **kwargs)\n    except Exception as e:  # pylint: disable=broad-except\n        if any(isinstance(e, p) for p in propagate):\n            raise e\n        log_error_func(e)\n\n\n@contextmanager\ndef wrap(signal_name, sender=dispatcher.Anonymous, *args, **kwargs):  # pylint: disable=keyword-arg-before-vararg\n    \"\"\"Wraps the suite in before/after signals, ensuring\n    that after signal is always sent.\"\"\"\n    safe = kwargs.pop('safe', False)\n    signal_name = signal_name.upper().replace('-', '_')\n    send_func = safe_send if safe else send\n    try:\n        before_signal = globals()['BEFORE_' + signal_name]\n        success_signal = globals()['SUCCESSFUL_' + signal_name]\n        after_signal = globals()['AFTER_' + signal_name]\n    except KeyError:\n        raise ValueError('Invalid wrapped signal name: {}'.format(signal_name))\n    try:\n        send_func(before_signal, sender, *args, **kwargs)\n        yield\n        send_func(success_signal, sender, *args, **kwargs)\n    
finally:\n        _, exc, _ = sys.exc_info()\n        if exc:\n            log_error_func(exc)\n        send_func(after_signal, sender, *args, **kwargs)\n\n\ndef wrapped(signal_name, sender=dispatcher.Anonymous, safe=False):\n    \"\"\"A decorator for wrapping function in signal dispatch.\"\"\"\n    @wrapt.decorator\n    def signal_wrapped(wrapped_func, _, args, kwargs):\n        def signal_wrapper(*args, **kwargs):\n            with wrap(signal_name, sender, safe):\n                return wrapped_func(*args, **kwargs)\n\n        return signal_wrapper(*args, **kwargs)\n\n    return signal_wrapped\n"
  },
  {
    "path": "wa/framework/target/__init__.py",
    "content": ""
  },
  {
    "path": "wa/framework/target/assistant.py",
    "content": "#    Copyright 2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\nimport logging\nimport os\nimport shutil\nimport sys\nimport tempfile\nimport threading\nimport time\n\nfrom wa.framework.exception import WorkerThreadError\nfrom wa.framework.plugin import Parameter\nfrom wa.utils.android import LogcatParser\nfrom wa.utils.misc import touch\nimport wa.framework.signal as signal\n\n\nclass LinuxAssistant(object):\n\n    parameters = []\n\n    def __init__(self, target):\n        self.target = target\n\n    def initialize(self):\n        pass\n\n    def start(self):\n        pass\n\n    def extract_results(self, context):\n        pass\n\n    def stop(self):\n        pass\n\n    def finalize(self):\n        pass\n\n\nclass AndroidAssistant(object):\n\n    parameters = [\n        Parameter('disable_selinux', kind=bool, default=True,\n                  description=\"\"\"\n                  If ``True``, the default, and the target is rooted, an attempt will\n                  be made to disable SELinux by running ``setenforce 0`` on the target\n                  at the beginning of the run.\n                  \"\"\"),\n        Parameter('logcat_poll_period', kind=int,\n                  constraint=lambda x: x > 0,\n                  description=\"\"\"\n                  Polling period for logcat in seconds. 
If not specified,\n                  no polling will be used.\n\n                  Logcat buffer on android is of limited size and it cannot be\n                  adjusted at run time. Depending on the amount of logging activity,\n                  the buffer may not be enought to capture comlete trace for a\n                  workload execution. For those situations, logcat may be polled\n                  periodically during the course of the run and stored in a\n                  temporary locaiton on the host. Setting the value of the poll\n                  period enables this behavior.\n                  \"\"\"),\n        Parameter('stay_on_mode', kind=int,\n                  constraint=lambda x: 0 <= x <= 7,\n                  description=\"\"\"\n                  Specify whether the screen should stay on while the device is\n                  charging:\n\n                    0: never stay on\n                    1: with AC charger\n                    2: with USB charger\n                    4: with wireless charger\n\n                  Values can be OR-ed together to produce combinations, for\n                  instance ``7`` will cause the screen to stay on when charging\n                  under any method.\n                  \"\"\"),\n    ]\n\n    def __init__(self, target, logcat_poll_period=None, disable_selinux=True, stay_on_mode=None):\n        self.target = target\n        self.logcat_poll_period = logcat_poll_period\n        self.disable_selinux = disable_selinux\n        self.stay_on_mode = stay_on_mode\n        self.orig_stay_on_mode = self.target.get_stay_on_mode() if stay_on_mode is not None else None\n        self.logcat_poller = None\n        self.logger = logging.getLogger('logcat')\n        self._logcat_marker_msg = None\n        self._logcat_marker_tag = None\n        signal.connect(self._before_workload, signal.BEFORE_WORKLOAD_EXECUTION)\n        if self.logcat_poll_period:\n            signal.connect(self._after_workload, 
signal.AFTER_WORKLOAD_EXECUTION)\n\n    def initialize(self):\n        if self.target.is_rooted and self.disable_selinux:\n            self.do_disable_selinux()\n        if self.stay_on_mode is not None:\n            self.target.set_stay_on_mode(self.stay_on_mode)\n\n    def start(self):\n        if self.logcat_poll_period:\n            self.logcat_poller = LogcatPoller(self.target, self.logcat_poll_period)\n            self.logcat_poller.start()\n        else:\n            if not self._logcat_marker_msg:\n                self._logcat_marker_msg = 'WA logcat marker for wrap detection'\n                self._logcat_marker_tag = 'WAlog'\n\n    def stop(self):\n        if self.logcat_poller:\n            self.logcat_poller.stop()\n\n    def finalize(self):\n        if self.stay_on_mode is not None:\n            self.target.set_stay_on_mode(self.orig_stay_on_mode)\n\n    def extract_results(self, context):\n        logcat_file = os.path.join(context.output_directory, 'logcat.log')\n        self.dump_logcat(logcat_file)\n        context.add_artifact('logcat', logcat_file, kind='log')\n        self.clear_logcat()\n        if not self._check_logcat_nowrap(logcat_file):\n            self.logger.warning('The main logcat buffer wrapped and lost data;'\n                                ' results that rely on this buffer may be'\n                                ' inaccurate or incomplete.'\n                                )\n\n    def dump_logcat(self, outfile):\n        if self.logcat_poller:\n            self.logcat_poller.write_log(outfile)\n        else:\n            self.target.dump_logcat(outfile, logcat_format='threadtime')\n\n    def clear_logcat(self):\n        if self.logcat_poller:\n            self.logcat_poller.clear_buffer()\n        else:\n            self.target.clear_logcat()\n\n    def _before_workload(self, _):\n        if self.logcat_poller:\n            self.logcat_poller.start_logcat_wrap_detect()\n        else:\n            self.insert_logcat_marker()\n\n 
   def _after_workload(self, _):\n        self.logcat_poller.stop_logcat_wrap_detect()\n\n    def _check_logcat_nowrap(self, outfile):\n        if self.logcat_poller:\n            return self.logcat_poller.check_logcat_nowrap(outfile)\n        else:\n            parser = LogcatParser()\n            for event in parser.parse(outfile):\n                if (event.tag == self._logcat_marker_tag\n                        and event.message == self._logcat_marker_msg):\n                    return True\n\n            return False\n\n    def insert_logcat_marker(self):\n        self.logger.debug('Inserting logcat marker')\n        self.target.execute(\n            'log -t \"{}\" \"{}\"'.format(\n                self._logcat_marker_tag, self._logcat_marker_msg\n            )\n        )\n\n    def do_disable_selinux(self):\n        # SELinux was added in Android 4.3 (API level 18). Trying to\n        # 'getenforce' in earlier versions will produce an error.\n        if self.target.get_sdk_version() >= 18:\n            se_status = self.target.execute('getenforce', as_root=True).strip()\n            if se_status == 'Enforcing':\n                self.target.execute('setenforce 0', as_root=True, check_exit_code=False)\n\n\nclass LogcatPoller(threading.Thread):\n\n    def __init__(self, target, period=60, timeout=30):\n        super(LogcatPoller, self).__init__()\n        self.target = target\n        self.logger = logging.getLogger('logcat')\n        self.period = period\n        self.timeout = timeout\n        self.stop_signal = threading.Event()\n        self.lock = threading.RLock()\n        self.buffer_file = tempfile.mktemp()\n        self.last_poll = 0\n        self.daemon = True\n        self.exc = None\n        self._logcat_marker_tag = 'WALog'\n        self._logcat_marker_msg = 'WA logcat marker for wrap detection:{}'\n        self._marker_count = 0\n        self._start_marker = None\n        self._end_marker = None\n\n    def run(self):\n        
self.logger.debug('Starting polling')\n        try:\n            self.insert_logcat_marker()\n            while True:\n                if self.stop_signal.is_set():\n                    break\n                with self.lock:\n                    current_time = time.time()\n                    if (current_time - self.last_poll) >= self.period:\n                        self.poll()\n                        self.insert_logcat_marker()\n                time.sleep(0.5)\n        except Exception:  # pylint: disable=W0703\n            self.exc = WorkerThreadError(self.name, sys.exc_info())\n        self.logger.debug('Polling stopped')\n\n    def stop(self):\n        self.logger.debug('Stopping logcat polling')\n        self.stop_signal.set()\n        self.join(self.timeout)\n        if self.is_alive():\n            self.logger.error('Could not join logcat poller thread.')\n        if self.exc:\n            raise self.exc  # pylint: disable=E0702\n\n    def clear_buffer(self):\n        self.logger.debug('Clearing logcat buffer')\n        with self.lock:\n            self.target.clear_logcat()\n            touch(self.buffer_file)\n\n    def write_log(self, outfile):\n        with self.lock:\n            self.poll()\n            if os.path.isfile(self.buffer_file):\n                shutil.copy(self.buffer_file, outfile)\n            else:  # there was no logcat trace at this time\n                touch(outfile)\n\n    def close(self):\n        self.logger.debug('Closing poller')\n        if os.path.isfile(self.buffer_file):\n            os.remove(self.buffer_file)\n\n    def poll(self):\n        self.last_poll = time.time()\n        self.target.dump_logcat(self.buffer_file, append=True, timeout=self.timeout, logcat_format='threadtime')\n        self.target.clear_logcat()\n\n    def insert_logcat_marker(self):\n        self.logger.debug('Inserting logcat marker')\n        with self.lock:\n            self.target.execute(\n                'log -t \"{}\" \"{}\"'.format(\n        
            self._logcat_marker_tag,\n                    self._logcat_marker_msg.format(self._marker_count)\n                )\n            )\n            self._marker_count += 1\n\n    def check_logcat_nowrap(self, outfile):\n        parser = LogcatParser()\n        counter = self._start_marker\n        for event in parser.parse(outfile):\n            message = self._logcat_marker_msg.split(':')[0]\n            if not (event.tag == self._logcat_marker_tag\n                    and event.message.split(':')[0] == message):\n                continue\n\n            number = int(event.message.split(':')[1])\n            if number > counter:\n                return False\n            elif number == counter:\n                counter += 1\n\n            if counter == self._end_marker:\n                return True\n\n        return False\n\n    def start_logcat_wrap_detect(self):\n        with self.lock:\n            self._start_marker = self._marker_count\n            self.insert_logcat_marker()\n\n    def stop_logcat_wrap_detect(self):\n        with self.lock:\n            self._end_marker = self._marker_count\n\n\nclass ChromeOsAssistant(LinuxAssistant):\n\n    parameters = LinuxAssistant.parameters + AndroidAssistant.parameters\n\n    def __init__(self, target, logcat_poll_period=None, disable_selinux=True):\n        super(ChromeOsAssistant, self).__init__(target)\n        if target.supports_android:\n            self.android_assistant = AndroidAssistant(target.android_container,\n                                                      logcat_poll_period, disable_selinux)\n        else:\n            self.android_assistant = None\n\n    def start(self):\n        super(ChromeOsAssistant, self).start()\n        if self.android_assistant:\n            self.android_assistant.start()\n\n    def extract_results(self, context):\n        super(ChromeOsAssistant, self).extract_results(context)\n        if self.android_assistant:\n            
self.android_assistant.extract_results(context)\n\n    def stop(self):\n        super(ChromeOsAssistant, self).stop()\n        if self.android_assistant:\n            self.android_assistant.stop()\n"
  },
  {
    "path": "wa/framework/target/config.py",
    "content": "#    Copyright 2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\nfrom copy import copy\n\n\nclass TargetConfig(dict):\n    \"\"\"\n    Represents a configuration for a target.\n\n    \"\"\"\n    def __init__(self, config=None):\n        dict.__init__(self)\n        if isinstance(config, TargetConfig):\n            self.__dict__ = copy(config.__dict__)\n        elif hasattr(config, 'items'):\n            for k, v in config.items():\n                self.set(k, v)\n        elif config:\n            raise ValueError(config)\n\n    def set(self, name, value):\n        setattr(self, name, value)\n"
  },
  {
    "path": "wa/framework/target/descriptor.py",
    "content": "#    Copyright 2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\nimport inspect\n\nfrom devlib import (LinuxTarget, AndroidTarget, LocalLinuxTarget,\n                    ChromeOsTarget, Platform, Juno, TC2, Gem5SimulationPlatform,\n                    AdbConnection, SshConnection, LocalConnection,\n                    TelnetConnection, Gem5Connection)\nfrom devlib.target import DEFAULT_SHELL_PROMPT\nfrom devlib.utils.ssh import DEFAULT_SSH_SUDO_COMMAND\n\nfrom wa.framework import pluginloader\nfrom wa.framework.configuration.core import get_config_point_map\nfrom wa.framework.exception import PluginLoaderError\nfrom wa.framework.plugin import Plugin, Parameter\nfrom wa.framework.target.assistant import LinuxAssistant, AndroidAssistant, ChromeOsAssistant\nfrom wa.utils.types import list_of_strings, list_of_ints, regex, identifier, caseless_string\nfrom wa.utils.misc import isiterable\n\n\ndef list_target_descriptions(loader=pluginloader):\n    targets = {}\n    for cls in loader.list_target_descriptors():\n        descriptor = cls()\n        for desc in descriptor.get_descriptions():\n            if desc.name in targets:\n                msg = 'Duplicate target \"{}\" returned by {} and {}'\n                prev_dtor = targets[desc.name].source\n                raise PluginLoaderError(msg.format(desc.name, prev_dtor.name,\n                                                   descriptor.name))\n            targets[desc.name] = 
desc\n    return list(targets.values())\n\n\ndef get_target_description(name, loader=pluginloader):\n    for tdesc in list_target_descriptions(loader):\n        if tdesc.name == name:\n            return tdesc\n    raise ValueError('Could not find target descriptor \"{}\"'.format(name))\n\n\ndef instantiate_target(tdesc, params, connect=None, extra_platform_params=None):\n    # pylint: disable=too-many-locals,too-many-branches\n    target_params = get_config_point_map(tdesc.target_params)\n    platform_params = get_config_point_map(tdesc.platform_params)\n    conn_params = get_config_point_map(tdesc.conn_params)\n    assistant_params = get_config_point_map(tdesc.assistant_params)\n\n    tp, pp, cp = {}, {}, {}\n\n    for supported_params, new_params in (target_params, tp), (platform_params, pp), (conn_params, cp):\n        for name, value in supported_params.items():\n            if value.default and name == value.name:\n                new_params[name] = value.default\n\n    for name, value in params.items():\n        if name in target_params:\n            if not target_params[name].deprecated:\n                tp[name] = value\n        elif name in platform_params:\n            if not platform_params[name].deprecated:\n                pp[name] = value\n        elif name in conn_params:\n            if not conn_params[name].deprecated:\n                cp[name] = value\n        elif name in assistant_params:\n            pass\n        else:\n            msg = 'Unexpected parameter for {}: {}'\n            raise ValueError(msg.format(tdesc.name, name))\n\n    for pname, pval in (extra_platform_params or {}).items():\n        if pname in pp:\n            raise RuntimeError('Platform parameter clash: {}'.format(pname))\n        pp[pname] = pval\n\n    tp['platform'] = (tdesc.platform or Platform)(**pp)\n    if cp:\n        tp['connection_settings'] = cp\n    if tdesc.connection:\n        tp['conn_cls'] = tdesc.connection\n    if connect is not None:\n        
tp['connect'] = connect\n\n    return tdesc.target(**tp)\n\n\ndef instantiate_assistant(tdesc, params, target):\n    assistant_params = {}\n    for param in tdesc.assistant_params:\n        if param.name in params:\n            assistant_params[param.name] = params[param.name]\n        elif param.default:\n            assistant_params[param.name] = param.default\n    return tdesc.assistant(target, **assistant_params)\n\n\nclass TargetDescription(object):\n\n    def __init__(self, name, source, description=None, target=None, platform=None,\n                 conn=None, assistant=None, target_params=None, platform_params=None,\n                 conn_params=None, assistant_params=None):\n        self.name = name\n        self.source = source\n        self.description = description\n        self.target = target\n        self.platform = platform\n        self.connection = conn\n        self.assistant = assistant\n        self._set('target_params', target_params)\n        self._set('platform_params', platform_params)\n        self._set('conn_params', conn_params)\n        self._set('assistant_params', assistant_params)\n\n    def get_default_config(self):\n        param_attrs = ['target_params', 'platform_params',\n                       'conn_params', 'assistant_params']\n        config = {}\n        for pattr in param_attrs:\n            for p in getattr(self, pattr):\n                if not p.deprecated:\n                    config[p.name] = p.default\n        return config\n\n    def _set(self, attr, vals):\n        if vals is None:\n            vals = []\n        elif isiterable(vals):\n            if hasattr(vals, 'values'):\n                vals = list(vals.values())\n        else:\n            msg = '{} must be iterable; got \"{}\"'\n            raise ValueError(msg.format(attr, vals))\n        setattr(self, attr, vals)\n\n\nclass TargetDescriptor(Plugin):\n\n    kind = 'target_descriptor'\n\n    def get_descriptions(self):  # pylint: disable=no-self-use\n        
return []\n\n\nCOMMON_TARGET_PARAMS = [\n    Parameter('working_directory', kind=str,\n              description='''\n              On-target working directory that will be used by WA. This\n              directory must be writable by the user WA logs in as without\n              the need for privilege elevation.\n              '''),\n    Parameter('executables_directory', kind=str,\n              description='''\n              On-target directory where WA will install its executable\n              binaries.  This location must allow execution. This location does\n              *not* need to be writable by unprivileged users or rooted devices\n              (WA will install with elevated privileges as necessary).\n              '''),\n    Parameter('modules', kind=list,\n              description='''\n              A list of additional modules to be installed for the target.\n\n              ``devlib`` implements functionality for particular subsystems as\n              modules.  A number of \"default\" modules (e.g. for cpufreq\n              subsystem) are loaded automatically, unless explicitly disabled.\n              If additional modules need to be loaded, they may be specified\n              using this parameter.\n\n              Please see ``devlib`` documentation for information on the available\n              modules.\n              '''),\n    Parameter('load_default_modules', kind=bool, default=True,\n              description='''\n              A number of modules (e.g. for working with the cpufreq subsystem) are\n              loaded by default when a Target is instantiated. Setting this to\n              ``False`` would suppress that, ensuring that only the base Target\n              interface is initialized.\n\n              You may want to set this to ``False`` if there is a problem with one\n              or more default modules on your platform (e.g. 
your device is\n              unrooted and cpufreq is not accessible to unprivileged users), or\n              if ``Target`` initialization is taking too long for your platform.\n              '''),\n    Parameter('shell_prompt', kind=regex, default=DEFAULT_SHELL_PROMPT,\n              description='''\n              A regex that matches the shell prompt on the target.\n              '''),\n\n    Parameter('max_async', kind=int, default=50,\n        description='''\n            The maximum number of concurrent asynchronous connections to the\n            target maintained at any time.\n            '''),\n]\n\nCOMMON_PLATFORM_PARAMS = [\n    Parameter('core_names', kind=list_of_strings,\n              description='''\n              List of names of CPU cores in the order that they appear to the\n              kernel. If not specified, it will be inferred from the platform.\n              '''),\n    Parameter('core_clusters', kind=list_of_ints,\n              description='''\n              Cluster mapping corresponding to the cores in ``core_names``.\n              Cluster indexing starts at ``0``.  If not specified, this will be\n              inferred from ``core_names`` -- consecutive cores with the same\n              name will be assumed to share a cluster.\n              '''),\n    Parameter('big_core', kind=str,\n              description='''\n              The name of the big cores in a big.LITTLE system. If not\n              specified, this will be inferred, either from the name (if one of\n              the names in ``core_names`` matches known big cores), or by\n              assuming that the last cluster is big.\n              '''),\n    Parameter('model', kind=str,\n              description='''\n              Hardware model of the platform. 
If not specified, an attempt will\n              be made to read it from target.\n              '''),\n    Parameter('modules', kind=list,\n              description='''\n              An additional list of modules to be loaded into the target.\n              '''),\n]\n\nVEXPRESS_PLATFORM_PARAMS = [\n    Parameter('serial_port', kind=str,\n              description='''\n              The serial device/port on the host for the initial connection to\n              the target (used for early boot, flashing, etc).\n              '''),\n    Parameter('baudrate', kind=int,\n              description='''\n              Baud rate for the serial connection.\n              '''),\n    Parameter('vemsd_mount', kind=str,\n              description='''\n              VExpress MicroSD card mount location. This is a MicroSD card in\n              the VExpress device that is mounted on the host via USB. The card\n              contains configuration files for the platform and firmware and\n              kernel images to be flashed.\n              '''),\n    Parameter('bootloader', kind=str,\n              allowed_values=['uefi', 'uefi-shell', 'u-boot', 'bootmon'],\n              description='''\n              Selects the bootloader mechanism used by the board. Depending on\n              firmware version, a number of possible boot mechanisms may be use.\n\n              Please see ``devlib`` documentation for descriptions.\n              '''),\n    Parameter('hard_reset_method', kind=str,\n              allowed_values=['dtr', 'reboottxt'],\n              description='''\n              There are a couple of ways to reset VersatileExpress board if the\n              software running on the board becomes unresponsive. 
Both require\n              configuration to be enabled (please see ``devlib`` documentation).\n\n              ``dtr``: toggle the DTR line on the serial connection\n              ``reboottxt``: create ``reboot.txt`` in the root of the VEMSD mount.\n              '''),\n]\n\nGEM5_PLATFORM_PARAMS = [\n    Parameter('gem5_bin', kind=str, mandatory=True,\n              description='''\n              Path to the gem5 binary\n              '''),\n    Parameter('gem5_args', kind=str, mandatory=True,\n              description='''\n              Arguments to be passed to the gem5 binary\n              '''),\n    Parameter('gem5_virtio', kind=str, mandatory=True,\n              description='''\n              VirtIO device setup arguments to be passed to gem5. VirtIO is used\n              to transfer files between the simulation and the host.\n              '''),\n    Parameter('name', kind=str, default='gem5',\n              description='''\n              The name for the gem5 \"device\".\n              '''),\n]\n\n\nCONNECTION_PARAMS = {\n    AdbConnection: [\n        Parameter(\n            'device', kind=str,\n            aliases=['adb_name'],\n            description=\"\"\"\n            ADB device name\n            \"\"\"),\n        Parameter(\n            'adb_server', kind=str,\n            description=\"\"\"\n            ADB server to connect to.\n            \"\"\"),\n        Parameter(\n            'adb_port', kind=int,\n            description=\"\"\"\n            ADB port to connect to.\n            \"\"\"),\n        Parameter(\n            'poll_transfers', kind=bool,\n            default=True,\n            description=\"\"\"\n            File transfers will be polled for activity. 
Inactive\n            file transfers are cancelled.\n            \"\"\"),\n        Parameter(\n            'start_transfer_poll_delay', kind=int,\n            default=30,\n            description=\"\"\"\n            How long to wait (s) for a transfer to complete\n            before polling transfer activity. Requires ``poll_transfers``\n            to be set.\n            \"\"\"),\n        Parameter(\n            'total_transfer_timeout', kind=int,\n            default=3600,\n            description=\"\"\"\n            The total time to elapse before a transfer is cancelled, regardless\n            of its activity. Requires ``poll_transfers`` to be set.\n            \"\"\"),\n        Parameter(\n            'transfer_poll_period', kind=int,\n            default=30,\n            description=\"\"\"\n            The period at which transfer activity is sampled. Requires\n            ``poll_transfers`` to be set. Too small values may cause\n            the destination size to appear the same over one or more sample\n            periods, causing improper transfer cancellation.\n            \"\"\"),\n        Parameter(\n            'adb_as_root', kind=bool,\n            default=False,\n            description=\"\"\"\n            Specify whether the adb server should be started in root mode.\n            \"\"\")\n    ],\n    SshConnection: [\n        Parameter(\n            'host', kind=str, mandatory=True,\n            description=\"\"\"\n            Host name or IP address of the target.\n            \"\"\"),\n        Parameter(\n            'username', kind=str, mandatory=True,\n            description=\"\"\"\n            User name to connect with\n            \"\"\"),\n        Parameter(\n            'password', kind=str,\n            description=\"\"\"\n            Password to use.\n            (When connecting to a passwordless machine set to an\n            empty string to prevent attempting ssh key authentication.)\n            \"\"\"),\n        Parameter(\n      
      'keyfile', kind=str,\n            description=\"\"\"\n            Key file to use\n            \"\"\"),\n        Parameter(\n            'port', kind=int,\n            default=22,\n            description=\"\"\"\n            The port SSH server is listening on on the target.\n            \"\"\"),\n        Parameter(\n            'strict_host_check', kind=bool, default=False,\n            description=\"\"\"\n            Specify whether devices should be connected to if\n            their host key does not match the systems known host keys. \"\"\"),\n        Parameter(\n            'sudo_cmd', kind=str,\n            default=DEFAULT_SSH_SUDO_COMMAND,\n            description=\"\"\"\n            Sudo command to use. Must have ``{}`` specified\n            somewhere in the string it indicate where the command\n            to be run via sudo is to go.\n            \"\"\"),\n        Parameter(\n            'use_scp', kind=bool,\n            default=False,\n            description=\"\"\"\n            Allow using SCP as method of file transfer instead\n            of the default SFTP.\n            \"\"\"),\n        Parameter(\n            'poll_transfers', kind=bool,\n            default=True,\n            description=\"\"\"\n            File transfers will be polled for activity. Inactive\n            file transfers are cancelled.\n            \"\"\"),\n        Parameter(\n            'start_transfer_poll_delay', kind=int,\n            default=30,\n            description=\"\"\"\n            How long to wait (s) for a transfer to complete\n            before polling transfer activity. Requires ``poll_transfers``\n            to be set.\n            \"\"\"),\n        Parameter(\n            'total_transfer_timeout', kind=int,\n            default=3600,\n            description=\"\"\"\n            The total time to elapse before a transfer is cancelled, regardless\n            of its activity. 
Requires ``poll_transfers`` to be set.\n            \"\"\"),\n        Parameter(\n            'transfer_poll_period', kind=int,\n            default=30,\n            description=\"\"\"\n            The period at which transfer activity is sampled. Requires\n            ``poll_transfers`` to be set. Too small values may cause\n            the destination size to appear the same over one or more sample\n            periods, causing improper transfer cancellation.\n            \"\"\"),\n        # Deprecated Parameters\n        Parameter(\n            'telnet', kind=str,\n            description=\"\"\"\n            Original shell prompt to expect.\n            \"\"\",\n            deprecated=True),\n        Parameter(\n            'password_prompt', kind=str,\n            description=\"\"\"\n            Password prompt to expect\n            \"\"\",\n            deprecated=True),\n        Parameter(\n            'original_prompt', kind=str,\n            description=\"\"\"\n            Original shell prompt to expect.\n            \"\"\",\n            deprecated=True),\n    ],\n    TelnetConnection: [\n        Parameter(\n            'host', kind=str, mandatory=True,\n            description=\"\"\"\n            Host name or IP address of the target.\n            \"\"\"),\n        Parameter(\n            'username', kind=str, mandatory=True,\n            description=\"\"\"\n            User name to connect with\n            \"\"\"),\n        Parameter(\n            'password', kind=str,\n            description=\"\"\"\n            Password to use.\n            \"\"\"),\n        Parameter(\n            'port', kind=int,\n            description=\"\"\"\n            The port SSH server is listening on on the target.\n            \"\"\"),\n        Parameter(\n            'password_prompt', kind=str,\n            description=\"\"\"\n            Password prompt to expect\n            \"\"\"),\n        Parameter(\n            'original_prompt', kind=str,\n            
description=\"\"\"\n            Original shell prompt to expect.\n            \"\"\"),\n        Parameter(\n            'sudo_cmd', kind=str,\n            default=\"sudo -- sh -c {}\",\n            description=\"\"\"\n            Sudo command to use. Must have ``{}`` specified\n            somewhere in the string it indicate where the command\n            to be run via sudo is to go.\n            \"\"\"),\n    ],\n    Gem5Connection: [\n        Parameter(\n            'host', kind=str, mandatory=False,\n            description=\"\"\"\n            Host name or IP address of the target.\n            \"\"\"),\n        Parameter(\n            'username', kind=str, default='root',\n            description=\"\"\"\n            User name to connect to gem5 simulation.\n            \"\"\"),\n        Parameter(\n            'password', kind=str,\n            description=\"\"\"\n            Password to use.\n            \"\"\"),\n        Parameter(\n            'port', kind=int,\n            description=\"\"\"\n            The port SSH server is listening on on the target.\n            \"\"\"),\n        Parameter(\n            'password_prompt', kind=str,\n            description=\"\"\"\n            Password prompt to expect\n            \"\"\"),\n        Parameter(\n            'original_prompt', kind=str,\n            description=\"\"\"\n            Original shell prompt to expect.\n            \"\"\"),\n    ],\n    LocalConnection: [\n        Parameter(\n            'password', kind=str,\n            description=\"\"\"\n            Password to use for sudo. 
If not specified, the user will\n            be prompted during initialization.\n            \"\"\"),\n        Parameter(\n            'keep_password', kind=bool, default=True,\n            description=\"\"\"\n            If ``True`` (the default), the password will be cached in\n            memory after it is first obtained from the user, so that the\n            user would not be prompted for it again.\n            \"\"\"),\n        Parameter(\n            'unrooted', kind=bool, default=False,\n            description=\"\"\"\n            Indicate that the target should be considered unrooted; do not\n            attempt sudo or ask the user for their password.\n            \"\"\"),\n    ],\n}\n\nCONNECTION_PARAMS['ChromeOsConnection'] = \\\n    CONNECTION_PARAMS[AdbConnection] + CONNECTION_PARAMS[SshConnection]\n\n\n# name --> ((target_class, conn_class, unsupported_platforms), params_list, defaults)\nTARGETS = {\n    'linux': ((LinuxTarget, SshConnection, []), COMMON_TARGET_PARAMS, None),\n    'android': ((AndroidTarget, AdbConnection, []), COMMON_TARGET_PARAMS +\n               [Parameter('package_data_directory', kind=str, default='/data/data',\n                          description='''\n                          Directory containing Android data\n                          '''),\n               ], None),\n    'chromeos': ((ChromeOsTarget, 'ChromeOsConnection', []), COMMON_TARGET_PARAMS +\n                [Parameter('package_data_directory', kind=str, default='/data/data',\n                           description='''\n                           Directory containing Android data\n                           '''),\n                Parameter('android_working_directory', kind=str,\n                          description='''\n                          On-target working directory that will be used by WA for the\n                          android container. 
This directory must be writable by the user\n                          WA logs in as without the need for privilege elevation.\n                          '''),\n                Parameter('android_executables_directory', kind=str,\n                          description='''\n                          On-target directory where WA will install its executable\n                          binaries for the android container. This location must allow execution.\n                          This location does *not* need to be writable by unprivileged users or\n                          rooted devices (WA will install with elevated privileges as necessary).\n                          directory must be writable by the user WA logs in as without\n                          the need for privilege elevation.\n                          '''),\n                ], None),\n    'local': ((LocalLinuxTarget, LocalConnection, [Juno, Gem5SimulationPlatform, TC2]),\n              COMMON_TARGET_PARAMS, None),\n}\n\n# name --> assistant\nASSISTANTS = {\n    'linux': LinuxAssistant,\n    'android': AndroidAssistant,\n    'local': LinuxAssistant,\n    'chromeos': ChromeOsAssistant\n}\n\n# Platform specific parameter overrides.\nJUNO_PLATFORM_OVERRIDES = [\n        Parameter('baudrate', kind=int, default=115200,\n                description='''\n                Baud rate for the serial connection.\n                '''),\n        Parameter('vemsd_mount', kind=str, default='/media/JUNO',\n                description='''\n                VExpress MicroSD card mount location. This is a MicroSD card in\n                the VExpress device that is mounted on the host via USB. 
The card\n                contains configuration files for the platform and firmware and\n                kernel images to be flashed.\n                '''),\n        Parameter('bootloader', kind=str, default='u-boot',\n                allowed_values=['uefi', 'uefi-shell', 'u-boot', 'bootmon'],\n                description='''\n                Selects the bootloader mechanism used by the board. Depending on\n                firmware version, a number of possible boot mechanisms may be use.\n\n                Please see ``devlib`` documentation for descriptions.\n                '''),\n        Parameter('hard_reset_method', kind=str, default='dtr',\n                allowed_values=['dtr', 'reboottxt'],\n                description='''\n                There are a couple of ways to reset VersatileExpress board if the\n                software running on the board becomes unresponsive. Both require\n                configuration to be enabled (please see ``devlib`` documentation).\n\n                ``dtr``: toggle the DTR line on the serial connection\n                ``reboottxt``: create ``reboot.txt`` in the root of the VEMSD mount.\n                '''),\n]\nTC2_PLATFORM_OVERRIDES = [\n        Parameter('baudrate', kind=int, default=38400,\n                description='''\n                Baud rate for the serial connection.\n                '''),\n        Parameter('vemsd_mount', kind=str, default='/media/VEMSD',\n                description='''\n                VExpress MicroSD card mount location. This is a MicroSD card in\n                the VExpress device that is mounted on the host via USB. 
The card\n                contains configuration files for the platform and firmware and\n                kernel images to be flashed.\n                '''),\n        Parameter('bootloader', kind=str, default='bootmon',\n                allowed_values=['uefi', 'uefi-shell', 'u-boot', 'bootmon'],\n                description='''\n                Selects the bootloader mechanism used by the board. Depending on\n                firmware version, a number of possible boot mechanisms may be use.\n\n                Please see ``devlib`` documentation for descriptions.\n                '''),\n        Parameter('hard_reset_method', kind=str, default='reboottxt',\n                allowed_values=['dtr', 'reboottxt'],\n                description='''\n                There are a couple of ways to reset VersatileExpress board if the\n                software running on the board becomes unresponsive. Both require\n                configuration to be enabled (please see ``devlib`` documentation).\n\n                ``dtr``: toggle the DTR line on the serial connection\n                ``reboottxt``: create ``reboot.txt`` in the root of the VEMSD mount.\n                '''),\n]\n\n# name --> ((platform_class, conn_class, conn_overrides), params_list, defaults, target_overrides)\n# Note: normally, connection is defined by the Target name, but\n#       platforms may choose to override it\n# Note: the target_overrides allows you to override common target_params for a\n# particular platform. 
Parameters you can override are in COMMON_TARGET_PARAMS\n# Example of overriding one of the target parameters: Replace last `None` with\n# a list of `Parameter` objects to be used instead.\nPLATFORMS = {\n    'generic': ((Platform, None, None), COMMON_PLATFORM_PARAMS, None, None),\n    'juno': ((Juno, None, [\n                            Parameter('host', kind=str, mandatory=False,\n                            description=\"Host name or IP address of the target.\"),\n                          ]\n            ), COMMON_PLATFORM_PARAMS + VEXPRESS_PLATFORM_PARAMS, JUNO_PLATFORM_OVERRIDES, None),\n    'tc2': ((TC2, None, None), COMMON_PLATFORM_PARAMS + VEXPRESS_PLATFORM_PARAMS,\n            TC2_PLATFORM_OVERRIDES, None),\n    'gem5': ((Gem5SimulationPlatform, Gem5Connection, None), GEM5_PLATFORM_PARAMS, None, None),\n}\n\n\nclass DefaultTargetDescriptor(TargetDescriptor):\n\n    name = 'devlib_targets'\n\n    description = \"\"\"\n    The default target descriptor that provides descriptions in the form\n    <platform>_<target>.\n\n    These map directly onto ``Target``\\ s and ``Platform``\\ s supplied by ``devlib``.\n\n    \"\"\"\n\n    def get_descriptions(self):\n        # pylint: disable=attribute-defined-outside-init,too-many-locals\n        result = []\n        for target_name, target_tuple in TARGETS.items():\n            (target, conn, unsupported_platforms), target_params = self._get_item(target_tuple)\n            assistant = ASSISTANTS[target_name]\n            conn_params = CONNECTION_PARAMS[conn]\n            for platform_name, platform_tuple in PLATFORMS.items():\n                platform_target_defaults = platform_tuple[-1]\n                platform_tuple = platform_tuple[0:-1]\n                (platform, plat_conn, conn_defaults), platform_params = self._get_item(platform_tuple)\n                if platform in unsupported_platforms:\n                    continue\n                # Add target defaults specified in the Platform tuple\n                
target_params = self._override_params(target_params, platform_target_defaults)\n                name = '{}_{}'.format(platform_name, target_name)\n                td = TargetDescription(name, self)\n                td.target = target\n                td.platform = platform\n                td.assistant = assistant\n                td.target_params = target_params\n                td.platform_params = platform_params\n                td.assistant_params = assistant.parameters\n\n                if plat_conn:\n                    td.conn = plat_conn\n                    td.conn_params = self._override_params(CONNECTION_PARAMS[plat_conn],\n                                                           conn_defaults)\n                else:\n                    td.conn = conn\n                    td.conn_params = self._override_params(conn_params, conn_defaults)\n\n                result.append(td)\n        return result\n\n    def _override_params(self, params, overrides): # pylint: disable=no-self-use\n        ''' Returns a new list of parameters replacing any parameter with the\n        corresponding parameter in overrides'''\n        if not overrides:\n            return params\n        param_map = {p.name: p for p in params}\n        for override in overrides:\n            if override.name in param_map:\n                param_map[override.name] = override\n        # Return the list of overriden parameters\n        return list(param_map.values())\n\n    def _get_item(self, item_tuple):\n        cls_tuple, params, defaults = item_tuple\n        updated_params = self._override_params(params, defaults)\n        return cls_tuple, updated_params\n\n\n_adhoc_target_descriptions = []\n\n\ndef create_target_description(name, *args, **kwargs):\n    name = identifier(name)\n    for td in _adhoc_target_descriptions:\n        if caseless_string(name) == td.name:\n            msg = 'Target with name \"{}\" already exists (from source: {})'\n            raise 
ValueError(msg.format(name, td.source))\n\n    stack = inspect.stack()\n    # inspect.stack() returns a list of call frame records for the current thread\n    # in reverse call order. So the first entry is for the current frame and next one\n    # for the immediate caller. Each entry is a tuple in the format\n    #  (frame_object, module_path, line_no, function_name, source_lines, source_lines_index)\n    #\n    # Here we assign the path of the calling module as the \"source\" for this description.\n    # because this might be invoked via the add_scription_for_target wrapper, we need to\n    # check for that, and make sure that we get the info for *its* caller in that case.\n    if stack[1][3] == 'add_description_for_target':\n        source = stack[2][1]\n    else:\n        source = stack[1][1]\n\n    _adhoc_target_descriptions.append(TargetDescription(name, source, *args, **kwargs))\n\n\ndef _get_target_defaults(target):\n    specificity = 0\n    res = ('linux', TARGETS['linux'])  # fallback to a generic linux target\n    for name, ttup in TARGETS.items():\n        if issubclass(target, ttup[0][0]):\n            new_spec = len(inspect.getmro(ttup[0][0]))\n            if new_spec > specificity:\n                res = (name, ttup)\n                specificity = new_spec\n    return res\n\n\ndef add_description_for_target(target, description=None, **kwargs):\n    (base_name, ((_, base_conn, _), base_params, _)) = _get_target_defaults(target)\n\n    if 'target_params' not in kwargs:\n        kwargs['target_params'] = base_params\n\n    if 'platform' not in kwargs:\n        kwargs['platform'] = Platform\n    if 'platform_params' not in kwargs:\n        for (plat, conn, _), params, _, _ in PLATFORMS.values():\n            if plat == kwargs['platform']:\n                kwargs['platform_params'] = params\n                if conn is not None and kwargs['conn'] is None:\n                    kwargs['conn'] = conn\n                break\n\n    if 'conn' not in kwargs:\n     
   kwargs['conn'] = base_conn\n    if 'conn_params' not in kwargs:\n        kwargs['conn_params'] = CONNECTION_PARAMS.get(kwargs['conn'])\n\n    if 'assistant' not in kwargs:\n        kwargs['assistant'] = ASSISTANTS.get(base_name)\n\n    create_target_description(target.name, target=target, description=description, **kwargs)\n\n\nclass SimpleTargetDescriptor(TargetDescriptor):\n\n    name = 'adhoc_targets'\n\n    description = \"\"\"\n    Returns target descriptions added with ``create_target_description``.\n\n    \"\"\"\n\n    def get_descriptions(self):\n        return _adhoc_target_descriptions\n"
  },
  {
    "path": "wa/framework/target/info.py",
    "content": "#    Copyright 2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n# pylint: disable=protected-access\n\nimport os\n\nfrom devlib import AndroidTarget, TargetError\nfrom devlib.target import KernelConfig, KernelVersion, Cpuinfo\nfrom devlib.utils.android import AndroidProperties\n\nfrom wa.framework.configuration.core import settings\nfrom wa.framework.exception import ConfigError\nfrom wa.utils.serializer import read_pod, write_pod, Podable\nfrom wa.utils.misc import atomic_write_path\n\n\ndef cpuinfo_from_pod(pod):\n    cpuinfo = Cpuinfo('')\n    cpuinfo.sections = pod['cpuinfo']\n    lines = []\n    for section in cpuinfo.sections:\n        for key, value in section.items():\n            line = '{}: {}'.format(key, value)\n            lines.append(line)\n        lines.append('')\n    cpuinfo.text = '\\n'.join(lines)\n    return cpuinfo\n\n\ndef kernel_version_from_pod(pod):\n    release_string = pod['kernel_release']\n    version_string = pod['kernel_version']\n    if release_string:\n        if version_string:\n            kernel_string = '{} #{}'.format(release_string, version_string)\n        else:\n            kernel_string = release_string\n    else:\n        kernel_string = '#{}'.format(version_string)\n    return KernelVersion(kernel_string)\n\n\ndef kernel_config_from_pod(pod):\n    config = KernelConfig('')\n    config.typed_config._config = pod['kernel_config']\n    lines = []\n    for key, value in 
config.items():\n        if value == 'n':\n            lines.append('# {} is not set'.format(key))\n        else:\n            lines.append('{}={}'.format(key, value))\n    config.text = '\\n'.join(lines)\n    return config\n\n\nclass CpufreqInfo(Podable):\n\n    _pod_serialization_version = 1\n\n    @staticmethod\n    def from_pod(pod):\n        pod = CpufreqInfo._upgrade_pod(pod)\n        return CpufreqInfo(**pod)\n\n    def __init__(self, **kwargs):\n        super(CpufreqInfo, self).__init__()\n        self.available_frequencies = kwargs.pop('available_frequencies', [])\n        self.available_governors = kwargs.pop('available_governors', [])\n        self.related_cpus = kwargs.pop('related_cpus', [])\n        self.driver = kwargs.pop('driver', None)\n        self._pod_version = kwargs.pop('_pod_version', self._pod_serialization_version)\n\n    def to_pod(self):\n        pod = super(CpufreqInfo, self).to_pod()\n        pod.update(self.__dict__)\n        return pod\n\n    @staticmethod\n    def _pod_upgrade_v1(pod):\n        pod['_pod_version'] = pod.get('_pod_version', 1)\n        return pod\n\n    def __repr__(self):\n        return 'Cpufreq({} {})'.format(self.driver, self.related_cpus)\n\n    __str__ = __repr__\n\n\nclass IdleStateInfo(Podable):\n\n    _pod_serialization_version = 1\n\n    @staticmethod\n    def from_pod(pod):\n        pod = IdleStateInfo._upgrade_pod(pod)\n        return IdleStateInfo(**pod)\n\n    def __init__(self, **kwargs):\n        super(IdleStateInfo, self).__init__()\n        self.name = kwargs.pop('name', None)\n        self.desc = kwargs.pop('desc', None)\n        self.power = kwargs.pop('power', None)\n        self.latency = kwargs.pop('latency', None)\n        self._pod_version = kwargs.pop('_pod_version', self._pod_serialization_version)\n\n    def to_pod(self):\n        pod = super(IdleStateInfo, self).to_pod()\n        pod.update(self.__dict__)\n        return pod\n\n    @staticmethod\n    def _pod_upgrade_v1(pod):\n        
pod['_pod_version'] = pod.get('_pod_version', 1)\n        return pod\n\n    def __repr__(self):\n        return 'IdleState({}/{})'.format(self.name, self.desc)\n\n    __str__ = __repr__\n\n\nclass CpuidleInfo(Podable):\n\n    _pod_serialization_version = 1\n\n    @staticmethod\n    def from_pod(pod):\n        pod = CpuidleInfo._upgrade_pod(pod)\n        instance = CpuidleInfo()\n        instance._pod_version = pod['_pod_version']\n        instance.governor = pod['governor']\n        instance.driver = pod['driver']\n        instance.states = [IdleStateInfo.from_pod(s) for s in pod['states']]\n        return instance\n\n    @property\n    def num_states(self):\n        return len(self.states)\n\n    def __init__(self):\n        super(CpuidleInfo, self).__init__()\n        self.governor = None\n        self.driver = None\n        self.states = []\n\n    def to_pod(self):\n        pod = super(CpuidleInfo, self).to_pod()\n        pod['governor'] = self.governor\n        pod['driver'] = self.driver\n        pod['states'] = [s.to_pod() for s in self.states]\n        return pod\n\n    @staticmethod\n    def _pod_upgrade_v1(pod):\n        pod['_pod_version'] = pod.get('_pod_version', 1)\n        return pod\n\n    def __repr__(self):\n        return 'Cpuidle({}/{} {} states)'.format(\n            self.governor, self.driver, self.num_states)\n\n    __str__ = __repr__\n\n\nclass CpuInfo(Podable):\n\n    _pod_serialization_version = 1\n\n    @staticmethod\n    def from_pod(pod):\n        instance = super(CpuInfo, CpuInfo).from_pod(pod)\n        instance.id = pod['id']\n        instance.name = pod['name']\n        instance.architecture = pod['architecture']\n        instance.features = pod['features']\n        instance.cpufreq = CpufreqInfo.from_pod(pod['cpufreq'])\n        instance.cpuidle = CpuidleInfo.from_pod(pod['cpuidle'])\n        return instance\n\n    def __init__(self):\n        super(CpuInfo, self).__init__()\n        self.id = None\n        self.name = None\n        
self.architecture = None\n        self.features = []\n        self.cpufreq = CpufreqInfo()\n        self.cpuidle = CpuidleInfo()\n\n    def to_pod(self):\n        pod = super(CpuInfo, self).to_pod()\n        pod['id'] = self.id\n        pod['name'] = self.name\n        pod['architecture'] = self.architecture\n        pod['features'] = self.features\n        pod['cpufreq'] = self.cpufreq.to_pod()\n        pod['cpuidle'] = self.cpuidle.to_pod()\n        return pod\n\n    @staticmethod\n    def _pod_upgrade_v1(pod):\n        pod['_pod_version'] = pod.get('_pod_version', 1)\n        return pod\n\n    def __repr__(self):\n        return 'Cpu({} {})'.format(self.id, self.name)\n\n    __str__ = __repr__\n\n\ndef get_target_info(target):\n    info = TargetInfo()\n    info.target = target.__class__.__name__\n    info.modules = target.modules\n    info.os = target.os\n    info.os_version = target.os_version\n    info.system_id = target.system_id\n    info.abi = target.abi\n    info.is_rooted = target.is_rooted\n    info.kernel_version = target.kernel_version\n    info.kernel_config = target.config\n    info.hostname = target.hostname\n    info.hostid = target.hostid\n\n    try:\n        info.sched_features = target.read_value('/sys/kernel/debug/sched_features').split()\n    except TargetError:\n        # best effort -- debugfs might not be mounted\n        pass\n\n    for i, name in enumerate(target.cpuinfo.cpu_names):\n        cpu = CpuInfo()\n        cpu.id = i\n        cpu.name = name\n        cpu.features = target.cpuinfo.get_cpu_features(i)\n        cpu.architecture = target.cpuinfo.architecture\n\n        if target.has('cpufreq'):\n            cpu.cpufreq.available_governors = target.cpufreq.list_governors(i)\n            cpu.cpufreq.available_frequencies = target.cpufreq.list_frequencies(i)\n            cpu.cpufreq.related_cpus = target.cpufreq.get_related_cpus(i)\n            cpu.cpufreq.driver = target.cpufreq.get_driver(i)\n\n        if target.has('cpuidle'):\n     
       cpu.cpuidle.driver = target.cpuidle.get_driver()\n            cpu.cpuidle.governor = target.cpuidle.get_governor()\n            for state in target.cpuidle.get_states(i):\n                state_info = IdleStateInfo()\n                state_info.name = state.name\n                state_info.desc = state.desc\n                state_info.power = state.power\n                state_info.latency = state.latency\n                cpu.cpuidle.states.append(state_info)\n\n        info.cpus.append(cpu)\n\n    info.page_size_kb = target.page_size_kb\n\n    if isinstance(target, AndroidTarget):\n        info.screen_resolution = target.screen_resolution\n        info.prop = target.getprop()\n        info.android_id = target.android_id\n\n    return info\n\n\ndef read_target_info_cache():\n    if not os.path.exists(settings.cache_directory):\n        os.makedirs(settings.cache_directory)\n    if not os.path.isfile(settings.target_info_cache_file):\n        return {}\n    return read_pod(settings.target_info_cache_file)\n\n\ndef write_target_info_cache(cache):\n    if not os.path.exists(settings.cache_directory):\n        os.makedirs(settings.cache_directory)\n    with atomic_write_path(settings.target_info_cache_file) as at_path:\n        write_pod(cache, at_path)\n\n\ndef get_target_info_from_cache(system_id, cache=None):\n    if cache is None:\n        cache = read_target_info_cache()\n    pod = cache.get(system_id, None)\n\n    if not pod:\n        return None\n\n    _pod_version = pod.get('_pod_version', 0)\n    if _pod_version != TargetInfo._pod_serialization_version:\n        msg = 'Target info version mismatch. 
Expected {}, but found {}.\\nTry deleting {}'\n        raise ConfigError(msg.format(TargetInfo._pod_serialization_version, _pod_version,\n                                     settings.target_info_cache_file))\n    return TargetInfo.from_pod(pod)\n\n\ndef cache_target_info(target_info, overwrite=False, cache=None):\n    if cache is None:\n        cache = read_target_info_cache()\n    if target_info.system_id in cache and not overwrite:\n        raise ValueError('TargetInfo for {} is already in cache.'.format(target_info.system_id))\n    cache[target_info.system_id] = target_info.to_pod()\n    write_target_info_cache(cache)\n\n\nclass TargetInfo(Podable):\n\n    _pod_serialization_version = 5\n\n    @staticmethod\n    def from_pod(pod):\n        instance = super(TargetInfo, TargetInfo).from_pod(pod)\n        instance.target = pod['target']\n        instance.modules = pod['modules']\n        instance.abi = pod['abi']\n        instance.cpus = [CpuInfo.from_pod(c) for c in pod['cpus']]\n        instance.os = pod['os']\n        instance.os_version = pod['os_version']\n        instance.system_id = pod['system_id']\n        instance.hostid = pod['hostid']\n        instance.hostname = pod['hostname']\n        instance.abi = pod['abi']\n        instance.is_rooted = pod['is_rooted']\n        instance.kernel_version = kernel_version_from_pod(pod)\n        instance.kernel_config = kernel_config_from_pod(pod)\n        instance.sched_features = pod['sched_features']\n        instance.page_size_kb = pod.get('page_size_kb')\n        if instance.os == 'android':\n            instance.screen_resolution = pod['screen_resolution']\n            instance.prop = AndroidProperties('')\n            instance.prop._properties = pod['prop']\n            instance.android_id = pod['android_id']\n\n        return instance\n\n    def __init__(self):\n        super(TargetInfo, self).__init__()\n        self.target = None\n        self.modules = []\n        self.cpus = []\n        self.os = None\n   
     self.os_version = None\n        self.system_id = None\n        self.hostid = None\n        self.hostname = None\n        self.abi = None\n        self.is_rooted = None\n        self.kernel_version = None\n        self.kernel_config = None\n        self.sched_features = None\n        self.screen_resolution = None\n        self.prop = None\n        self.android_id = None\n        self.page_size_kb = None\n\n    def to_pod(self):\n        pod = super(TargetInfo, self).to_pod()\n        pod['target'] = self.target\n        pod['modules'] = self.modules\n        pod['abi'] = self.abi\n        pod['cpus'] = [c.to_pod() for c in self.cpus]\n        pod['os'] = self.os\n        pod['os_version'] = self.os_version\n        pod['system_id'] = self.system_id\n        pod['hostid'] = self.hostid\n        pod['hostname'] = self.hostname\n        pod['abi'] = self.abi\n        pod['is_rooted'] = self.is_rooted\n        pod['kernel_release'] = self.kernel_version.release\n        pod['kernel_version'] = self.kernel_version.version\n        pod['kernel_config'] = dict(self.kernel_config.iteritems())\n        pod['sched_features'] = self.sched_features\n        pod['page_size_kb'] = self.page_size_kb\n        if self.os == 'android':\n            pod['screen_resolution'] = self.screen_resolution\n            pod['prop'] = self.prop._properties\n            pod['android_id'] = self.android_id\n\n        return pod\n\n    @staticmethod\n    def _pod_upgrade_v1(pod):\n        pod['_pod_version'] = pod.get('_pod_version', 1)\n        pod['cpus'] = pod.get('cpus', [])\n        pod['system_id'] = pod.get('system_id')\n        pod['hostid'] = pod.get('hostid')\n        pod['hostname'] = pod.get('hostname')\n        pod['sched_features'] = pod.get('sched_features')\n        pod['screen_resolution'] = pod.get('screen_resolution', (0, 0))\n        pod['prop'] = pod.get('prop')\n        pod['android_id'] = pod.get('android_id')\n        return pod\n\n    @staticmethod\n    def 
_pod_upgrade_v2(pod):\n        pod['page_size_kb'] = pod.get('page_size_kb')\n        pod['_pod_version'] = pod.get('format_version', 0)\n        return pod\n\n    @staticmethod\n    def _pod_upgrade_v3(pod):\n        config = {}\n        for key, value in pod['kernel_config'].items():\n            config[key.upper()] = value\n        pod['kernel_config'] = config\n        return pod\n\n    @staticmethod\n    def _pod_upgrade_v4(pod):\n        return TargetInfo._pod_upgrade_v3(pod)\n\n    @staticmethod\n    def _pod_upgrade_v5(pod):\n        pod['modules'] = pod.get('modules') or []\n        return pod\n"
  },
  {
    "path": "wa/framework/target/manager.py",
    "content": "#    Copyright 2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\nimport logging\n\nfrom devlib import Gem5SimulationPlatform\nfrom devlib.utils.misc import memoized\n\nfrom wa.framework import signal\nfrom wa.framework.exception import ExecutionError, TargetError, TargetNotRespondingError\nfrom wa.framework.plugin import Parameter\nfrom wa.framework.target.descriptor import (get_target_description,\n                                            instantiate_target,\n                                            instantiate_assistant)\nfrom wa.framework.target.info import (get_target_info, get_target_info_from_cache,\n                                      cache_target_info, read_target_info_cache)\nfrom wa.framework.target.runtime_parameter_manager import RuntimeParameterManager\nfrom wa.utils.types import module_name_set\n\n\nclass TargetManager(object):\n    \"\"\"\n    Instantiate the required target and perform configuration and validation of the device.\n    \"\"\"\n\n    parameters = [\n        Parameter('disconnect', kind=bool, default=False,\n                  description=\"\"\"\n                  Specifies whether the target should be disconnected from\n                  at the end of the run.\n                  \"\"\"),\n    ]\n\n    def __init__(self, name, parameters, outdir):\n        self.outdir = outdir\n        self.logger = logging.getLogger('tm')\n        self.target_name = name\n        self.target = None\n      
  self.assistant = None\n        self.platform_name = None\n        self.is_responsive = None\n        self.rpm = None\n        self.parameters = parameters\n        self.disconnect = parameters.get('disconnect')\n\n    def initialize(self):\n        self._init_target()\n        self.assistant.initialize()\n\n        # If target supports hotplugging, online all cpus before perform discovery\n        # and restore original configuration after completed.\n        if self.target.has('hotplug'):\n            online_cpus = self.target.list_online_cpus()\n            try:\n                self.target.hotplug.online_all()\n            except TargetError:\n                msg = 'Failed to online all CPUS - some information may not be '\\\n                      'able to be retrieved.'\n                self.logger.debug(msg)\n            self.rpm = RuntimeParameterManager(self.target)\n            all_cpus = set(range(self.target.number_of_cpus))\n            self.target.hotplug.offline(*all_cpus.difference(online_cpus))\n        else:\n            self.rpm = RuntimeParameterManager(self.target)\n\n    def finalize(self):\n        if not self.target:\n            return\n        if self.assistant:\n            self.assistant.finalize()\n        if self.disconnect or isinstance(self.target.platform, Gem5SimulationPlatform):\n            self.logger.info('Disconnecting from the device')\n            with signal.wrap('TARGET_DISCONNECT'):\n                self.target.disconnect()\n\n    def start(self):\n        self.assistant.start()\n\n    def stop(self):\n        self.assistant.stop()\n\n    def extract_results(self, context):\n        self.assistant.extract_results(context)\n\n    @memoized\n    def get_target_info(self):\n        cache = read_target_info_cache()\n        info = get_target_info_from_cache(self.target.system_id, cache=cache)\n\n        if info is None:\n            info = get_target_info(self.target)\n            cache_target_info(info, cache=cache)\n        
else:\n            # If module configuration has changed form when the target info\n            # was previously cached, it is possible additional info will be\n            # available, so should re-generate the cache.\n            if module_name_set(info.modules) != module_name_set(self.target.modules):\n                info = get_target_info(self.target)\n                cache_target_info(info, overwrite=True, cache=cache)\n\n        return info\n\n    def reboot(self, context, hard=False):\n        with signal.wrap('REBOOT', self, context):\n            self.target.reboot(hard)\n\n    def merge_runtime_parameters(self, parameters):\n        return self.rpm.merge_runtime_parameters(parameters)\n\n    def validate_runtime_parameters(self, parameters):\n        self.rpm.validate_runtime_parameters(parameters)\n\n    def commit_runtime_parameters(self, parameters):\n        self.rpm.commit_runtime_parameters(parameters)\n\n    def verify_target_responsive(self, context):\n        can_reboot = context.reboot_policy.can_reboot\n        if not self.target.check_responsive(explode=False):\n            self.is_responsive = False\n            if not can_reboot:\n                raise TargetNotRespondingError('Target unresponsive and is not allowed to reboot.')\n            elif self.target.has('hard_reset'):\n                self.logger.info('Target unresponsive; performing hard reset')\n                self.reboot(context, hard=True)\n                self.is_responsive = True\n                raise ExecutionError('Target became unresponsive but was recovered.')\n            else:\n                raise TargetNotRespondingError('Target unresponsive and hard reset not supported; bailing.')\n\n    def _init_target(self):\n        tdesc = get_target_description(self.target_name)\n\n        extra_plat_params = {}\n        if tdesc.platform is Gem5SimulationPlatform:\n            extra_plat_params['host_output_dir'] = self.outdir\n\n        self.logger.debug('Creating {} 
target'.format(self.target_name))\n        self.target = instantiate_target(tdesc, self.parameters, connect=False,\n                                         extra_platform_params=extra_plat_params)\n\n        self.is_responsive = True\n\n        with signal.wrap('TARGET_CONNECT'):\n            self.target.connect()\n        self.logger.info('Setting up target')\n        self.target.setup()\n\n        self.assistant = instantiate_assistant(tdesc, self.parameters, self.target)\n"
  },
  {
    "path": "wa/framework/target/runtime_config.py",
    "content": "#    Copyright 2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\nimport logging\nimport time\nfrom collections import defaultdict, OrderedDict\nfrom copy import copy\n\nfrom devlib.exception import TargetError\nfrom devlib.utils.misc import unique\nfrom devlib.utils.types import integer\n\nfrom wa.framework.exception import ConfigError\nfrom wa.framework.plugin import Plugin, Parameter\nfrom wa.utils.misc import resolve_cpus, resolve_unique_domain_cpus\nfrom wa.utils.types import caseless_string, enum\n\n\nlogger = logging.getLogger('RuntimeConfig')\n\n\nclass RuntimeParameter(Parameter):\n    def __init__(self, name, setter, setter_params=None, **kwargs):\n        super(RuntimeParameter, self).__init__(name, **kwargs)\n        self.setter = setter\n        self.setter_params = setter_params or {}\n\n    def set(self, obj, value):\n        self.validate_value(self.name, value)\n        self.setter(obj, value, **self.setter_params)\n\n\nclass RuntimeConfig(Plugin):\n\n    name = None\n    kind = 'runtime-config'\n\n    @property\n    def supported_parameters(self):\n        return list(self._runtime_params.values())\n\n    @property\n    def core_names(self):\n        return unique(self.target.core_names)\n\n    def __init__(self, target, **kwargs):\n        super(RuntimeConfig, self).__init__(**kwargs)\n        self.target = target\n        self._target_checked = False\n        self._runtime_params = {}\n        try:\n       
     self.initialize()\n        except TargetError:\n            msg = 'Failed to initialize: \"{}\"'\n            self.logger.debug(msg.format(self.name))\n            self._runtime_params = {}\n\n    def initialize(self):\n        raise NotImplementedError()\n\n    def commit(self):\n        raise NotImplementedError()\n\n    def set_runtime_parameter(self, name, value):\n        if not self._target_checked:\n            self.check_target()\n            self._target_checked = True\n        self._runtime_params[name].set(self, value)\n\n    def set_defaults(self):\n        for p in self.supported_parameters:\n            if p.default:\n                self.set_runtime_parameter(p.name, p.default)\n\n    def validate_parameters(self):\n        raise NotImplementedError()\n\n    def check_target(self):\n        raise NotImplementedError()\n\n    def clear(self):\n        raise NotImplementedError()\n\n\nclass HotplugRuntimeConfig(RuntimeConfig):\n    '''\n    NOTE: Currently will fail if trying to hotplug back a core that\n    was hotplugged out when the devlib target was created.\n    '''\n\n    name = 'rt-hotplug'\n\n    @staticmethod\n    def set_num_cores(obj, value, core):\n        cpus = resolve_cpus(core, obj.target)\n        max_cores = len(cpus)\n        value = integer(value)\n        if value > max_cores:\n            msg = 'Cannot set number of {}\\'s to {}; max is {}'\n            raise ValueError(msg.format(core, value, max_cores))\n\n        msg = 'CPU{} Hotplugging already configured'\n        # Set cpus to be enabled\n        for cpu in cpus[:value]:\n            if cpu in obj.num_cores:\n                raise ConfigError(msg.format(cpu))\n            obj.num_cores[cpu] = True\n        # Set the remaining cpus to be disabled.\n        for cpu in cpus[value:]:\n            if cpu in obj.num_cores:\n                raise ConfigError(msg.format(cpu))\n            obj.num_cores[cpu] = False\n\n    def __init__(self, target):\n        self.num_cores = 
defaultdict(dict)\n        super(HotplugRuntimeConfig, self).__init__(target)\n\n    def initialize(self):\n        if not self.target.has('hotplug'):\n            return\n        param_name = 'num_cores'\n        self._runtime_params[param_name] = \\\n            RuntimeParameter(param_name, kind=int,\n                             constraint=lambda x: 0 <= x <= self.target.number_of_cpus,\n                             description=\"\"\"\n                             The number of cpu cores to be online\n                             \"\"\",\n                             setter=self.set_num_cores,\n                             setter_params={'core': None})\n\n        for name in unique(self.target.platform.core_names):\n            param_name = 'num_{}_cores'.format(name)\n            self._runtime_params[param_name] = \\\n                RuntimeParameter(param_name, kind=int,\n                                 constraint=lambda x, name=name: 0 <= x <= len(self.target.core_cpus(name)),\n                                 description=\"\"\"\n                                 The number of {} cores to be online\n                                 \"\"\".format(name),\n                                 setter=self.set_num_cores,\n                                 setter_params={'core': name})\n\n        for cpu_no in range(self.target.number_of_cpus):\n            param_name = 'cpu{}_online'.format(cpu_no)\n            self._runtime_params[param_name] = \\\n                RuntimeParameter(param_name, kind=bool,\n                                 description=\"\"\"\n                                 Specify whether cpu{} should be online\n                                 \"\"\".format(cpu_no),\n                                 setter=self.set_num_cores,\n                                 setter_params={'core': cpu_no})\n\n        if self.target.has('bl'):\n            for cluster in ['big', 'little']:\n                param_name = 'num_{}_cores'.format(cluster)\n                
self._runtime_params[param_name] = \\\n                    RuntimeParameter(param_name, kind=int,\n                                     constraint=lambda x, c=cluster: 0 <= x <= len(resolve_cpus(c, self.target)),\n                                     description=\"\"\"\n                                     The number of cores on the {} cluster to be online\n                                     \"\"\".format(cluster),\n                                     setter=self.set_num_cores,\n                                     setter_params={'core': cluster})\n\n    def check_target(self):\n        if not self.target.has('hotplug'):\n            raise TargetError('Target does not appear to support hotplug')\n\n    def validate_parameters(self):\n        if self.num_cores and len(self.num_cores) == self.target.number_of_cpus:\n            if all(v is False for v in list(self.num_cores.values())):\n                raise ValueError('Cannot set number of all cores to 0')\n\n    def commit(self):\n        '''Online all CPUs required in order before then off-lining'''\n        num_cores = sorted(self.num_cores.items())\n        for cpu, online in num_cores:\n            if online:\n                self.target.hotplug.online(cpu)\n        for cpu, online in reversed(num_cores):\n            if not online:\n                self.target.hotplug.offline(cpu)\n\n    def clear(self):\n        self.num_cores = defaultdict(dict)\n\n\nclass SysfileValuesRuntimeConfig(RuntimeConfig):\n\n    name = 'rt-sysfiles'\n\n    # pylint: disable=unused-argument\n    @staticmethod\n    def set_sysfile(obj, values, core):\n        for path, value in values.items():\n            verify = True\n            if path.endswith('!'):\n                verify = False\n                path = path[:-1]\n\n            if path in obj.sysfile_values:\n                msg = 'Syspath \"{}:{}\" already specified with a value of \"{}\"'\n                raise ConfigError(msg.format(path, value, 
obj.sysfile_values[path][0]))\n\n            obj.sysfile_values[path] = (value, verify)\n\n    def __init__(self, target):\n        self.sysfile_values = OrderedDict()\n        super(SysfileValuesRuntimeConfig, self).__init__(target)\n\n    def initialize(self):\n        self._runtime_params['sysfile_values'] = \\\n            RuntimeParameter('sysfile_values', kind=dict, merge=True,\n                             setter=self.set_sysfile,\n                             setter_params={'core': None},\n                             description=\"\"\"\n                             Sysfile path to be set\n                             \"\"\")\n\n    def check_target(self):\n        return True\n\n    def validate_parameters(self):\n        return\n\n    def commit(self):\n        for path, (value, verify) in self.sysfile_values.items():\n            self.target.write_value(path, value, verify=verify)\n\n    def clear(self):\n        self.sysfile_values = OrderedDict()\n\n    def check_exists(self, path):\n        if not self.target.file_exists(path):\n            raise ConfigError('Sysfile \"{}\" does not exist.'.format(path))\n\n\nclass FreqValue(object):\n\n    def __init__(self, values):\n        if values is None:\n            self.values = values\n        else:\n            self.values = sorted(values)\n\n    def __call__(self, value):\n        '''\n        `self.values` can be `None` if the device's supported values could not be retrieved\n        for some reason e.g. 
the cluster was offline, in this case we assume\n        the user values will be available and allow any integer values.\n        '''\n        if self.values is None:\n            if isinstance(value, int):\n                return value\n            else:\n                msg = 'CPU frequency values could not be retrieved, cannot resolve \"{}\"'\n                raise TargetError(msg.format(value))\n        elif isinstance(value, int) and value in self.values:\n            return value\n        elif isinstance(value, str):\n            value = caseless_string(value)\n            if value in ['min', 'max']:\n                return value\n\n        msg = 'Invalid frequency value: {}; Must be in {}'\n        raise ValueError(msg.format(value, self.values))\n\n    def __str__(self):\n        return 'valid frequency value: {}'.format(self.values)\n\n\nclass CpufreqRuntimeConfig(RuntimeConfig):\n\n    name = 'rt-cpufreq'\n\n    @staticmethod\n    def set_frequency(obj, value, core):\n        obj.set_param(obj, value, core, 'frequency')\n\n    @staticmethod\n    def set_max_frequency(obj, value, core):\n        obj.set_param(obj, value, core, 'max_frequency')\n\n    @staticmethod\n    def set_min_frequency(obj, value, core):\n        obj.set_param(obj, value, core, 'min_frequency')\n\n    @staticmethod\n    def set_governor(obj, value, core):\n        obj.set_param(obj, value, core, 'governor')\n\n    @staticmethod\n    def set_governor_tunables(obj, value, core):\n        obj.set_param(obj, value, core, 'governor_tunables')\n\n    @staticmethod\n    def set_param(obj, value, core, parameter):\n        '''Method to store passed parameter if it is not already specified for that cpu'''\n        cpus = resolve_unique_domain_cpus(core, obj.target)\n        for cpu in cpus:\n            if parameter in obj.config[cpu]:\n                msg = 'Cannot set \"{}\" for core \"{}\"; Parameter for CPU{} has already been set'\n                raise ConfigError(msg.format(parameter, 
core, cpu))\n            obj.config[cpu][parameter] = value\n\n    def __init__(self, target):\n        self.config = defaultdict(dict)\n        self.supported_cpu_freqs = {}\n        self.supported_cpu_governors = {}\n        super(CpufreqRuntimeConfig, self).__init__(target)\n\n    def initialize(self):\n        # pylint: disable=too-many-statements\n        if not self.target.has('cpufreq'):\n            return\n\n        self._retrive_cpufreq_info()\n        _, common_freqs, common_gov = self._get_common_values()\n\n        # Add common parameters if available.\n        freq_val = FreqValue(common_freqs)\n        param_name = 'frequency'\n        self._runtime_params[param_name] = \\\n            RuntimeParameter(\n                param_name, kind=freq_val,\n                setter=self.set_frequency,\n                setter_params={'core': None},\n                description=\"\"\"\n                The desired frequency for all cores\n                \"\"\")\n        param_name = 'max_frequency'\n        self._runtime_params[param_name] = \\\n            RuntimeParameter(\n                param_name, kind=freq_val,\n                setter=self.set_max_frequency,\n                setter_params={'core': None},\n                description=\"\"\"\n                The maximum frequency for all cores\n                \"\"\")\n        param_name = 'min_frequency'\n        self._runtime_params[param_name] = \\\n            RuntimeParameter(\n                param_name, kind=freq_val,\n                setter=self.set_min_frequency,\n                setter_params={'core': None},\n                description=\"\"\"\n                The minimum frequency for all cores\n                \"\"\")\n\n        if common_gov:\n            param_name = 'governor'\n            self._runtime_params[param_name] = \\\n                RuntimeParameter(\n                    param_name, kind=str,\n                    allowed_values=common_gov,\n                    
setter=self.set_governor,\n                    setter_params={'core': None},\n                    description=\"\"\"\n                    The governor to be set for all cores\n                    \"\"\")\n\n        param_name = 'gov_tunables'\n        self._runtime_params[param_name] = \\\n            RuntimeParameter(\n                param_name, kind=dict,\n                merge=True,\n                setter=self.set_governor_tunables,\n                setter_params={'core': None},\n                aliases=['governor_tunables'],\n                description=\"\"\"\n                The governor tunables to be set for all cores\n                \"\"\")\n\n        # Add core name parameters\n        for name in unique(self.target.platform.core_names):\n            cpu = resolve_unique_domain_cpus(name, self.target)[0]\n            freq_val = FreqValue(self.supported_cpu_freqs.get(cpu))\n            avail_govs = self.supported_cpu_governors.get(cpu)\n\n            param_name = '{}_frequency'.format(name)\n            self._runtime_params[param_name] = \\\n                RuntimeParameter(\n                    param_name, kind=freq_val,\n                    setter=self.set_frequency,\n                    setter_params={'core': name},\n                    description=\"\"\"\n                    The desired frequency for the {} cores\n                    \"\"\".format(name))\n            param_name = '{}_max_frequency'.format(name)\n            self._runtime_params[param_name] = \\\n                RuntimeParameter(\n                    param_name, kind=freq_val,\n                    setter=self.set_max_frequency,\n                    setter_params={'core': name},\n                    description=\"\"\"\n                    The maximum frequency for the {} cores\n                    \"\"\".format(name))\n            param_name = '{}_min_frequency'.format(name)\n            self._runtime_params[param_name] = \\\n                RuntimeParameter(\n                    
param_name, kind=freq_val,\n                    setter=self.set_min_frequency,\n                    setter_params={'core': name},\n                    description=\"\"\"\n                    The minimum frequency for the {} cores\n                    \"\"\".format(name))\n            param_name = '{}_governor'.format(name)\n            self._runtime_params[param_name] = \\\n                RuntimeParameter(\n                    param_name, kind=str,\n                    allowed_values=avail_govs,\n                    setter=self.set_governor,\n                    setter_params={'core': name},\n                    description=\"\"\"\n                    The governor to be set for the {} cores\n                    \"\"\".format(name))\n            param_name = '{}_gov_tunables'.format(name)\n            self._runtime_params[param_name] = \\\n                RuntimeParameter(\n                    param_name, kind=dict,\n                    setter=self.set_governor_tunables,\n                    setter_params={'core': name},\n                    merge=True,\n                    description=\"\"\"\n                    The governor tunables to be set for the {} cores\n                    \"\"\".format(name))\n\n        # Add cpuX parameters.\n        for cpu_no in range(self.target.number_of_cpus):\n            freq_val = FreqValue(self.supported_cpu_freqs.get(cpu_no))\n            avail_govs = self.supported_cpu_governors.get(cpu_no)\n\n            param_name = 'cpu{}_frequency'.format(cpu_no)\n            self._runtime_params[param_name] = \\\n                RuntimeParameter(\n                    param_name, kind=freq_val,\n                    setter=self.set_frequency,\n                    setter_params={'core': cpu_no},\n                    description=\"\"\"\n                    The desired frequency for cpu{}\n                    \"\"\".format(cpu_no))\n            param_name = 'cpu{}_max_frequency'.format(cpu_no)\n            self._runtime_params[param_name] = 
\\\n                RuntimeParameter(\n                    param_name, kind=freq_val,\n                    setter=self.set_max_frequency,\n                    setter_params={'core': cpu_no},\n                    description=\"\"\"\n                    The maximum frequency for cpu{}\n                    \"\"\".format(cpu_no))\n            param_name = 'cpu{}_min_frequency'.format(cpu_no)\n            self._runtime_params[param_name] = \\\n                RuntimeParameter(\n                    param_name, kind=freq_val,\n                    setter=self.set_min_frequency,\n                    setter_params={'core': cpu_no},\n                    description=\"\"\"\n                    The minimum frequency for cpu{}\n                    \"\"\".format(cpu_no))\n            param_name = 'cpu{}_governor'.format(cpu_no)\n            self._runtime_params[param_name] = \\\n                RuntimeParameter(\n                    param_name, kind=str,\n                    allowed_values=avail_govs,\n                    setter=self.set_governor,\n                    setter_params={'core': cpu_no},\n                    description=\"\"\"\n                    The governor to be set for cpu{}\n                    \"\"\".format(cpu_no))\n            param_name = 'cpu{}_gov_tunables'.format(cpu_no)\n            self._runtime_params[param_name] = \\\n                RuntimeParameter(\n                    param_name, kind=dict,\n                    setter=self.set_governor_tunables,\n                    setter_params={'core': cpu_no},\n                    merge=True,\n                    description=\"\"\"\n                    The governor tunables to be set for cpu{}\n                    \"\"\".format(cpu_no))\n\n        # Add big.little cores if present on device.\n        if self.target.has('bl'):\n            for cluster in ['big', 'little']:\n                cpu = resolve_unique_domain_cpus(cluster, self.target)[0]\n                freq_val = 
FreqValue(self.supported_cpu_freqs.get(cpu))\n                avail_govs = self.supported_cpu_governors.get(cpu)\n                param_name = '{}_frequency'.format(cluster)\n\n                self._runtime_params[param_name] = \\\n                    RuntimeParameter(\n                        param_name, kind=freq_val,\n                        setter=self.set_frequency,\n                        setter_params={'core': cluster},\n                        description=\"\"\"\n                        The desired frequency for the {} cluster\n                        \"\"\".format(cluster))\n                param_name = '{}_max_frequency'.format(cluster)\n                self._runtime_params[param_name] = \\\n                    RuntimeParameter(\n                        param_name, kind=freq_val,\n                        setter=self.set_max_frequency,\n                        setter_params={'core': cluster},\n                        description=\"\"\"\n                        The maximum frequency for the {} cluster\n                        \"\"\".format(cluster))\n                param_name = '{}_min_frequency'.format(cluster)\n                self._runtime_params[param_name] = \\\n                    RuntimeParameter(\n                        param_name, kind=freq_val,\n                        setter=self.set_min_frequency,\n                        setter_params={'core': cluster},\n                        description=\"\"\"\n                        The minimum frequency for the {} cluster\n                        \"\"\".format(cluster))\n                param_name = '{}_governor'.format(cluster)\n                self._runtime_params[param_name] = \\\n                    RuntimeParameter(\n                        param_name, kind=str,\n                        allowed_values=avail_govs,\n                        setter=self.set_governor,\n                        setter_params={'core': cluster},\n                        description=\"\"\"\n                        The 
governor to be set for the {} cores\n                        \"\"\".format(cluster))\n                param_name = '{}_gov_tunables'.format(cluster)\n                self._runtime_params[param_name] = \\\n                    RuntimeParameter(\n                        param_name, kind=dict,\n                        setter=self.set_governor_tunables,\n                        setter_params={'core': cluster},\n                        merge=True,\n                        description=\"\"\"\n                        The governor tunables to be set for the {} cores\n                        \"\"\".format(cluster))\n\n    def check_target(self):\n        if not self.target.has('cpufreq'):\n            raise TargetError('Target does not appear to support cpufreq')\n\n    def validate_parameters(self):\n        '''Method to validate parameters against each other'''\n        for cpu in self.config:\n            config = self.config[cpu]\n            minf = config.get('min_frequency')\n            maxf = config.get('max_frequency')\n            freq = config.get('frequency')\n\n            if freq and minf:\n                msg = 'CPU{}: Can\\'t set both cpu frequency and minimum frequency'\n                raise ConfigError(msg.format(cpu))\n            if freq and maxf:\n                msg = 'CPU{}: Can\\'t set both cpu frequency and maximum frequency'\n                raise ConfigError(msg.format(cpu))\n\n            if (maxf and minf) and self._resolve_freq(minf, cpu) > self._resolve_freq(maxf, cpu):\n                msg = 'CPU{}: min_frequency \"{}\" cannot be greater than max_frequency \"{}\"'\n                raise ConfigError(msg.format(cpu, minf, maxf))\n\n    def commit(self):\n        for cpu in self.config:\n            config = self.config[cpu]\n            freq = self._resolve_freq(config.get('frequency'), cpu)\n            minf = self._resolve_freq(config.get('min_frequency'), cpu)\n            maxf = self._resolve_freq(config.get('max_frequency'), cpu)\n\n       
     self.configure_governor(cpu,\n                                    config.get('governor'),\n                                    config.get('governor_tunables'))\n            self.configure_frequency(cpu, freq, minf, maxf, config.get('governor'))\n\n    def clear(self):\n        self.config = defaultdict(dict)\n\n    def configure_governor(self, cpu, governor=None, gov_tunables=None):\n        if not governor and not gov_tunables:\n            return\n        if cpu not in self.target.list_online_cpus():\n            msg = 'Cannot configure governor for {} as no CPUs are online.'\n            raise TargetError(msg.format(cpu))\n        if not governor:\n            governor = self.target.get_governor(cpu)\n        if not gov_tunables:\n            gov_tunables = {}\n        self.target.cpufreq.set_governor(cpu, governor, **gov_tunables)\n\n    def configure_frequency(self, cpu, freq=None, min_freq=None, max_freq=None, governor=None):\n        if freq and (min_freq or max_freq):\n            msg = 'Cannot specify both frequency and min/max frequency'\n            raise ConfigError(msg)\n\n        if cpu not in self.target.list_online_cpus():\n            msg = 'Cannot configure frequencies for CPU{} as no CPUs are online.'\n            raise TargetError(msg.format(cpu))\n\n        if freq:\n            self._set_frequency(cpu, freq, governor)\n        else:\n            self._set_min_max_frequencies(cpu, min_freq, max_freq)\n\n    def _resolve_freq(self, value, cpu):\n        if value == 'min':\n            value = self.target.cpufreq.get_min_available_frequency(cpu)\n        elif value == 'max':\n            value = self.target.cpufreq.get_max_available_frequency(cpu)\n        return value\n\n    def _set_frequency(self, cpu, freq, governor):\n        if not governor:\n            governor = self.target.cpufreq.get_governor(cpu)\n        has_userspace = governor == 'userspace'\n\n        # Sets all frequency to be to desired frequency\n        if freq < 
self.target.cpufreq.get_frequency(cpu):\n            self.target.cpufreq.set_min_frequency(cpu, freq)\n            if has_userspace:\n                self.target.cpufreq.set_frequency(cpu, freq)\n            self.target.cpufreq.set_max_frequency(cpu, freq)\n        else:\n            self.target.cpufreq.set_max_frequency(cpu, freq)\n            if has_userspace:\n                self.target.cpufreq.set_frequency(cpu, freq)\n            self.target.cpufreq.set_min_frequency(cpu, freq)\n\n    def _set_min_max_frequencies(self, cpu, min_freq, max_freq):\n        min_freq_set = False\n        current_min_freq = self.target.cpufreq.get_min_frequency(cpu)\n        current_max_freq = self.target.cpufreq.get_max_frequency(cpu)\n        if max_freq:\n            if max_freq < current_min_freq:\n                if min_freq:\n                    self.target.cpufreq.set_min_frequency(cpu, min_freq)\n                    self.target.cpufreq.set_max_frequency(cpu, max_freq)\n                    min_freq_set = True\n                else:\n                    msg = 'CPU {}: Cannot set max_frequency ({}) below current min frequency ({}).'\n                    raise ConfigError(msg.format(cpu, max_freq, current_min_freq))\n            else:\n                self.target.cpufreq.set_max_frequency(cpu, max_freq)\n        if min_freq and not min_freq_set:\n            current_max_freq = max_freq or current_max_freq\n            if min_freq > current_max_freq:\n                msg = 'CPU {}: Cannot set min_frequency ({}) above current max frequency ({}).'\n                raise ConfigError(msg.format(cpu, min_freq, current_max_freq))\n            self.target.cpufreq.set_min_frequency(cpu, min_freq)\n\n    def _retrive_cpufreq_info(self):\n        '''\n        Tries to retrieve cpu freq information for all cpus on device.\n        For each cpu domain, only one cpu is queried for information and\n        duplicated across related cpus. 
This is to reduce calls to the target\n        and as long as one core per domain is online the remaining cpus information\n        can still be populated.\n        '''\n        for cluster_cpu in resolve_unique_domain_cpus('all', self.target):\n            domain_cpus = self.target.cpufreq.get_related_cpus(cluster_cpu)\n            for cpu in domain_cpus:\n                if cpu in self.target.list_online_cpus():\n                    supported_cpu_freqs = self.target.cpufreq.list_frequencies(cpu)\n                    supported_cpu_governors = self.target.cpufreq.list_governors(cpu)\n                    break\n            else:\n                msg = 'CPUFreq information could not be retrieved for{};'\\\n                      'Will not be validated against device.'\n                logger.debug(msg.format(' CPU{},'.format(cpu for cpu in domain_cpus)))\n                return\n\n            for cpu in domain_cpus:\n                self.supported_cpu_freqs[cpu] = supported_cpu_freqs\n                self.supported_cpu_governors[cpu] = supported_cpu_governors\n\n    def _get_common_values(self):\n        ''' Find common values for frequency and governors across all cores'''\n        common_freqs = None\n        common_gov = None\n        all_freqs = None\n        initialized = False\n        for cpu in resolve_unique_domain_cpus('all', self.target):\n            if not initialized:\n                initialized = True\n                common_freqs = set(self.supported_cpu_freqs.get(cpu) or [])\n                all_freqs = copy(common_freqs)\n                common_gov = set(self.supported_cpu_governors.get(cpu) or [])\n            else:\n                common_freqs = common_freqs.intersection(self.supported_cpu_freqs.get(cpu) or set())\n                all_freqs = all_freqs.union(self.supported_cpu_freqs.get(cpu) or set())\n                common_gov = common_gov.intersection(self.supported_cpu_governors.get(cpu) or set())\n\n        return all_freqs, common_freqs, 
common_gov\n\n\nclass IdleStateValue(object):\n\n    def __init__(self, values):\n        if values is None:\n            self.values = values\n        else:\n            self.values = [(value.id, value.name, value.desc) for value in values]\n\n    def __call__(self, value):\n        if self.values is None:\n            return value\n\n        if isinstance(value, str):\n            value = caseless_string(value)\n            if value == 'all':\n                return [state[0] for state in self.values]\n            elif value == 'none':\n                return []\n            else:\n                return [self._get_state_ID(value)]\n\n        elif isinstance(value, list):\n            valid_states = []\n            for state in value:\n                valid_states.append(self._get_state_ID(state))\n            return valid_states\n        else:\n            raise ValueError('Invalid IdleState: \"{}\"'.format(value))\n\n    def _get_state_ID(self, value):\n        '''Checks passed state and converts to its ID'''\n        value = caseless_string(value)\n        for s_id, s_name, s_desc in self.values:\n            if value in (s_id, s_name, s_desc):\n                return s_id\n        msg = 'Invalid IdleState: \"{}\"; Must be in {}'\n        raise ValueError(msg.format(value, self.values))\n\n    def __str__(self):\n        return 'valid idle state: \"{}\"'.format(self.values).replace('\\'', '')\n\n\nclass CpuidleRuntimeConfig(RuntimeConfig):\n\n    name = 'rt-cpuidle'\n\n    @staticmethod\n    def set_idle_state(obj, value, core):\n        cpus = resolve_cpus(core, obj.target)\n        for cpu in cpus:\n            obj.config[cpu] = []\n            for state in value:\n                obj.config[cpu].append(state)\n\n    def __init__(self, target):\n        self.config = defaultdict(dict)\n        self.supported_idle_states = {}\n        super(CpuidleRuntimeConfig, self).__init__(target)\n\n    def initialize(self):\n        if not self.target.has('cpuidle'):\n  
          return\n\n        self._retrieve_device_idle_info()\n\n        common_idle_states = self._get_common_idle_values()\n        idle_state_val = IdleStateValue(common_idle_states)\n\n        if common_idle_states:\n            param_name = 'idle_states'\n            self._runtime_params[param_name] = \\\n                RuntimeParameter(\n                    param_name, kind=idle_state_val,\n                    setter=self.set_idle_state,\n                    setter_params={'core': None},\n                    description=\"\"\"\n                    The idle states to be set for all cores\n                    \"\"\")\n\n        for name in unique(self.target.platform.core_names):\n            cpu = resolve_cpus(name, self.target)[0]\n            idle_state_val = IdleStateValue(self.supported_idle_states.get(cpu))\n            param_name = '{}_idle_states'.format(name)\n            self._runtime_params[param_name] = \\\n                RuntimeParameter(\n                    param_name, kind=idle_state_val,\n                    setter=self.set_idle_state,\n                    setter_params={'core': name},\n                    description=\"\"\"\n                    The idle states to be set for {} cores\n                    \"\"\".format(name))\n\n        for cpu_no in range(self.target.number_of_cpus):\n            idle_state_val = IdleStateValue(self.supported_idle_states.get(cpu_no))\n            param_name = 'cpu{}_idle_states'.format(cpu_no)\n            self._runtime_params[param_name] = \\\n                RuntimeParameter(\n                    param_name, kind=idle_state_val,\n                    setter=self.set_idle_state,\n                    setter_params={'core': cpu_no},\n                    description=\"\"\"\n                    The idle states to be set for cpu{}\n                    \"\"\".format(cpu_no))\n\n        if self.target.has('bl'):\n            for cluster in ['big', 'little']:\n                cpu = resolve_cpus(cluster, 
self.target)[0]\n                idle_state_val = IdleStateValue(self.supported_idle_states.get(cpu))\n                param_name = '{}_idle_states'.format(cluster)\n                self._runtime_params[param_name] = \\\n                    RuntimeParameter(\n                        param_name, kind=idle_state_val,\n                        setter=self.set_idle_state,\n                        setter_params={'core': cluster},\n                        description=\"\"\"\n                        The idle states to be set for the {} cores\n                        \"\"\".format(cluster))\n\n    def check_target(self):\n        if not self.target.has('cpuidle'):\n            raise TargetError('Target does not appear to support cpuidle')\n\n    def validate_parameters(self):\n        return\n\n    def clear(self):\n        self.config = defaultdict(dict)\n\n    def commit(self):\n        for cpu in self.config:\n            idle_states = set(state.id for state in self.supported_idle_states.get(cpu, []))\n            enabled = self.config[cpu]\n            disabled = idle_states.difference(enabled)\n            for state in enabled:\n                self.target.cpuidle.enable(state, cpu)\n            for state in disabled:\n                self.target.cpuidle.disable(state, cpu)\n\n    def _retrieve_device_idle_info(self):\n        for cpu in range(self.target.number_of_cpus):\n            self.supported_idle_states[cpu] = self.target.cpuidle.get_states(cpu)\n\n    def _get_common_idle_values(self):\n        '''Find common values for cpu idle states across all cores'''\n        common_idle_states = []\n        for cpu in range(self.target.number_of_cpus):\n            for state in self.supported_idle_states.get(cpu) or []:\n                if state.name not in common_idle_states:\n                    common_idle_states.append(state)\n        return common_idle_states\n\n\nScreenOrientation = enum(['NATURAL', 'LEFT', 'INVERTED', 'RIGHT'])\n\n\nclass 
AndroidRuntimeConfig(RuntimeConfig):\n\n    name = 'rt-android'\n\n    @staticmethod\n    def set_brightness(obj, value):\n        if value is not None:\n            obj.config['brightness'] = value\n\n    @staticmethod\n    def set_airplane_mode(obj, value):\n        if value is not None:\n            obj.config['airplane_mode'] = value\n\n    @staticmethod\n    def set_rotation(obj, value):\n        if value is not None:\n            obj.config['rotation'] = value.value\n\n    @staticmethod\n    def set_screen_state(obj, value):\n        if value is not None:\n            obj.config['screen_on'] = value\n\n    @staticmethod\n    def set_unlock_screen(obj, value):\n        if value is not None:\n            obj.config['unlock_screen'] = value\n\n    def __init__(self, target):\n        self.config = defaultdict(dict)\n        super(AndroidRuntimeConfig, self).__init__(target)\n\n    def initialize(self):\n        if self.target.os not in ['android', 'chromeos']:\n            return\n        if self.target.os == 'chromeos' and not self.target.supports_android:\n            return\n\n        param_name = 'brightness'\n        self._runtime_params[param_name] = \\\n            RuntimeParameter(\n                param_name, kind=int,\n                constraint=lambda x: 0 <= x <= 255,\n                default=127,\n                setter=self.set_brightness,\n                description=\"\"\"\n                Specify the screen brightness to be set for\n                the device\n                \"\"\")\n\n        if self.target.os == 'android':\n            param_name = 'airplane_mode'\n            self._runtime_params[param_name] = \\\n                RuntimeParameter(\n                    param_name, kind=bool,\n                    setter=self.set_airplane_mode,\n                    description=\"\"\"\n                    Specify whether airplane mode should be\n                    enabled for the device\n                    \"\"\")\n\n            param_name = 
'rotation'\n            self._runtime_params[param_name] = \\\n                RuntimeParameter(\n                    param_name, kind=ScreenOrientation,\n                    setter=self.set_rotation,\n                    description=\"\"\"\n                    Specify the screen orientation for the device\n                    \"\"\")\n\n            param_name = 'screen_on'\n            self._runtime_params[param_name] = \\\n                RuntimeParameter(\n                    param_name, kind=bool,\n                    default=True,\n                    setter=self.set_screen_state,\n                    description=\"\"\"\n                    Specify whether the device screen should be on\n                    \"\"\")\n\n            param_name = 'unlock_screen'\n            self._runtime_params[param_name] = \\\n                RuntimeParameter(\n                    param_name, kind=str,\n                    default=None,\n                    setter=self.set_unlock_screen,\n                    description=\"\"\"\n                    Specify how the device screen should be unlocked (e.g., vertical)\n                    \"\"\")\n\n    def check_target(self):\n        if self.target.os != 'android' and self.target.os != 'chromeos':\n            raise ConfigError('Target does not appear to be running Android')\n        if self.target.os == 'chromeos' and not self.target.supports_android:\n            raise ConfigError('Target does not appear to support Android')\n\n    def validate_parameters(self):\n        pass\n\n    def commit(self):\n        # pylint: disable=too-many-branches\n        if 'airplane_mode' in self.config:\n            new_airplane_mode = self.config['airplane_mode']\n            old_airplane_mode = self.target.get_airplane_mode()\n            self.target.set_airplane_mode(new_airplane_mode)\n\n            # If we've just switched airplane mode off, wait a few seconds to\n            # enable the network state to stabilise. 
That's helpful if we're\n            # about to run a workload that is going to check for network\n            # connectivity.\n            if old_airplane_mode and not new_airplane_mode:\n                self.logger.info('Disabled airplane mode, waiting up to 20 seconds for network setup')\n                network_is_ready = False\n                for _ in range(4):\n                    time.sleep(5)\n                    network_is_ready = self.target.is_network_connected()\n                    if network_is_ready:\n                        break\n                if network_is_ready:\n                    self.logger.info(\"Found a network\")\n                else:\n                    self.logger.warning(\"Network unreachable\")\n\n        if 'brightness' in self.config:\n            self.target.set_brightness(self.config['brightness'])\n\n        if 'rotation' in self.config:\n            self.target.set_rotation(self.config['rotation'])\n\n        if 'screen_on' in self.config:\n            if self.config['screen_on']:\n                self.target.ensure_screen_is_on()\n            else:\n                self.target.ensure_screen_is_off()\n\n        if self.config.get('unlock_screen'):\n            self.target.ensure_screen_is_on()\n            if self.target.is_screen_locked():\n                self.target.swipe_to_unlock(self.config['unlock_screen'])\n\n    def clear(self):\n        self.config = {}\n"
  },
  {
    "path": "wa/framework/target/runtime_parameter_manager.py",
    "content": "#    Copyright 2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\nfrom collections import namedtuple\n\nfrom wa.framework.exception import ConfigError\nfrom wa.framework.target.runtime_config import (SysfileValuesRuntimeConfig,\n                                                HotplugRuntimeConfig,\n                                                CpufreqRuntimeConfig,\n                                                CpuidleRuntimeConfig,\n                                                AndroidRuntimeConfig)\nfrom wa.utils.types import obj_dict, caseless_string\nfrom wa.framework import pluginloader\n\n\nclass RuntimeParameterManager(object):\n\n    runtime_config_cls = [\n        # order matters\n        SysfileValuesRuntimeConfig,\n        HotplugRuntimeConfig,\n        CpufreqRuntimeConfig,\n        CpuidleRuntimeConfig,\n        AndroidRuntimeConfig,\n    ]\n\n    def __init__(self, target):\n        self.target = target\n        self.runtime_params = {}\n\n        try:\n            for rt_cls in pluginloader.list_plugins(kind='runtime-config'):\n                if rt_cls not in self.runtime_config_cls:\n                    self.runtime_config_cls.append(rt_cls)\n        except ValueError:\n            pass\n        self.runtime_configs = [cls(self.target) for cls in self.runtime_config_cls]\n\n        runtime_parameter = namedtuple('RuntimeParameter', 'cfg_point, rt_config')\n        for cfg in self.runtime_configs:\n      
      for param in cfg.supported_parameters:\n                if param.name in self.runtime_params:\n                    msg = 'Duplicate runtime parameter name \"{}\": in both {} and {}'\n                    raise RuntimeError(msg.format(param.name,\n                                                  self.runtime_params[param.name].rt_config.name,\n                                                  cfg.name))\n                self.runtime_params[param.name] = runtime_parameter(param, cfg)\n\n    # Uses corresponding config point to merge parameters\n    def merge_runtime_parameters(self, parameters):\n        merged_params = obj_dict()\n        for source in parameters:\n            for name, value in parameters[source].items():\n                cp = self.get_cfg_point(name)\n                cp.set_value(merged_params, value)\n        return dict(merged_params)\n\n    # Validates runtime_parameters against each other\n    def validate_runtime_parameters(self, parameters):\n        self.clear_runtime_parameters()\n        self.set_runtime_parameters(parameters)\n        for cfg in self.runtime_configs:\n            cfg.validate_parameters()\n\n    # Writes the given parameters to the device.\n    def commit_runtime_parameters(self, parameters):\n        self.clear_runtime_parameters()\n        self.set_runtime_parameters(parameters)\n        for cfg in self.runtime_configs:\n            cfg.commit()\n\n    # Stores a set of parameters performing isolated validation when appropriate\n    def set_runtime_parameters(self, parameters):\n        for name, value in parameters.items():\n            cfg = self.get_config_for_name(name)\n            if cfg is None:\n                msg = 'Unsupported runtime parameter: \"{}\"'\n                raise ConfigError(msg.format(name))\n            cfg.set_runtime_parameter(name, value)\n\n    def clear_runtime_parameters(self):\n        for cfg in self.runtime_configs:\n            cfg.clear()\n            cfg.set_defaults()\n\n    
def get_config_for_name(self, name):\n        name = caseless_string(name)\n        for k, v in self.runtime_params.items():\n            if name == k:\n                return v.rt_config\n        return None\n\n    def get_cfg_point(self, name):\n        name = caseless_string(name)\n        for k, v in self.runtime_params.items():\n            if name == k or name in v.cfg_point.aliases:\n                return v.cfg_point\n        raise ConfigError('Unknown runtime parameter: {}'.format(name))\n"
  },
  {
    "path": "wa/framework/uiauto/app/build.gradle",
    "content": "apply plugin: 'com.android.library'\n\nandroid {\n    compileSdkVersion 28\n    buildToolsVersion '28.0.3'\n    defaultConfig {\n        minSdkVersion 18\n        targetSdkVersion 28\n        testInstrumentationRunner \"android.support.test.runner.AndroidJUnitRunner\"\n    }\n}\n\ndependencies {\n    implementation fileTree(include: ['*.jar'], dir: 'libs')\n    implementation 'com.android.support.test:runner:0.5'\n    implementation 'com.android.support.test:rules:0.5'\n    implementation 'com.android.support.test.uiautomator:uiautomator-v18:2.1.2'\n}\n"
  },
  {
    "path": "wa/framework/uiauto/app/src/main/AndroidManifest.xml",
    "content": "<manifest xmlns:android=\"http://schemas.android.com/apk/res/android\"\n    package=\"com.arm.wa.uiauto\">\n\n    <uses-permission android:name=\"android.permission.READ_LOGS\"/>\n\n    <application>\n        <uses-library android:name=\"android.test.runner\"/>\n    </application>\n</manifest>\n"
  },
  {
    "path": "wa/framework/uiauto/app/src/main/java/com/arm/wa/uiauto/ActionLogger.java",
    "content": "/*    Copyright 2014-2018 ARM Limited\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\npackage com.arm.wa.uiauto;\n\nimport android.os.Bundle;\nimport android.util.Log;\n    /**\n     * Basic marker API for workloads to generate start and end markers for\n     * deliminating and timing actions. Markers are output to logcat with debug\n     * priority. Actions represent a series of UI interactions to time.\n     *\n     * The marker API provides a way for instruments and output processors to hook into\n     * per-action timings by parsing logcat logs produced per workload iteration.\n     *\n     * The marker output consists of a logcat tag 'UX_PERF' and a message. The\n     * message consists of a name for the action and a timestamp. 
The timestamp\n     * is separated by a single space from the name of the action.\n     *\n     * Typical usage:\n     *\n     * ActionLogger logger = ActionLogger(\"testTag\", parameters);\n     * logger.start();\n     * // actions to be recorded\n     * logger.stop();\n     */\n    public class ActionLogger {\n\n        private String testTag;\n        private boolean enabled;\n\n        public ActionLogger(String testTag, Bundle parameters) {\n            this.testTag = testTag;\n            this.enabled = parameters.getBoolean(\"markers_enabled\");\n        }\n\n        public void start() {\n            if (enabled) {\n                Log.d(\"UX_PERF\", testTag + \" start \" + System.nanoTime());\n            }\n        }\n\n        public void stop() throws Exception {\n            if (enabled) {\n                Log.d(\"UX_PERF\", testTag + \" end \" + System.nanoTime());\n            }\n        }\n    }\n"
  },
  {
    "path": "wa/framework/uiauto/app/src/main/java/com/arm/wa/uiauto/ApplaunchInterface.java",
    "content": "/*    Copyright 2013-2017 ARM Limited\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n*/\n\npackage com.arm.wa.uiauto;\n\nimport android.os.Bundle;\nimport android.support.test.uiautomator.UiObject;\n\n/**\n * ApplaunchInterface.java\n * Interface used for enabling uxperfapplaunch workload.\n * This interface gets implemented by all workloads that support application launch\n * instrumentation.\n */\n\npublic interface ApplaunchInterface {\n\n    /**\n     * Sets the launchEndObject of a workload, which is a UiObject that marks\n     * the end of the application launch.\n     */\n    public UiObject getLaunchEndObject();\n\n    /**\n     * Runs the Uiautomation methods for clearing the initial run\n     * dialogues on the first time installation of an application package.\n     */\n    public void runApplicationSetup() throws Exception;\n\n    /**\n     * Provides the application launch command of the application which is\n     * constructed as a string from the workload.\n     */\n    public String getLaunchCommand();\n\n    /** Passes the workload parameters. */\n    public void setWorkloadParameters(Bundle parameters);\n\n    /** Initialize the instrumentation for the workload */\n    public void initialize_instrumentation();\n\n}\n"
  },
  {
    "path": "wa/framework/uiauto/app/src/main/java/com/arm/wa/uiauto/BaseUiAutomation.java",
    "content": "/*    Copyright 2013-2018 ARM Limited\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n*/\n\npackage com.arm.wa.uiauto;\n\nimport android.os.Bundle;\nimport android.os.SystemClock;\nimport android.app.Instrumentation;\nimport android.content.Context;\nimport android.graphics.Point;\nimport android.graphics.Rect;\n\nimport android.support.test.InstrumentationRegistry;\nimport android.support.test.uiautomator.UiDevice;\nimport android.support.test.uiautomator.UiObject;\nimport android.support.test.uiautomator.UiObjectNotFoundException;\nimport android.support.test.uiautomator.UiSelector;\nimport android.support.test.uiautomator.UiWatcher;\nimport android.support.test.uiautomator.UiScrollable;\n\nimport org.junit.Before;\nimport org.junit.Test;\n\nimport java.io.BufferedReader;\nimport java.io.File;\nimport java.io.InputStreamReader;\nimport java.util.concurrent.TimeUnit;\nimport java.util.concurrent.TimeoutException;\n\nimport static android.support.test.InstrumentationRegistry.getArguments;\n\npublic class BaseUiAutomation {\n\n    public enum FindByCriteria { BY_ID, BY_TEXT, BY_DESC };\n    public enum Direction { UP, DOWN, LEFT, RIGHT, NULL };\n    public enum ScreenOrientation { RIGHT, NATURAL, LEFT, PORTRAIT, LANDSCAPE };\n    public enum PinchType { IN, OUT, NULL };\n\n    // Time in milliseconds\n    public long uiAutoTimeout = 4 * 1000;\n\n    public static final int CLICK_REPEAT_INTERVAL_MINIMUM = 5;\n    public static final 
int CLICK_REPEAT_INTERVAL_DEFAULT = 50;\n\n    public Instrumentation mInstrumentation;\n    public Context mContext;\n    public UiDevice mDevice;\n\n    @Before\n    public void initialize_instrumentation() {\n        mInstrumentation = InstrumentationRegistry.getInstrumentation();\n        mDevice = UiDevice.getInstance(mInstrumentation);\n        mContext = mInstrumentation.getTargetContext();\n    }\n\n    @Test\n    public void setup() throws Exception {\n    }\n\n    @Test\n    public void runWorkload() throws Exception {\n    }\n\n    @Test\n    public void extractResults() throws Exception {\n    }\n\n    @Test\n    public void teardown() throws Exception {\n    }\n\n    public void sleep(int second) {\n        SystemClock.sleep(second * 1000);\n    }\n\n    // Generate a package ID\n    public String getPackageID(Bundle parameters) {\n        String packageName = parameters.getString(\"package_name\");\n        return packageName + \":id/\";\n    }\n\n    public boolean takeScreenshot(String name) {\n        Bundle params = getArguments();\n        String png_dir = params.getString(\"workdir\");\n\n        try {\n            return mDevice.takeScreenshot(new File(png_dir, name + \".png\"));\n        } catch (NoSuchMethodError e) {\n            return true;\n        }\n    }\n\n    public void waitText(String text) throws UiObjectNotFoundException {\n        waitText(text, 600);\n    }\n\n    public void waitText(String text, int second) throws UiObjectNotFoundException {\n        UiSelector selector = new UiSelector();\n        UiObject text_obj = mDevice.findObject(selector.text(text)\n                                                       .className(\"android.widget.TextView\"));\n        waitObject(text_obj, second);\n    }\n\n    public void waitObject(UiObject obj) throws UiObjectNotFoundException {\n        waitObject(obj, 600);\n    }\n\n    public void waitObject(UiObject obj, int second) throws UiObjectNotFoundException {\n        if 
(!obj.waitForExists(second * 1000)) {\n            throw new UiObjectNotFoundException(\"UiObject is not found: \"\n                    + obj.getSelector().toString());\n        }\n    }\n\n    public boolean waitUntilNoObject(UiObject obj, int second) {\n        return obj.waitUntilGone(second * 1000);\n    }\n\n    public void clearLogcat() throws Exception {\n        Runtime.getRuntime().exec(\"logcat -c\");\n    }\n\n    public void waitForLogcatText(String searchText, long timeout) throws Exception {\n        long startTime = System.currentTimeMillis();\n        Process process = Runtime.getRuntime().exec(\"logcat\");\n        BufferedReader reader = new BufferedReader(new InputStreamReader(process.getInputStream()));\n        String line;\n\n        long currentTime = System.currentTimeMillis();\n        boolean found = false;\n        while ((currentTime - startTime) < timeout) {\n            sleep(2);  // poll every two seconds\n\n            while ((line = reader.readLine()) != null) {\n                if (line.contains(searchText)) {\n                    found = true;\n                    break;\n                }\n            }\n\n            if (found) {\n                break;\n            }\n            currentTime = System.currentTimeMillis();\n        }\n\n        process.destroy();\n\n        if ((currentTime - startTime) >= timeout) {\n            throw new TimeoutException(\"Timed out waiting for Logcat text \\\"%s\\\"\".format(searchText));\n        }\n    }\n\n    public void registerWatcher(String name, UiWatcher watcher) {\n        mDevice.registerWatcher(name, watcher);\n    }\n\n    public void runWatchers() {\n        mDevice.runWatchers();\n    }\n\n    public void removeWatcher(String name) {\n        mDevice.removeWatcher(name);\n    }\n\n    public void setScreenOrientation(ScreenOrientation orientation) throws Exception {\n        int width = mDevice.getDisplayWidth();\n        int height = mDevice.getDisplayHeight();\n        switch 
(orientation) {\n            case RIGHT:\n                mDevice.setOrientationRight();\n                break;\n            case NATURAL:\n                mDevice.setOrientationNatural();\n                break;\n            case LEFT:\n                mDevice.setOrientationLeft();\n                break;\n            case LANDSCAPE:\n                if (mDevice.isNaturalOrientation()){\n                    if (height > width){\n                        mDevice.setOrientationRight();\n                    }\n                }\n                else {\n                    if (height > width){\n                        mDevice.setOrientationNatural();\n                    }\n                }\n                break;\n            case PORTRAIT:\n                if (mDevice.isNaturalOrientation()){\n                    if (height < width){\n                        mDevice.setOrientationRight();\n                    }\n                }\n                else {\n                    if (height < width){\n                        mDevice.setOrientationNatural();\n                    }\n                }\n                break;\n            default:\n                throw new Exception(\"No orientation specified\");\n        }\n    }\n\n    public void unsetScreenOrientation() throws Exception {\n        mDevice.unfreezeRotation();\n    }\n\n    public void uiObjectPerformLongClick(UiObject view, int steps) throws Exception {\n        Rect rect = view.getBounds();\n        mDevice.swipe(rect.centerX(), rect.centerY(),\n                      rect.centerX(), rect.centerY(), steps);\n    }\n\n    public int getDisplayHeight() {\n        return mDevice.getDisplayHeight();\n    }\n\n    public int getDisplayWidth() {\n        return mDevice.getDisplayWidth();\n    }\n\n    public int getDisplayCentreWidth() {\n        return getDisplayWidth() / 2;\n    }\n\n    public int getDisplayCentreHeight() {\n        return getDisplayHeight() / 2;\n    }\n\n    public void tapDisplayCentre() 
{\n        tapDisplay(getDisplayCentreWidth(),  getDisplayCentreHeight());\n    }\n\n    public void tapDisplay(int x, int y) {\n        mDevice.click(x, y);\n    }\n\n    public void pressEnter() {\n        mDevice.pressEnter();\n    }\n\n    public void pressHome() {\n        mDevice.pressHome();\n    }\n\n    public void pressBack() {\n        mDevice.pressBack();\n    }\n\n    public void uiObjectSwipe(UiObject view, Direction direction, int steps) throws Exception {\n        switch (direction) {\n            case UP:\n                view.swipeUp(steps);\n                break;\n            case DOWN:\n                view.swipeDown(steps);\n                break;\n            case LEFT:\n                view.swipeLeft(steps);\n                break;\n            case RIGHT:\n                view.swipeRight(steps);\n                break;\n            case NULL:\n                throw new Exception(\"No direction specified\");\n            default:\n                break;\n        }\n    }\n\n    public void uiDeviceSwipeVertical(int startY, int endY, int xCoordinate, int steps) {\n        mDevice.swipe(xCoordinate, startY, xCoordinate, endY, steps);\n    }\n\n    public void uiDeviceSwipeHorizontal(int startX, int endX, int yCoordinate, int steps) {\n        mDevice.swipe(startX, yCoordinate, endX, yCoordinate, steps);\n    }\n\n    public void uiObjectVertPinchIn(UiObject view, int steps, int percent) throws Exception {\n        final int FINGER_TOUCH_HALF_WIDTH = 20;\n\n        // Make value between 1 and 100\n        int nPercent = (percent < 0) ? 1 : (percent > 100) ? 
100 : percent;\n        float percentage = nPercent / 100f;\n\n        Rect rect = view.getVisibleBounds();\n\n        if (rect.width() <= FINGER_TOUCH_HALF_WIDTH * 2) {\n            throw new IllegalStateException(\"Object width is too small for operation\");\n        }\n\n        // Start at the top-center and bottom-center of the control\n        Point startPoint1 = new Point(rect.centerX(), rect.centerY()\n                          + (int) ((rect.height() / 2) * percentage));\n        Point startPoint2 = new Point(rect.centerX(), rect.centerY()\n                          - (int) ((rect.height() / 2) * percentage));\n\n        // End at the same point at the center of the control\n        Point endPoint1 = new Point(rect.centerX(), rect.centerY() + FINGER_TOUCH_HALF_WIDTH);\n        Point endPoint2 = new Point(rect.centerX(), rect.centerY() - FINGER_TOUCH_HALF_WIDTH);\n\n        view.performTwoPointerGesture(startPoint1, startPoint2, endPoint1, endPoint2, steps);\n    }\n\n    public void uiObjectVertPinchOut(UiObject view, int steps, int percent) throws Exception {\n        final int FINGER_TOUCH_HALF_WIDTH = 20;\n\n        // Make value between 1 and 100\n        int nPercent = (percent < 0) ? 1 : (percent > 100) ? 
100 : percent;\n        float percentage = nPercent / 100f;\n\n        Rect rect = view.getVisibleBounds();\n\n        if (rect.width() <= FINGER_TOUCH_HALF_WIDTH * 2) {\n            throw new IllegalStateException(\"Object width is too small for operation\");\n        }\n\n        // Start from the same point at the center of the control\n        Point startPoint1 = new Point(rect.centerX(), rect.centerY() + FINGER_TOUCH_HALF_WIDTH);\n        Point startPoint2 = new Point(rect.centerX(), rect.centerY() - FINGER_TOUCH_HALF_WIDTH);\n\n        // End at the top-center and bottom-center of the control\n        Point endPoint1 = new Point(rect.centerX(), rect.centerY()\n                        + (int) ((rect.height() / 2) * percentage));\n        Point endPoint2 = new Point(rect.centerX(), rect.centerY()\n                        - (int) ((rect.height() / 2) * percentage));\n\n        view.performTwoPointerGesture(startPoint1, startPoint2, endPoint1, endPoint2, steps);\n    }\n\n    public void uiObjectVertPinch(UiObject view, PinchType direction,\n                                  int steps, int percent) throws Exception {\n        if (direction.equals(PinchType.IN)) {\n            uiObjectVertPinchIn(view, steps, percent);\n        } else if (direction.equals(PinchType.OUT)) {\n            uiObjectVertPinchOut(view, steps, percent);\n        }\n    }\n\n    public void uiDeviceSwipeUp(int steps) {\n        mDevice.swipe(\n            getDisplayCentreWidth(),\n            (getDisplayCentreHeight() + (getDisplayCentreHeight() / 2)),\n            getDisplayCentreWidth(),\n            (getDisplayCentreHeight() / 2),\n            steps);\n    }\n\n    public void uiDeviceSwipeDown(int steps) {\n        mDevice.swipe(\n            getDisplayCentreWidth(),\n            (getDisplayCentreHeight() / 2),\n            getDisplayCentreWidth(),\n            (getDisplayCentreHeight() + (getDisplayCentreHeight() / 2)),\n            steps);\n    }\n\n    public void 
uiDeviceSwipeLeft(int steps) {\n        mDevice.swipe(\n            (getDisplayCentreWidth() + (getDisplayCentreWidth() / 2)),\n            getDisplayCentreHeight(),\n            (getDisplayCentreWidth() / 2),\n            getDisplayCentreHeight(),\n            steps);\n    }\n\n    public void uiDeviceSwipeRight(int steps) {\n        mDevice.swipe(\n            (getDisplayCentreWidth() / 2),\n            getDisplayCentreHeight(),\n            (getDisplayCentreWidth() + (getDisplayCentreWidth() / 2)),\n            getDisplayCentreHeight(),\n            steps);\n    }\n\n    public void uiDeviceSwipe(Direction direction, int steps) throws Exception {\n        switch (direction) {\n            case UP:\n                uiDeviceSwipeUp(steps);\n                break;\n            case DOWN:\n                uiDeviceSwipeDown(steps);\n                break;\n            case LEFT:\n                uiDeviceSwipeLeft(steps);\n                break;\n            case RIGHT:\n                uiDeviceSwipeRight(steps);\n                break;\n            case NULL:\n                throw new Exception(\"No direction specified\");\n            default:\n                break;\n        }\n    }\n\n    public void repeatClickUiObject(UiObject view, int repeatCount, int intervalInMillis) throws Exception {\n        int repeatInterval = intervalInMillis > CLICK_REPEAT_INTERVAL_MINIMUM\n                             ? 
intervalInMillis : CLICK_REPEAT_INTERVAL_DEFAULT;\n        if (repeatCount < 1 || !view.isClickable()) {\n            return;\n        }\n\n        for (int i = 0; i < repeatCount; ++i) {\n            view.click();\n            SystemClock.sleep(repeatInterval); // in order to register as separate click\n        }\n    }\n\n\n    public UiObject clickUiObject(FindByCriteria criteria, String matching) throws Exception {\n        return clickUiObject(criteria, matching, null, false);\n    }\n\n    public UiObject clickUiObject(FindByCriteria criteria, String matching, boolean wait) throws Exception {\n        return clickUiObject(criteria, matching, null, wait);\n    }\n\n    public UiObject clickUiObject(FindByCriteria criteria, String matching, String clazz) throws Exception {\n        return clickUiObject(criteria, matching, clazz, false);\n    }\n\n    public UiObject clickUiObject(FindByCriteria criteria, String matching, String clazz, boolean wait) throws Exception {\n        UiObject view;\n\n        switch (criteria) {\n            case BY_ID:\n                view = (clazz == null)\n                     ? getUiObjectByResourceId(matching) : getUiObjectByResourceId(matching, clazz);\n                break;\n            case BY_DESC:\n                view = (clazz == null)\n                     ? getUiObjectByDescription(matching) : getUiObjectByDescription(matching, clazz);\n                break;\n            case BY_TEXT:\n            default:\n                view = (clazz == null)\n                     ? 
getUiObjectByText(matching) : getUiObjectByText(matching, clazz);\n                break;\n        }\n\n        if (wait) {\n            view.clickAndWaitForNewWindow();\n        } else {\n            view.click();\n        }\n        return view;\n    }\n\n    public UiObject getUiObjectByResourceId(String resourceId, String className) throws Exception {\n        return getUiObjectByResourceId(resourceId, className, uiAutoTimeout);\n    }\n\n    public UiObject getUiObjectByResourceId(String resourceId, String className, long timeout) throws Exception {\n        UiObject object = mDevice.findObject(new UiSelector().resourceId(resourceId)\n                .className(className));\n        if (!object.waitForExists(timeout)) {\n            throw new UiObjectNotFoundException(String.format(\"Could not find \\\"%s\\\" \\\"%s\\\"\",\n                    resourceId, className));\n        }\n        return object;\n    }\n\n    public UiObject getUiObjectByResourceId(String id) throws Exception {\n        UiObject object = mDevice.findObject(new UiSelector().resourceId(id));\n\n        if (!object.waitForExists(uiAutoTimeout)) {\n            throw new UiObjectNotFoundException(\"Could not find view with resource ID: \" + id);\n        }\n        return object;\n    }\n\n    public UiObject getUiObjectByDescription(String description, String className) throws Exception {\n        return getUiObjectByDescription(description, className, uiAutoTimeout);\n    }\n\n    public UiObject getUiObjectByDescription(String description, String className, long timeout) throws Exception {\n        UiObject object = mDevice.findObject(new UiSelector().descriptionContains(description)\n                                                             .className(className));\n        if (!object.waitForExists(timeout)) {\n            throw new UiObjectNotFoundException(String.format(\"Could not find \\\"%s\\\" \\\"%s\\\"\",\n                    description, className));\n        }\n        
return object;\n    }\n\n    public UiObject getUiObjectByDescription(String desc) throws Exception {\n        UiObject object = mDevice.findObject(new UiSelector().descriptionContains(desc));\n\n        if (!object.waitForExists(uiAutoTimeout)) {\n            throw new UiObjectNotFoundException(\"Could not find view with description: \" + desc);\n        }\n        return object;\n    }\n\n    public UiObject getUiObjectByText(String text, String className) throws Exception {\n        return getUiObjectByText(text, className, uiAutoTimeout);\n    }\n\n    public UiObject getUiObjectByText(String text, String className, long timeout) throws Exception {\n        UiObject object = mDevice.findObject(new UiSelector().textContains(text)\n                                                             .className(className));\n        if (!object.waitForExists(timeout)) {\n            throw new UiObjectNotFoundException(String.format(\"Could not find \\\"%s\\\" \\\"%s\\\"\",\n                                                              text, className));\n        }\n        return object;\n    }\n\n    public UiObject getUiObjectByText(String text) throws Exception {\n        UiObject object = mDevice.findObject(new UiSelector().textContains(text));\n\n        if (!object.waitForExists(uiAutoTimeout)) {\n            throw new UiObjectNotFoundException(\"Could not find view with text: \" + text);\n        }\n        return object;\n    }\n\n    // Helper to select a folder in the gallery\n    public void selectGalleryFolder(String directory) throws Exception {\n        UiObject workdir =\n                mDevice.findObject(new UiSelector().text(directory)\n                                                   .className(\"android.widget.TextView\"));\n        UiScrollable scrollView =\n                new UiScrollable(new UiSelector().scrollable(true));\n\n        // If the folder is not present wait for a short time for\n        // the media server to refresh its index.\n     
   boolean discovered = workdir.waitForExists(TimeUnit.SECONDS.toMillis(10));\n        if (!discovered && scrollView.exists()) {\n            // First check if the directory is visible on the first\n            // screen and if not scroll to the bottom of the screen to look for it.\n            discovered = scrollView.scrollIntoView(workdir);\n\n            // If still not discovered scroll back to the top of the screen and\n            // wait for a longer amount of time for the media server to refresh\n            // its index.\n            if (!discovered) {\n                // scrollView.scrollToBeginning() doesn't work for this\n                // particular scrollable view so use device method instead\n                for (int i = 0; i < 10; i++) {\n                    uiDeviceSwipeUp(20);\n                }\n                discovered = workdir.waitForExists(TimeUnit.SECONDS.toMillis(60));\n\n                // Scroll to the bottom of the screen one last time\n                if (!discovered) {\n                    discovered = scrollView.scrollIntoView(workdir);\n                }\n            }\n        }\n\n        if (discovered) {\n            workdir.clickAndWaitForNewWindow();\n        } else {\n            throw new UiObjectNotFoundException(\"Could not find folder : \" + directory);\n        }\n    }\n\n\n    // If an app is not designed for running on the latest version of android\n    // (currently Q) an additional screen can popup asking to confirm permissions.\n    public void dismissAndroidPermissionPopup() throws Exception {\n        UiObject permissionAccess =\n            mDevice.findObject(new UiSelector().textMatches(\n                \".*Choose what to allow .* to access\"));\n        UiObject continueButton =\n                mDevice.findObject(new UiSelector().resourceId(\"com.android.permissioncontroller:id/continue_button\")\n                                                   .textContains(\"Continue\"));\n        if 
(permissionAccess.exists() && continueButton.exists()) {\n            continueButton.click();\n        }\n    }\n\n\n    // If an app is not designed for running on the latest version of android\n    // (currently Q) dismiss the warning popup if present.\n    public void dismissAndroidVersionPopup() throws Exception {\n\n        // Ensure we have dismissed any permission screens before looking for the version popup\n        dismissAndroidPermissionPopup();\n\n        UiObject warningText =\n            mDevice.findObject(new UiSelector().textContains(\n                \"This app was built for an older version of Android\"));\n        UiObject acceptButton =\n            mDevice.findObject(new UiSelector().resourceId(\"android:id/button1\")\n                                               .className(\"android.widget.Button\"));\n        if (warningText.exists() && acceptButton.exists()) {\n            acceptButton.click();\n        }\n    }\n\n\n    // If Chrome is a fresh install then these popups may be presented\n    // dismiss them if visible.\n    public void dismissChromePopup() throws Exception {\n        UiObject accept =\n            mDevice.findObject(new UiSelector().resourceId(\"com.android.chrome:id/terms_accept\")\n                .className(\"android.widget.Button\"));\n        if (accept.waitForExists(3000)){\n            accept.click();\n            UiObject negative =\n                mDevice.findObject(new UiSelector().resourceId(\"com.android.chrome:id/negative_button\")\n                    .className(\"android.widget.Button\"));\n            if (negative.waitForExists(10000)) {\n                negative.click();\n            }\n        }\n        UiObject lite =\n            mDevice.findObject(new UiSelector().resourceId(\"com.android.chrome:id/button_secondary\")\n                .className(\"android.widget.Button\"));\n        if (lite.exists()){\n            lite.click();\n        }\n    }\n\n    // Override getParams function to decode a 
url encoded parameter bundle before\n    // passing it to workloads.\n    public Bundle getParams() {\n        // Get the original parameter bundle\n        Bundle parameters = getArguments();\n\n        // Decode each parameter in the bundle, except null values and \"class\", as this\n        // used to control instrumentation and therefore not encoded.\n        for (String key : parameters.keySet()) {\n            String param = parameters.getString(key);\n            if (param != null && !key.equals(\"class\")) {\n                param = android.net.Uri.decode(param);\n                parameters = decode(parameters, key, param);\n            }\n        }\n        return parameters;\n    }\n\n    // Helper function to decode a string and insert it as an appropriate type\n    // into a provided bundle with its key.\n    // Each bundle parameter will be a urlencoded string with 2 characters prefixed to the value\n    // used to store the original type information, e.g. 'fl' -> list of floats.\n    private Bundle decode(Bundle parameters, String key, String value) {\n        char value_type = value.charAt(0);\n        char value_dimension = value.charAt(1);\n        String param = value.substring(2);\n\n        if (value_dimension == 's') {\n            if (value_type == 's') {\n                parameters.putString(key, param);\n            } else if (value_type == 'f') {\n                parameters.putFloat(key, Float.parseFloat(param));\n            } else if (value_type == 'd') {\n                parameters.putDouble(key, Double.parseDouble(param));\n            } else if (value_type == 'b') {\n                parameters.putBoolean(key, Boolean.parseBoolean(param));\n            } else if (value_type == 'i') {\n                parameters.putInt(key, Integer.parseInt(param));\n            } else if (value_type == 'n') {\n                parameters.putString(key, \"None\");\n            } else {\n                throw new IllegalArgumentException(\"Error 
decoding:\" + key + value\n                                                   + \" - unknown format\");\n            }\n        } else if (value_dimension == 'l') {\n            return decodeArray(parameters, key, value_type, param);\n        } else {\n            throw new IllegalArgumentException(\"Error decoding:\" + key + value\n                    + \" - unknown format\");\n        }\n        return parameters;\n    }\n\n    // Helper function to deal with decoding arrays and update the bundle with\n    // an appropriate array type. The string \"0newelement0\" is used to distinguish\n    // each element from each other in the array when encoded.\n    private Bundle decodeArray(Bundle parameters, String key, char type, String value) {\n        String[] string_list = value.split(\"0newelement0\");\n        if (type == 's') {\n            parameters.putStringArray(key, string_list);\n        }\n        else if (type == 'i') {\n            int[] int_list = new int[string_list.length];\n            for (int i = 0; i < string_list.length; i++){\n                int_list[i] = Integer.parseInt(string_list[i]);\n            }\n            parameters.putIntArray(key, int_list);\n        } else if (type == 'f') {\n            float[] float_list = new float[string_list.length];\n            for (int i = 0; i < string_list.length; i++){\n                float_list[i] = Float.parseFloat(string_list[i]);\n            }\n            parameters.putFloatArray(key, float_list);\n        } else if (type == 'd') {\n            double[] double_list = new double[string_list.length];\n            for (int i = 0; i < string_list.length; i++){\n                double_list[i] = Double.parseDouble(string_list[i]);\n            }\n            parameters.putDoubleArray(key, double_list);\n        } else if (type == 'b') {\n            boolean[] boolean_list = new boolean[string_list.length];\n            for (int i = 0; i < string_list.length; i++){\n                boolean_list[i] = 
Boolean.parseBoolean(string_list[i]);\n            }\n            parameters.putBooleanArray(key, boolean_list);\n        } else {\n            throw new IllegalArgumentException(\"Error decoding array: \" +\n                                               value + \" - unknown format\");\n        }\n        return parameters;\n    }\n}\n"
  },
  {
    "path": "wa/framework/uiauto/app/src/main/java/com/arm/wa/uiauto/UiAutoUtils.java",
    "content": "/*    Copyright 2013-2017 ARM Limited\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n*/\n\npackage com.arm.wa.uiauto;\n\nimport android.os.Bundle;\n\npublic final class UiAutoUtils {\n\n    /** Construct launch command of an application. */\n    public static String createLaunchCommand(Bundle parameters) {\n        String launchCommand;\n        String activityName = parameters.getString(\"launch_activity\");\n        String packageName = parameters.getString(\"package_name\");\n        if (activityName.equals(\"None\")) {\n            launchCommand = String.format(\"am start --user -3 %s\", packageName);\n        }\n        else {\n            launchCommand = String.format(\"am start --user -3 -n %s/%s\", packageName, activityName);\n        }\n        return launchCommand;\n    }\n}\n"
  },
  {
    "path": "wa/framework/uiauto/app/src/main/java/com/arm/wa/uiauto/UxPerfUiAutomation.java",
    "content": "/*    Copyright 2013-2017 ARM Limited\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n*/\n\npackage com.arm.wa.uiauto;\n\nimport android.os.Bundle;\n\nimport java.util.logging.Logger;\n\nimport com.arm.wa.uiauto.BaseUiAutomation.Direction;\nimport com.arm.wa.uiauto.BaseUiAutomation.PinchType;\n\n\npublic class UxPerfUiAutomation {\n\n    private Logger logger = Logger.getLogger(UxPerfUiAutomation.class.getName());\n\n    public enum GestureType { UIDEVICE_SWIPE, UIOBJECT_SWIPE, PINCH };\n\n    public static class GestureTestParams {\n        public GestureType gestureType;\n        public Direction gestureDirection;\n        public PinchType pinchType;\n        public int percent;\n        public int steps;\n\n        public GestureTestParams(GestureType gesture, Direction direction, int steps) {\n            this.gestureType = gesture;\n            this.gestureDirection = direction;\n            this.pinchType = PinchType.NULL;\n            this.steps = steps;\n            this.percent = 0;\n        }\n\n        public GestureTestParams(GestureType gesture, PinchType pinchType, int steps, int percent) {\n            this.gestureType = gesture;\n            this.gestureDirection = Direction.NULL;\n            this.pinchType = pinchType;\n            this.steps = steps;\n            this.percent = percent;\n        }\n    }\n}\n"
  },
  {
    "path": "wa/framework/uiauto/build.gradle",
    "content": "// Top-level build file where you can add configuration options common to all sub-projects/modules.\n\nbuildscript {\n    repositories {\n        jcenter()\n        google()\n    }\n    dependencies {\n        classpath 'com.android.tools.build:gradle:7.2.1'\n\n\n    // NOTE: Do not place your application dependencies here; they belong\n        // in the individual module build.gradle files\n    }\n}\n\nallprojects {\n    repositories {\n        jcenter()\n        google()\n    }\n}\n\ntask clean(type: Delete) {\n    delete rootProject.buildDir\n}\n"
  },
  {
    "path": "wa/framework/uiauto/build.sh",
    "content": "#!/bin/bash\n#    Copyright 2013-2017 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\nset -e\n\n# Ensure gradelw exists before starting\nif [[ ! -f gradlew ]]; then\n    echo 'gradlew file not found! Check that you are in the right directory.'\n    exit 9\nfi\n\n# Build and return appropriate exit code if failed\n./gradlew clean :app:assembleDebug\nexit_code=$?\nif [ $exit_code -ne 0 ]; then\n    echo \"ERROR: 'gradle build' exited with code $exit_code\"\n    exit $exit_code\nfi\n\ncp app/build/outputs/aar/app-debug.aar ./uiauto.aar\n"
  },
  {
    "path": "wa/framework/uiauto/gradle/wrapper/gradle-wrapper.properties",
    "content": "#Wed May 03 15:42:44 BST 2017\ndistributionBase=GRADLE_USER_HOME\ndistributionPath=wrapper/dists\nzipStoreBase=GRADLE_USER_HOME\nzipStorePath=wrapper/dists\ndistributionUrl=https\\://services.gradle.org/distributions/gradle-7.3.3-all.zip\n"
  },
  {
    "path": "wa/framework/uiauto/gradlew",
    "content": "#!/usr/bin/env bash\n\n##############################################################################\n##\n##  Gradle start up script for UN*X\n##\n##############################################################################\n\n# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.\nDEFAULT_JVM_OPTS=\"\"\n\nAPP_NAME=\"Gradle\"\nAPP_BASE_NAME=`basename \"$0\"`\n\n# Use the maximum available, or set MAX_FD != -1 to use that value.\nMAX_FD=\"maximum\"\n\nwarn ( ) {\n    echo \"$*\"\n}\n\ndie ( ) {\n    echo\n    echo \"$*\"\n    echo\n    exit 1\n}\n\n# OS specific support (must be 'true' or 'false').\ncygwin=false\nmsys=false\ndarwin=false\ncase \"`uname`\" in\n  CYGWIN* )\n    cygwin=true\n    ;;\n  Darwin* )\n    darwin=true\n    ;;\n  MINGW* )\n    msys=true\n    ;;\nesac\n\n# Attempt to set APP_HOME\n# Resolve links: $0 may be a link\nPRG=\"$0\"\n# Need this for relative symlinks.\nwhile [ -h \"$PRG\" ] ; do\n    ls=`ls -ld \"$PRG\"`\n    link=`expr \"$ls\" : '.*-> \\(.*\\)$'`\n    if expr \"$link\" : '/.*' > /dev/null; then\n        PRG=\"$link\"\n    else\n        PRG=`dirname \"$PRG\"`\"/$link\"\n    fi\ndone\nSAVED=\"`pwd`\"\ncd \"`dirname \\\"$PRG\\\"`/\" >/dev/null\nAPP_HOME=\"`pwd -P`\"\ncd \"$SAVED\" >/dev/null\n\nCLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar\n\n# Determine the Java command to use to start the JVM.\nif [ -n \"$JAVA_HOME\" ] ; then\n    if [ -x \"$JAVA_HOME/jre/sh/java\" ] ; then\n        # IBM's JDK on AIX uses strange locations for the executables\n        JAVACMD=\"$JAVA_HOME/jre/sh/java\"\n    else\n        JAVACMD=\"$JAVA_HOME/bin/java\"\n    fi\n    if [ ! 
-x \"$JAVACMD\" ] ; then\n        die \"ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME\n\nPlease set the JAVA_HOME variable in your environment to match the\nlocation of your Java installation.\"\n    fi\nelse\n    JAVACMD=\"java\"\n    which java >/dev/null 2>&1 || die \"ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.\n\nPlease set the JAVA_HOME variable in your environment to match the\nlocation of your Java installation.\"\nfi\n\n# Increase the maximum file descriptors if we can.\nif [ \"$cygwin\" = \"false\" -a \"$darwin\" = \"false\" ] ; then\n    MAX_FD_LIMIT=`ulimit -H -n`\n    if [ $? -eq 0 ] ; then\n        if [ \"$MAX_FD\" = \"maximum\" -o \"$MAX_FD\" = \"max\" ] ; then\n            MAX_FD=\"$MAX_FD_LIMIT\"\n        fi\n        ulimit -n $MAX_FD\n        if [ $? -ne 0 ] ; then\n            warn \"Could not set maximum file descriptor limit: $MAX_FD\"\n        fi\n    else\n        warn \"Could not query maximum file descriptor limit: $MAX_FD_LIMIT\"\n    fi\nfi\n\n# For Darwin, add options to specify how the application appears in the dock\nif $darwin; then\n    GRADLE_OPTS=\"$GRADLE_OPTS \\\"-Xdock:name=$APP_NAME\\\" \\\"-Xdock:icon=$APP_HOME/media/gradle.icns\\\"\"\nfi\n\n# For Cygwin, switch paths to Windows format before running java\nif $cygwin ; then\n    APP_HOME=`cygpath --path --mixed \"$APP_HOME\"`\n    CLASSPATH=`cygpath --path --mixed \"$CLASSPATH\"`\n    JAVACMD=`cygpath --unix \"$JAVACMD\"`\n\n    # We build the pattern for arguments to be converted via cygpath\n    ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`\n    SEP=\"\"\n    for dir in $ROOTDIRSRAW ; do\n        ROOTDIRS=\"$ROOTDIRS$SEP$dir\"\n        SEP=\"|\"\n    done\n    OURCYGPATTERN=\"(^($ROOTDIRS))\"\n    # Add a user-defined pattern to the cygpath arguments\n    if [ \"$GRADLE_CYGPATTERN\" != \"\" ] ; then\n        OURCYGPATTERN=\"$OURCYGPATTERN|($GRADLE_CYGPATTERN)\"\n    fi\n    # Now convert the arguments - 
kludge to limit ourselves to /bin/sh\n    i=0\n    for arg in \"$@\" ; do\n        CHECK=`echo \"$arg\"|egrep -c \"$OURCYGPATTERN\" -`\n        CHECK2=`echo \"$arg\"|egrep -c \"^-\"`                                 ### Determine if an option\n\n        if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then                    ### Added a condition\n            eval `echo args$i`=`cygpath --path --ignore --mixed \"$arg\"`\n        else\n            eval `echo args$i`=\"\\\"$arg\\\"\"\n        fi\n        i=$((i+1))\n    done\n    case $i in\n        (0) set -- ;;\n        (1) set -- \"$args0\" ;;\n        (2) set -- \"$args0\" \"$args1\" ;;\n        (3) set -- \"$args0\" \"$args1\" \"$args2\" ;;\n        (4) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" ;;\n        (5) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" ;;\n        (6) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" \"$args5\" ;;\n        (7) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" \"$args5\" \"$args6\" ;;\n        (8) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" \"$args5\" \"$args6\" \"$args7\" ;;\n        (9) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" \"$args5\" \"$args6\" \"$args7\" \"$args8\" ;;\n    esac\nfi\n\n# Split up the JVM_OPTS And GRADLE_OPTS values into an array, following the shell quoting and substitution rules\nfunction splitJvmOpts() {\n    JVM_OPTS=(\"$@\")\n}\neval splitJvmOpts $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS\nJVM_OPTS[${#JVM_OPTS[*]}]=\"-Dorg.gradle.appname=$APP_BASE_NAME\"\n\nexec \"$JAVACMD\" \"${JVM_OPTS[@]}\" -classpath \"$CLASSPATH\" org.gradle.wrapper.GradleWrapperMain \"$@\"\n"
  },
  {
    "path": "wa/framework/uiauto/gradlew.bat",
    "content": "@if \"%DEBUG%\" == \"\" @echo off\r\n@rem ##########################################################################\r\n@rem\r\n@rem  Gradle startup script for Windows\r\n@rem\r\n@rem ##########################################################################\r\n\r\n@rem Set local scope for the variables with windows NT shell\r\nif \"%OS%\"==\"Windows_NT\" setlocal\r\n\r\n@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.\r\nset DEFAULT_JVM_OPTS=\r\n\r\nset DIRNAME=%~dp0\r\nif \"%DIRNAME%\" == \"\" set DIRNAME=.\r\nset APP_BASE_NAME=%~n0\r\nset APP_HOME=%DIRNAME%\r\n\r\n@rem Find java.exe\r\nif defined JAVA_HOME goto findJavaFromJavaHome\r\n\r\nset JAVA_EXE=java.exe\r\n%JAVA_EXE% -version >NUL 2>&1\r\nif \"%ERRORLEVEL%\" == \"0\" goto init\r\n\r\necho.\r\necho ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.\r\necho.\r\necho Please set the JAVA_HOME variable in your environment to match the\r\necho location of your Java installation.\r\n\r\ngoto fail\r\n\r\n:findJavaFromJavaHome\r\nset JAVA_HOME=%JAVA_HOME:\"=%\r\nset JAVA_EXE=%JAVA_HOME%/bin/java.exe\r\n\r\nif exist \"%JAVA_EXE%\" goto init\r\n\r\necho.\r\necho ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%\r\necho.\r\necho Please set the JAVA_HOME variable in your environment to match the\r\necho location of your Java installation.\r\n\r\ngoto fail\r\n\r\n:init\r\n@rem Get command-line arguments, handling Windowz variants\r\n\r\nif not \"%OS%\" == \"Windows_NT\" goto win9xME_args\r\nif \"%@eval[2+2]\" == \"4\" goto 4NT_args\r\n\r\n:win9xME_args\r\n@rem Slurp the command line arguments.\r\nset CMD_LINE_ARGS=\r\nset _SKIP=2\r\n\r\n:win9xME_args_slurp\r\nif \"x%~1\" == \"x\" goto execute\r\n\r\nset CMD_LINE_ARGS=%*\r\ngoto execute\r\n\r\n:4NT_args\r\n@rem Get arguments from the 4NT Shell from JP Software\r\nset CMD_LINE_ARGS=%$\r\n\r\n:execute\r\n@rem Setup the command line\r\n\r\nset 
CLASSPATH=%APP_HOME%\\gradle\\wrapper\\gradle-wrapper.jar\r\n\r\n@rem Execute Gradle\r\n\"%JAVA_EXE%\" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% \"-Dorg.gradle.appname=%APP_BASE_NAME%\" -classpath \"%CLASSPATH%\" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%\r\n\r\n:end\r\n@rem End local scope for the variables with windows NT shell\r\nif \"%ERRORLEVEL%\"==\"0\" goto mainEnd\r\n\r\n:fail\r\nrem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of\r\nrem the _cmd.exe /c_ return code!\r\nif  not \"\" == \"%GRADLE_EXIT_CONSOLE%\" exit 1\r\nexit /b 1\r\n\r\n:mainEnd\r\nif \"%OS%\"==\"Windows_NT\" endlocal\r\n\r\n:omega\r\n"
  },
  {
    "path": "wa/framework/uiauto/settings.gradle",
    "content": "include ':app'\n"
  },
  {
    "path": "wa/framework/version.py",
    "content": "#    Copyright 2014-2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\nimport os\nimport sys\nfrom collections import namedtuple\nfrom subprocess import Popen, PIPE\n\n\nVersionTuple = namedtuple('Version', ['major', 'minor', 'revision', 'dev'])\n\nversion = VersionTuple(3, 4, 0, 'dev1')\n\nrequired_devlib_version = VersionTuple(1, 4, 0, 'dev3')\n\n\ndef format_version(v):\n    version_string = '{}.{}.{}'.format(\n        v.major, v.minor, v.revision)\n    if v.dev:\n        version_string += '.{}'.format(v.dev)\n    return version_string\n\n\ndef get_wa_version():\n    return format_version(version)\n\n\ndef get_wa_version_with_commit():\n    version_string = get_wa_version()\n    commit = get_commit()\n    if commit:\n        return '{}+{}'.format(version_string, commit)\n    else:\n        return version_string\n\n\ndef get_commit():\n    try:\n        p = Popen(['git', 'rev-parse', 'HEAD'],\n                  cwd=os.path.dirname(__file__), stdout=PIPE, stderr=PIPE)\n    except FileNotFoundError:\n        return None\n    std, _ = p.communicate()\n    p.wait()\n    if p.returncode:\n        return None\n    return std[:8].decode(sys.stdout.encoding or 'utf-8')\n"
  },
  {
    "path": "wa/framework/workload.py",
    "content": "#    Copyright 2014-2019 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\nimport logging\nimport os\nimport threading\nimport time\n\ntry:\n    from shlex import quote\nexcept ImportError:\n    from pipes import quote\n\n\nfrom wa.utils.android import get_cacheable_apk_info, build_apk_launch_command\nfrom wa.framework.plugin import TargetedPlugin, Parameter\nfrom wa.framework.resource import (ApkFile, ReventFile,\n                                   File, loose_version_matching,\n                                   range_version_matching)\nfrom wa.framework.exception import WorkloadError, ConfigError\nfrom wa.utils.types import ParameterDict, list_or_string, version_tuple\nfrom wa.utils.revent import ReventRecorder\nfrom wa.utils.exec_control import once_per_instance\nfrom wa.utils.misc import atomic_write_path\n\n\nclass Workload(TargetedPlugin):\n    \"\"\"\n    This is the base class for the workloads executed by the framework.\n    Each of the methods throwing NotImplementedError *must* be implemented\n    by the derived classes.\n    \"\"\"\n\n    kind = 'workload'\n\n    parameters = [\n        Parameter('uninstall', kind=bool,\n                  default=True,\n                  description=\"\"\"\n                  If ``True``, executables that are installed to the device\n                  as part of the workload will be uninstalled again.\n                  \"\"\"),\n    ]\n\n    # Set this to True to mark that this 
workload poses a risk of exposing\n    # information to the outside world about the device it runs on. An example of\n    # this would be a benchmark application that sends scores and device data to a\n    # database owned by the maintainer.\n    # The user can then set allow_phone_home=False in their configuration to\n    # prevent this workload from being run accidentally.\n    phones_home = False\n\n    # Set this to ``True`` to mark that the workload will fail without a network\n    # connection, this enables it to fail early with a clear message.\n    requires_network = False\n\n    # Set this to specify a custom directory for assets to be pushed to, if unset\n    # the working directory will be used.\n    asset_directory = None\n\n    # Used to store information about workload assets.\n    deployable_assets = []\n\n    def __init__(self, target, **kwargs):\n        super(Workload, self).__init__(target, **kwargs)\n        self.asset_files = []\n        self.deployed_assets = []\n\n        supported_platforms = getattr(self, 'supported_platforms', [])\n        if supported_platforms and self.target.os not in supported_platforms:\n            msg = 'Supported platforms for \"{}\" are \"{}\", attempting to run on \"{}\"'\n            raise WorkloadError(msg.format(self.name, ' '.join(self.supported_platforms),\n                                           self.target.os))\n\n    def init_resources(self, context):\n        \"\"\"\n        This method may be used to perform early resource discovery and\n        initialization. This is invoked during the initial loading stage and\n        before the device is ready, so cannot be used for any device-dependent\n        initialization. 
This method is invoked before the workload instance is\n        validated.\n\n        \"\"\"\n        for asset in self.deployable_assets:\n            self.asset_files.append(context.get(File(self, asset)))\n\n    @once_per_instance\n    def initialize(self, context):\n        \"\"\"\n        This method should be used to perform once-per-run initialization of a\n        workload instance, i.e., unlike ``setup()`` it will not be invoked on\n        each iteration.\n        \"\"\"\n        if self.asset_files:\n            self.deploy_assets(context)\n\n    def setup(self, context):\n        \"\"\"\n        Perform the setup necessary to run the workload, such as copying the\n        necessary files to the device, configuring the environments, etc.\n\n        This is also the place to perform any on-device checks prior to\n        attempting to execute the workload.\n        \"\"\"\n        # pylint: disable=unused-argument\n        if self.requires_network and not self.target.is_network_connected():\n            raise WorkloadError(\n                'Workload \"{}\" requires internet. Target does not appear '\n                'to be connected to the internet.'.format(self.name))\n\n    def run(self, context):\n        \"\"\"\n        Execute the workload. This is the method that performs the actual\n        \"work\" of the workload.\n        \"\"\"\n\n    def extract_results(self, context):\n        \"\"\"\n        Extract results on the target\n        \"\"\"\n\n    def update_output(self, context):\n        \"\"\"\n        Update the output within the specified execution context with the\n        metrics and artifacts for this workload iteration.\n\n        \"\"\"\n\n    def teardown(self, context):\n        \"\"\" Perform any final clean up for the Workload. 
\"\"\"\n\n    @once_per_instance\n    def finalize(self, context):\n        if self.cleanup_assets:\n            self.remove_assets(context)\n\n    def deploy_assets(self, context):\n        \"\"\" Deploy assets if available to the target \"\"\"\n        # pylint: disable=unused-argument\n        if not self.asset_directory:\n            self.asset_directory = self.target.working_directory\n        else:\n            self.target.execute('mkdir -p {}'.format(self.asset_directory))\n\n        for asset in self.asset_files:\n            self.target.push(asset, self.asset_directory)\n            self.deployed_assets.append(self.target.path.join(self.asset_directory,\n                                                              os.path.basename(asset)))\n\n    def remove_assets(self, context):\n        \"\"\" Cleanup assets deployed to the target \"\"\"\n        # pylint: disable=unused-argument\n        for asset in self.deployed_assets:\n            self.target.remove(asset)\n\n    def __str__(self):\n        return '<Workload {}>'.format(self.name)\n\n\nclass ApkWorkload(Workload):\n\n    supported_platforms = ['android']\n\n    # May be optionally overwritten by subclasses\n    # Times are in seconds\n    loading_time = 10\n    package_names = []\n    supported_versions = []\n    activity = None\n    view = None\n    clear_data_on_reset = True\n    apk_arguments = {}\n\n    # Set this to True to mark that this workload requires the target apk to be run\n    # for initialisation purposes before the main run is performed.\n    requires_rerun = False\n\n    parameters = [\n        Parameter('package_name', kind=str,\n                  description=\"\"\"\n                  The package name that can be used to specify\n                  the workload apk to use.\n                  \"\"\"),\n        Parameter('install_timeout', kind=int,\n                  constraint=lambda x: x > 0,\n                  default=300,\n                  description=\"\"\"\n                  
Timeout for the installation of the apk.\n                  \"\"\"),\n        Parameter('version', kind=str,\n                  default=None,\n                  description=\"\"\"\n                  The version of the package to be used.\n                  \"\"\"),\n        Parameter('max_version', kind=str,\n                  default=None,\n                  description=\"\"\"\n                  The maximum version of the package to be used.\n                  \"\"\"),\n        Parameter('min_version', kind=str,\n                  default=None,\n                  description=\"\"\"\n                  The minimum version of the package to be used.\n                  \"\"\"),\n        Parameter('variant', kind=str,\n                  default=None,\n                  description=\"\"\"\n                  The variant of the package to be used.\n                  \"\"\"),\n        Parameter('strict', kind=bool,\n                  default=False,\n                  description=\"\"\"\n                  Whether to throw an error if the specified package cannot be found\n                  on host.\n                  \"\"\"),\n        Parameter('force_install', kind=bool,\n                  default=False,\n                  description=\"\"\"\n                  Always re-install the APK, even if matching version is found already installed\n                  on the device.\n                  \"\"\"),\n        Parameter('uninstall', kind=bool,\n                  default=False,\n                  override=True,\n                  description=\"\"\"\n                  If ``True``, will uninstall workload\\'s APK as part of teardown.'\n                  \"\"\"),\n        Parameter('exact_abi', kind=bool,\n                  default=False,\n                  description=\"\"\"\n                  If ``True``, workload will check that the APK matches the target\n                  device ABI, otherwise any suitable APK found will be used.\n                  \"\"\"),\n        
Parameter('prefer_host_package', kind=bool,\n                  default=True,\n                  aliases=['check_apk'],\n                  description=\"\"\"\n                  If ``True`` then a package found on the host\n                  will be preferred if it is a valid version and ABI, if not it\n                  will fall back to the version on the target if available. If\n                  ``False`` then the version on the target is preferred instead.\n                  \"\"\"),\n        Parameter('view', kind=str, default=None, merge=True,\n                  description=\"\"\"\n                  Manually override the 'View' of the workload for use with\n                  instruments such as the ``fps`` instrument. If not specified,\n                  a workload dependant 'View' will be automatically generated.\n                  \"\"\"),\n    ]\n\n    @property\n    def package(self):\n        return self.apk.package\n\n    def __init__(self, target, **kwargs):\n        if target.os == 'chromeos':\n            if target.supports_android:\n                target = target.android_container\n            else:\n                raise ConfigError('Target does not appear to support Android')\n\n        super(ApkWorkload, self).__init__(target, **kwargs)\n\n        if self.activity is not None and '.' not in self.activity:\n            # If we're receiving just the activity name, it's taken relative to\n            # the package namespace:\n            self.activity = '.' 
+ self.activity\n\n        self.apk = PackageHandler(self,\n                                  package_name=self.package_name,\n                                  variant=self.variant,\n                                  strict=self.strict,\n                                  version=self.version or self.supported_versions,\n                                  force_install=self.force_install,\n                                  install_timeout=self.install_timeout,\n                                  uninstall=self.uninstall,\n                                  exact_abi=self.exact_abi,\n                                  prefer_host_package=self.prefer_host_package,\n                                  clear_data_on_reset=self.clear_data_on_reset,\n                                  activity=self.activity,\n                                  min_version=self.min_version,\n                                  max_version=self.max_version,\n                                  apk_arguments=self.apk_arguments)\n\n    def validate(self):\n        if self.min_version and self.max_version:\n            if version_tuple(self.min_version) > version_tuple(self.max_version):\n                msg = 'Cannot specify min version ({}) greater than max version ({})'\n                raise ConfigError(msg.format(self.min_version, self.max_version))\n\n    @once_per_instance\n    def initialize(self, context):\n        super(ApkWorkload, self).initialize(context)\n        self.apk.initialize(context)\n        # pylint: disable=access-member-before-definition, attribute-defined-outside-init\n        if self.version is None:\n            self.version = self.apk.apk_info.version_name\n        if self.view is None:\n            self.view = 'SurfaceView - {}/{}'.format(self.apk.package,\n                                                     self.apk.activity)\n\n    def setup(self, context):\n        super(ApkWorkload, self).setup(context)\n        self.apk.setup(context)\n        if 
self.requires_rerun:\n            self.setup_rerun()\n            self.apk.restart_activity()\n        time.sleep(self.loading_time)\n\n    def setup_rerun(self):\n        \"\"\"\n        Perform the setup necessary to rerun the workload. Only called if\n        ``requires_rerun`` is set.\n        \"\"\"\n\n    def teardown(self, context):\n        super(ApkWorkload, self).teardown(context)\n        self.apk.teardown()\n\n    def deploy_assets(self, context):\n        super(ApkWorkload, self).deploy_assets(context)\n        self.target.refresh_files(self.deployed_assets)\n\n\nclass ApkUIWorkload(ApkWorkload):\n\n    def __init__(self, target, **kwargs):\n        super(ApkUIWorkload, self).__init__(target, **kwargs)\n        self.gui = None\n\n    def init_resources(self, context):\n        super(ApkUIWorkload, self).init_resources(context)\n        self.gui.init_resources(context)\n\n    @once_per_instance\n    def initialize(self, context):\n        super(ApkUIWorkload, self).initialize(context)\n        self.gui.deploy()\n\n    def setup(self, context):\n        super(ApkUIWorkload, self).setup(context)\n        self.gui.setup()\n\n    def run(self, context):\n        super(ApkUIWorkload, self).run(context)\n        self.gui.run()\n\n    def extract_results(self, context):\n        super(ApkUIWorkload, self).extract_results(context)\n        self.gui.extract_results()\n\n    def teardown(self, context):\n        self.gui.teardown()\n        super(ApkUIWorkload, self).teardown(context)\n\n    @once_per_instance\n    def finalize(self, context):\n        super(ApkUIWorkload, self).finalize(context)\n        if self.cleanup_assets:\n            self.gui.remove()\n\n\nclass ApkUiautoWorkload(ApkUIWorkload):\n\n    parameters = [\n        Parameter('markers_enabled', kind=bool, default=False,\n                  description=\"\"\"\n                  If set to ``True``, workloads will insert markers into logs\n                  at various points during execution. 
These markers may be used\n                  by other plugins or post-processing scripts to provide\n                  measurements or statistics for specific parts of the workload\n                  execution.\n                  \"\"\"),\n    ]\n\n    def __init__(self, target, **kwargs):\n        super(ApkUiautoWorkload, self).__init__(target, **kwargs)\n        self.gui = UiAutomatorGUI(self)\n\n    def setup(self, context):\n        self.gui.uiauto_params['package_name'] = self.apk.apk_info.package\n        self.gui.uiauto_params['markers_enabled'] = self.markers_enabled\n        self.gui.init_commands()\n        super(ApkUiautoWorkload, self).setup(context)\n\n\nclass ApkReventWorkload(ApkUIWorkload):\n\n    # May be optionally overwritten by subclasses\n    # Times are in seconds\n    setup_timeout = 5 * 60\n    run_timeout = 10 * 60\n    extract_results_timeout = 5 * 60\n    teardown_timeout = 5 * 60\n\n    def __init__(self, target, **kwargs):\n        super(ApkReventWorkload, self).__init__(target, **kwargs)\n        self.gui = ReventGUI(self, target,\n                             self.setup_timeout,\n                             self.run_timeout,\n                             self.extract_results_timeout,\n                             self.teardown_timeout)\n\n\nclass UIWorkload(Workload):\n\n    def __init__(self, target, **kwargs):\n        super(UIWorkload, self).__init__(target, **kwargs)\n        self.gui = None\n\n    def init_resources(self, context):\n        super(UIWorkload, self).init_resources(context)\n        self.gui.init_resources(context)\n\n    @once_per_instance\n    def initialize(self, context):\n        super(UIWorkload, self).initialize(context)\n        self.gui.deploy()\n\n    def setup(self, context):\n        super(UIWorkload, self).setup(context)\n        self.gui.setup()\n\n    def run(self, context):\n        super(UIWorkload, self).run(context)\n        self.gui.run()\n\n    def extract_results(self, context):\n        
super(UIWorkload, self).extract_results(context)\n        self.gui.extract_results()\n\n    def teardown(self, context):\n        self.gui.teardown()\n        super(UIWorkload, self).teardown(context)\n\n    @once_per_instance\n    def finalize(self, context):\n        super(UIWorkload, self).finalize(context)\n        if self.cleanup_assets:\n            self.gui.remove()\n\n\nclass UiautoWorkload(UIWorkload):\n\n    supported_platforms = ['android']\n\n    parameters = [\n        Parameter('markers_enabled', kind=bool, default=False,\n                  description=\"\"\"\n                  If set to ``True``, workloads will insert markers into logs\n                  at various points during execution. These markers may be used\n                  by other plugins or post-processing scripts to provide\n                  measurements or statistics for specific parts of the workload\n                  execution.\n                  \"\"\"),\n    ]\n\n    def __init__(self, target, **kwargs):\n        if target.os == 'chromeos':\n            if target.supports_android:\n                target = target.android_container\n            else:\n                raise ConfigError('Target does not appear to support Android')\n\n        super(UiautoWorkload, self).__init__(target, **kwargs)\n        self.gui = UiAutomatorGUI(self)\n\n    def setup(self, context):\n        self.gui.uiauto_params['markers_enabled'] = self.markers_enabled\n        self.gui.init_commands()\n        super(UiautoWorkload, self).setup(context)\n\n\nclass ReventWorkload(UIWorkload):\n\n    # May be optionally overwritten by subclasses\n    # Times are in seconds\n    setup_timeout = 5 * 60\n    run_timeout = 10 * 60\n    extract_results_timeout = 5 * 60\n    teardown_timeout = 5 * 60\n\n    def __init__(self, target, **kwargs):\n        super(ReventWorkload, self).__init__(target, **kwargs)\n        self.gui = ReventGUI(self, target,\n                             self.setup_timeout,\n                   
          self.run_timeout,\n                             self.extract_results_timeout,\n                             self.teardown_timeout)\n\n\nclass UiAutomatorGUI(object):\n\n    stages = ['setup', 'runWorkload', 'extractResults', 'teardown']\n\n    uiauto_runner = 'android.support.test.runner.AndroidJUnitRunner'\n\n    def __init__(self, owner, package=None, klass='UiAutomation', timeout=600):\n        self.owner = owner\n        self.target = self.owner.target\n        self.uiauto_package = package\n        self.uiauto_class = klass\n        self.timeout = timeout\n        self.logger = logging.getLogger('gui')\n        self.uiauto_file = None\n        self.commands = {}\n        self.uiauto_params = ParameterDict()\n\n    def init_resources(self, resolver):\n        self.uiauto_file = resolver.get(ApkFile(self.owner, uiauto=True))\n        if not self.uiauto_package:\n            uiauto_info = get_cacheable_apk_info(self.uiauto_file)\n            self.uiauto_package = uiauto_info.package\n\n    def init_commands(self):\n        params_dict = self.uiauto_params\n        params_dict['workdir'] = self.target.working_directory\n        params = ''\n        for k, v in params_dict.iter_encoded_items():\n            params += ' -e {} {}'.format(k, v)\n\n        for stage in self.stages:\n            class_string = '{}.{}#{}'.format(self.uiauto_package, self.uiauto_class,\n                                             stage)\n            instrumentation_string = '{}/{}'.format(self.uiauto_package,\n                                                    self.uiauto_runner)\n            cmd_template = 'am instrument -w -r{} -e class {} {}'\n            self.commands[stage] = cmd_template.format(params, class_string,\n                                                       instrumentation_string)\n\n    def deploy(self):\n        if self.target.package_is_installed(self.uiauto_package):\n            self.target.uninstall_package(self.uiauto_package)\n        
self.target.install_apk(self.uiauto_file)\n\n    def set(self, name, value):\n        self.uiauto_params[name] = value\n\n    def setup(self, timeout=None):\n        if not self.commands:\n            raise RuntimeError('Commands have not been initialized')\n        self.target.killall('uiautomator')\n        self._execute('setup', timeout or self.timeout)\n\n    def run(self, timeout=None):\n        if not self.commands:\n            raise RuntimeError('Commands have not been initialized')\n        self._execute('runWorkload', timeout or self.timeout)\n\n    def extract_results(self, timeout=None):\n        if not self.commands:\n            raise RuntimeError('Commands have not been initialized')\n        self._execute('extractResults', timeout or self.timeout)\n\n    def teardown(self, timeout=None):\n        if not self.commands:\n            raise RuntimeError('Commands have not been initialized')\n        self._execute('teardown', timeout or self.timeout)\n\n    def remove(self):\n        self.target.uninstall(self.uiauto_package)\n\n    def _execute(self, stage, timeout):\n        result = self.target.execute(self.commands[stage], timeout)\n        if 'FAILURE' in result:\n            raise WorkloadError(result)\n        else:\n            self.logger.debug(result)\n        time.sleep(2)\n\n\nclass ReventGUI(object):\n\n    def __init__(self, workload, target, setup_timeout, run_timeout,\n                 extract_results_timeout, teardown_timeout):\n        self.logger = logging.getLogger(self.__class__.__name__)\n        self.workload = workload\n        self.target = target\n        self.setup_timeout = setup_timeout\n        self.run_timeout = run_timeout\n        self.extract_results_timeout = extract_results_timeout\n        self.teardown_timeout = teardown_timeout\n        self.revent_recorder = ReventRecorder(self.target)\n        self.on_target_revent_binary = self.target.get_workpath('revent')\n        self.on_target_setup_revent = 
self.target.get_workpath('{}.setup.revent'.format(self.target.model))\n        self.on_target_run_revent = self.target.get_workpath('{}.run.revent'.format(self.target.model))\n        self.on_target_extract_results_revent = self.target.get_workpath('{}.extract_results.revent'.format(self.target.model))\n        self.on_target_teardown_revent = self.target.get_workpath('{}.teardown.revent'.format(self.target.model))\n        self.revent_setup_file = None\n        self.revent_run_file = None\n        self.revent_extract_results_file = None\n        self.revent_teardown_file = None\n\n    def init_resources(self, resolver):\n        self.revent_setup_file = resolver.get(ReventFile(owner=self.workload,\n                                                         stage='setup',\n                                                         target=self.target.model),\n                                              strict=False)\n        self.revent_run_file = resolver.get(ReventFile(owner=self.workload,\n                                                       stage='run',\n                                                       target=self.target.model))\n        self.revent_extract_results_file = resolver.get(ReventFile(owner=self.workload,\n                                                                   stage='extract_results',\n                                                                   target=self.target.model),\n                                                        strict=False)\n        self.revent_teardown_file = resolver.get(resource=ReventFile(owner=self.workload,\n                                                                     stage='teardown',\n                                                                     target=self.target.model),\n                                                 strict=False)\n\n    def deploy(self):\n        self.revent_recorder.deploy()\n\n    def setup(self):\n        self._check_revent_files()\n        if 
self.revent_setup_file:\n            self.revent_recorder.replay(self.on_target_setup_revent,\n                                        timeout=self.setup_timeout)\n\n    def run(self):\n        self.logger.debug('Replaying \"%s\" with %d seconds timeout',\n                          os.path.basename(self.on_target_run_revent),\n                          self.run_timeout)\n        self.revent_recorder.replay(self.on_target_run_revent,\n                                    timeout=self.run_timeout)\n        self.logger.debug('Replay completed.')\n\n    def extract_results(self):\n        if self.revent_extract_results_file:\n            self.revent_recorder.replay(self.on_target_extract_results_revent,\n                                        timeout=self.extract_results_timeout)\n\n    def teardown(self):\n        if self.revent_teardown_file:\n            self.revent_recorder.replay(self.on_target_teardown_revent,\n                                        timeout=self.teardown_timeout)\n\n    def remove(self):\n        self.target.remove(self.on_target_setup_revent)\n        self.target.remove(self.on_target_run_revent)\n        self.target.remove(self.on_target_extract_results_revent)\n        self.target.remove(self.on_target_teardown_revent)\n        self.revent_recorder.remove()\n\n    def _check_revent_files(self):\n        if not self.revent_run_file:\n            # pylint: disable=too-few-format-args\n            message = '{0}.run.revent file does not exist, ' \\\n                      'Please provide one for your target, {0}'\n            raise WorkloadError(message.format(self.target.model))\n\n        self.target.push(self.revent_run_file, self.on_target_run_revent)\n        if self.revent_setup_file:\n            self.target.push(self.revent_setup_file, self.on_target_setup_revent)\n        if self.revent_extract_results_file:\n            self.target.push(self.revent_extract_results_file, self.on_target_extract_results_revent)\n        if 
self.revent_teardown_file:\n            self.target.push(self.revent_teardown_file, self.on_target_teardown_revent)\n\n\nclass PackageHandler(object):\n\n    @property\n    def package(self):\n        if self.apk_info is None:\n            return None\n        return self.apk_info.package\n\n    @property\n    def activity(self):\n        if self._activity:\n            return self._activity\n        if self.apk_info is None:\n            return None\n        return self.apk_info.activity\n\n    # pylint: disable=too-many-locals\n    def __init__(self, owner, install_timeout=300, version=None, variant=None,\n                 package_name=None, strict=False, force_install=False, uninstall=False,\n                 exact_abi=False, prefer_host_package=True, clear_data_on_reset=True,\n                 activity=None, min_version=None, max_version=None, apk_arguments=None):\n        self.logger = logging.getLogger('apk')\n        self.owner = owner\n        self.target = self.owner.target\n        self.install_timeout = install_timeout\n        self.version = version\n        self.min_version = min_version\n        self.max_version = max_version\n        self.variant = variant\n        self.package_name = package_name\n        self.strict = strict\n        self.force_install = force_install\n        self.uninstall = uninstall\n        self.exact_abi = exact_abi\n        self.prefer_host_package = prefer_host_package\n        self.clear_data_on_reset = clear_data_on_reset\n        self._activity = activity\n        self.supported_abi = self.target.supported_abi\n        self.apk_file = None\n        self.apk_info = None\n        self.apk_version = None\n        self.logcat_log = None\n        self.error_msg = None\n        self.apk_arguments = apk_arguments\n\n    def initialize(self, context):\n        self.resolve_package(context)\n\n    def setup(self, context):\n        context.update_metadata('app_version', self.apk_info.version_name)\n        
context.update_metadata('app_name', self.apk_info.package)\n        self.initialize_package(context)\n        self.start_activity()\n        self.target.execute('am kill-all')  # kill all *background* activities\n        self.target.clear_logcat()\n\n    def resolve_package(self, context):\n        if not self.owner.package_names and not self.package_name:\n            msg = 'Cannot Resolve package; No package name(s) specified'\n            raise WorkloadError(msg)\n\n        self.error_msg = None\n        if self.prefer_host_package:\n            self.resolve_package_from_host(context)\n            if not self.apk_file:\n                self.resolve_package_from_target()\n        else:\n            self.resolve_package_from_target()\n            if not self.apk_file:\n                self.resolve_package_from_host(context)\n\n        if self.apk_file:\n            self.apk_info = get_cacheable_apk_info(self.apk_file)\n        else:\n            if self.error_msg:\n                raise WorkloadError(self.error_msg)\n            else:\n                if self.package_name:\n                    message = 'Package \"{package}\" not found for workload {name} '\\\n                              'on host or target.'\n                elif self.version:\n                    message = 'No matching package found for workload {name} '\\\n                              '(version {version}) on host or target.'\n                else:\n                    message = 'No matching package found for workload {name} on host or target'\n                raise WorkloadError(message.format(name=self.owner.name, version=self.version,\n                                                   package=self.package_name))\n\n    def resolve_package_from_host(self, context):\n        self.logger.debug('Resolving package on host system')\n        if self.package_name:\n            self.apk_file = context.get_resource(ApkFile(self.owner,\n                                                         
variant=self.variant,\n                                                         version=self.version,\n                                                         package=self.package_name,\n                                                         exact_abi=self.exact_abi,\n                                                         supported_abi=self.supported_abi,\n                                                         min_version=self.min_version,\n                                                         max_version=self.max_version),\n                                                 strict=self.strict)\n        else:\n            available_packages = []\n            for package in self.owner.package_names:\n                apk_file = context.get_resource(ApkFile(self.owner,\n                                                        variant=self.variant,\n                                                        version=self.version,\n                                                        package=package,\n                                                        exact_abi=self.exact_abi,\n                                                        supported_abi=self.supported_abi,\n                                                        min_version=self.min_version,\n                                                        max_version=self.max_version),\n                                                strict=self.strict)\n                if apk_file:\n                    available_packages.append(apk_file)\n            if len(available_packages) == 1:\n                self.apk_file = available_packages[0]\n            elif len(available_packages) > 1:\n                self.error_msg = self._get_package_error_msg('host')\n\n    def resolve_package_from_target(self):  # pylint: disable=too-many-branches\n        self.logger.debug('Resolving package on target')\n        found_package = None\n        if self.package_name:\n            if not 
self.target.package_is_installed(self.package_name):\n                return\n            else:\n                installed_versions = [self.package_name]\n        else:\n            installed_versions = []\n            for package in self.owner.package_names:\n                if self.target.package_is_installed(package):\n                    installed_versions.append(package)\n\n        if self.version or self.min_version or self.max_version:\n            matching_packages = []\n            for package in installed_versions:\n                package_version = self.target.get_package_version(package)\n                if self.version:\n                    for v in list_or_string(self.version):\n                        if loose_version_matching(v, package_version):\n                            matching_packages.append(package)\n                else:\n                    if range_version_matching(package_version, self.min_version,\n                                              self.max_version):\n                        matching_packages.append(package)\n\n            if len(matching_packages) == 1:\n                found_package = matching_packages[0]\n            elif len(matching_packages) > 1:\n                self.error_msg = self._get_package_error_msg('device')\n        else:\n            if len(installed_versions) == 1:\n                found_package = installed_versions[0]\n            elif len(installed_versions) > 1:\n                self.error_msg = 'Package version not set and multiple versions found on device.'\n        if found_package:\n            self.logger.debug('Found matching package on target; Pulling to host.')\n            self.apk_file = self.pull_apk(found_package)\n            self.package_name = found_package\n\n    def initialize_package(self, context):\n        installed_version = self.target.get_package_version(self.apk_info.package)\n        host_version = self.apk_info.version_name\n        if installed_version != host_version:\n       
     if installed_version:\n                message = '{} host version: {}, device version: {}; re-installing...'\n                self.logger.debug(message.format(self.owner.name, host_version,\n                                                 installed_version))\n            else:\n                message = '{} host version: {}, not found on device; installing...'\n                self.logger.debug(message.format(self.owner.name, host_version))\n            self.force_install = True  # pylint: disable=attribute-defined-outside-init\n        else:\n            message = '{} version {} present on both device and host.'\n            self.logger.debug(message.format(self.owner.name, host_version))\n        if self.force_install:\n            if installed_version:\n                self.target.uninstall_package(self.apk_info.package)\n            self.install_apk(context)\n        else:\n            self.reset(context)\n            if self.apk_info.permissions:\n                self.logger.debug('Granting runtime permissions')\n                for permission in self.apk_info.permissions:\n                    self.target.grant_package_permission(self.apk_info.package, permission)\n        self.apk_version = host_version\n\n    def start_activity(self):\n\n        cmd = build_apk_launch_command(self.apk_info.package, self.activity,\n                                       self.apk_arguments)\n\n        output = self.target.execute(cmd)\n        if 'Error:' in output:\n            # this will dismiss any error dialogs\n            self.target.execute('am force-stop {}'.format(self.apk_info.package))\n            raise WorkloadError(output)\n        self.logger.debug(output)\n\n    def restart_activity(self):\n        self.target.execute('am force-stop {}'.format(self.apk_info.package))\n        self.start_activity()\n\n    def reset(self, context):  # pylint: disable=W0613\n        self.target.execute('am force-stop {}'.format(self.apk_info.package))\n        if 
self.clear_data_on_reset:\n            self.target.execute('pm clear {}'.format(self.apk_info.package))\n\n    def install_apk(self, context):\n        # pylint: disable=unused-argument\n        output = self.target.install_apk(self.apk_file, self.install_timeout,\n                                         replace=True, allow_downgrade=True)\n        if 'Failure' in output:\n            if 'ALREADY_EXISTS' in output:\n                msg = 'Using already installed APK (did not uninstall properly?)'\n                self.logger.warning(msg)\n            else:\n                raise WorkloadError(output)\n        else:\n            self.logger.debug(output)\n\n    def pull_apk(self, package):\n        if not self.target.package_is_installed(package):\n            message = 'Cannot retrieve \"{}\" as not installed on Target'\n            raise WorkloadError(message.format(package))\n        package_info = self.target.get_package_info(package)\n        apk_name = self._get_package_name(package_info.apk_path)\n        host_path = os.path.join(self.owner.dependencies_directory, apk_name)\n        with atomic_write_path(host_path) as at_path:\n            self.target.pull(package_info.apk_path, at_path,\n                             timeout=self.install_timeout)\n        return host_path\n\n    def teardown(self):\n        self.target.execute('am force-stop {}'.format(self.apk_info.package))\n        if self.uninstall:\n            self.target.uninstall_package(self.apk_info.package)\n\n    def _get_package_name(self, apk_path):\n        return self.target.path.basename(apk_path)\n\n    def _get_package_error_msg(self, location):\n        if self.version:\n            msg = 'Multiple matches for \"{version}\" found on {location}.'\n        elif self.min_version and self.max_version:\n            msg = 'Multiple matches between versions \"{min_version}\" and \"{max_version}\" found on {location}.'\n        elif self.max_version:\n            msg = 'Multiple matches less 
than or equal to \"{max_version}\" found on {location}.'\n        elif self.min_version:\n            msg = 'Multiple matches greater or equal to \"{min_version}\" found on {location}.'\n        else:\n            msg = ''\n        return msg.format(version=self.version, min_version=self.min_version,\n                          max_version=self.max_version, location=location)\n\n\nclass TestPackageHandler(PackageHandler):\n    \"\"\"Class wrapping an APK used through ``am instrument``.\n    \"\"\"\n    def __init__(self, owner, instrument_args=None, raw_output=False,\n                 instrument_wait=True, no_hidden_api_checks=False,\n                 *args, **kwargs):\n        if instrument_args is None:\n            instrument_args = {}\n        super(TestPackageHandler, self).__init__(owner, *args, **kwargs)\n        self.raw = raw_output\n        self.args = instrument_args\n        self.wait = instrument_wait\n        self.no_checks = no_hidden_api_checks\n\n        self.cmd = ''\n        self.instrument_thread = None\n        self._instrument_output = None\n\n    def setup(self, context):\n        self.initialize_package(context)\n\n        words = ['am', 'instrument', '--user', '0']\n        if self.raw:\n            words.append('-r')\n        if self.wait:\n            words.append('-w')\n        if self.no_checks:\n            words.append('--no-hidden-api-checks')\n        for k, v in self.args.items():\n            words.extend(['-e', str(k), str(v)])\n\n        words.append(str(self.apk_info.package))\n        if self.apk_info.activity:\n            words[-1] += '/{}'.format(self.apk_info.activity)\n\n        self.cmd = ' '.join(quote(x) for x in words)\n        self.instrument_thread = threading.Thread(target=self._start_instrument)\n\n    def start_activity(self):\n        self.instrument_thread.start()\n\n    def wait_instrument_over(self):\n        self.instrument_thread.join()\n        if 'Error:' in self._instrument_output:\n            cmd = 'am 
force-stop {}'.format(self.apk_info.package)\n            self.target.execute(cmd)\n            raise WorkloadError(self._instrument_output)\n\n    def _start_instrument(self):\n        self._instrument_output = self.target.execute(self.cmd)\n        self.logger.debug(self._instrument_output)\n\n    def _get_package_name(self, apk_path):\n        return 'test_{}'.format(self.target.path.basename(apk_path))\n\n    @property\n    def instrument_output(self):\n        if self.instrument_thread.is_alive():\n            self.instrument_thread.join()  # writes self._instrument_output\n        return self._instrument_output\n"
  },
  {
    "path": "wa/instruments/__init__.py",
    "content": ""
  },
  {
    "path": "wa/instruments/delay.py",
    "content": "#    Copyright 2013-2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\n\n# pylint: disable=W0613,E1101,E0203,W0201\nimport time\n\nfrom wa import Instrument, Parameter\nfrom wa.framework.exception import ConfigError, InstrumentError\nfrom wa.framework.instrument import extremely_slow\nfrom wa.utils.types import identifier\n\n\nclass DelayInstrument(Instrument):\n\n    name = 'delay'\n    description = \"\"\"\n    This instrument introduces a delay before beginning a new\n    spec, a new job or before the main execution of a workload.\n\n    The delay may be specified as either a fixed period or a temperature\n    threshold that must be reached.\n\n    Optionally, if an active cooling solution is available on the device to\n    speed up temperature drop between runs, it may be controlled using this\n    instrument.\n\n    \"\"\"\n\n    parameters = [\n        Parameter('temperature_file', default='/sys/devices/virtual/thermal/thermal_zone0/temp',\n                  global_alias='thermal_temp_file',\n                  description=\"\"\"\n                  Full path to the sysfile on the target that\n                  contains the target's temperature.\n                  \"\"\"),\n        Parameter('temperature_timeout', kind=int, default=600,\n                  global_alias='thermal_timeout',\n                  description=\"\"\"\n                  The timeout after which the instrument will\n                  stop waiting even 
if the specified threshold temperature is\n                  not reached. If this timeout is hit, then a warning will be\n                  logged stating the actual temperature at which the timeout has\n                  ended.\n                  \"\"\"),\n        Parameter('temperature_poll_period', kind=int, default=5,\n                  global_alias='thermal_sleep_time',\n                  description=\"\"\"\n                  How long to sleep (in seconds) between polling\n                  current target temperature.\n                  \"\"\"),\n        Parameter('temperature_between_specs', kind=int, default=None,\n                  global_alias='thermal_threshold_between_specs',\n                  description=\"\"\"\n                  Temperature (in target-specific units) the\n                  target must cool down to before the iteration spec will be\n                  run.\n\n                  If this is set to ``0`` then the devices initial temperature will\n                  used as the threshold.\n\n                  .. note:: This cannot be specified at the same time as\n                            ``fixed_between_specs``\n                  \"\"\"),\n        Parameter('fixed_between_specs', kind=int, default=None,\n                  global_alias='fixed_delay_between_specs',\n                  description=\"\"\"\n                  How long to sleep (in seconds) before starting\n                  a new workload spec.\n\n                  .. 
note:: This cannot be specified at the same time as\n                            ``temperature_between_specs``\n                  \"\"\"),\n        Parameter('temperature_between_jobs', kind=int, default=None,\n                  global_alias='thermal_threshold_between_jobs',\n                  aliases=['temperature_between_iterations'],\n                  description=\"\"\"\n                  Temperature (in target-specific units) the\n                  target must cool down to before the next job will be run.\n\n                  If this is set to ``0`` then the device's initial temperature will be\n                  used as the threshold.\n\n                  .. note:: This cannot be specified at the same time as\n                            ``fixed_between_jobs``\n                  \"\"\"),\n        Parameter('fixed_between_jobs', kind=int, default=None,\n                  global_alias='fixed_delay_between_jobs',\n                  aliases=['fixed_between_iterations'],\n                  description=\"\"\"\n                  How long to sleep (in seconds) before starting each\n                  new job.\n\n                  .. note:: This cannot be specified at the same time as\n                            ``temperature_between_jobs``\n                  \"\"\"),\n        Parameter('fixed_before_start', kind=int, default=None,\n                  global_alias='fixed_delay_before_start',\n                  description=\"\"\"\n                  How long to sleep (in seconds) after setup for\n                  an iteration has been performed but before running the\n                  workload.\n\n                  .. 
note:: This cannot be specified at the same time as\n                            ``temperature_before_start``\n                  \"\"\"),\n        Parameter('temperature_before_start', kind=int, default=None,\n                  global_alias='thermal_threshold_before_start',\n                  description=\"\"\"\n                  Temperature (in device-specific units) the\n                  device must cool down to just before the actual workload\n                  execution (after setup has been performed).\n\n                  .. note:: This cannot be specified at the same time as\n                            ``fixed_before_start``\n                  \"\"\"),\n        Parameter('active_cooling', kind=bool, default=False,\n                  description=\"\"\"\n                  This instrument supports an active cooling\n                  solution while waiting for the device temperature to drop to\n                  the threshold. If you wish to use this feature please ensure\n                  the relevant module is installed on the device.\n                  \"\"\"),\n    ]\n\n    active_cooling_modules = ['mbed-fan', 'odroidxu3-fan']\n\n    def initialize(self, context):\n        if self.active_cooling:\n            self.cooling = self._discover_cooling_module()\n            if not self.cooling:\n                msg = 'Cooling module not found on target. 
Please install one of the following modules: {}'\n                raise InstrumentError(msg.format(self.active_cooling_modules))\n\n        if self.temperature_between_jobs == 0:\n            temp = self.target.read_int(self.temperature_file)\n            self.logger.debug('Setting temperature threshold between jobs to {}'.format(temp))\n            self.temperature_between_jobs = temp\n        if self.temperature_between_specs == 0:\n            temp = self.target.read_int(self.temperature_file)\n            msg = 'Setting temperature threshold between workload specs to {}'\n            self.logger.debug(msg.format(temp))\n            self.temperature_between_specs = temp\n\n    @extremely_slow\n    def start(self, context):\n        if self.fixed_before_start:\n            msg = 'Waiting for {}s before running workload...'\n            self.logger.info(msg.format(self.fixed_before_start))\n            time.sleep(self.fixed_before_start)\n        elif self.temperature_before_start:\n            self.logger.info('Waiting for temperature drop before running workload...')\n            self.wait_for_temperature(self.temperature_before_start)\n\n    @extremely_slow\n    def before_job(self, context):\n        if self.fixed_between_specs and context.spec_changed:\n            msg = 'Waiting for {}s before starting new spec...'\n            self.logger.info(msg.format(self.fixed_between_specs))\n            time.sleep(self.fixed_between_specs)\n        elif self.temperature_between_jobs and context.spec_changed:\n            self.logger.info('Waiting for temperature drop before starting new spec...')\n            self.wait_for_temperature(self.temperature_between_jobs)\n        elif self.fixed_between_jobs:\n            msg = 'Waiting for {}s before starting new job...'\n            self.logger.info(msg.format(self.fixed_between_jobs))\n            time.sleep(self.fixed_between_jobs)\n        elif self.temperature_between_jobs:\n            self.logger.info('Waiting for 
temperature drop before starting new job...')\n            self.wait_for_temperature(self.temperature_between_jobs)\n\n    def wait_for_temperature(self, temperature):\n        if self.active_cooling:\n            self.cooling.start()\n            self.do_wait_for_temperature(temperature)\n            self.cooling.stop()\n        else:\n            self.do_wait_for_temperature(temperature)\n\n    def do_wait_for_temperature(self, temperature):\n        reading = self.target.read_int(self.temperature_file)\n        waiting_start_time = time.time()\n        while reading > temperature:\n            self.logger.debug('target temperature: {}'.format(reading))\n            if time.time() - waiting_start_time > self.temperature_timeout:\n                self.logger.warning('Reached timeout; current temperature: {}'.format(reading))\n                break\n            time.sleep(self.temperature_poll_period)\n            reading = self.target.read_int(self.temperature_file)\n\n    def validate(self):\n        if (self.temperature_between_specs is not None\n                and self.fixed_between_specs is not None):\n            raise ConfigError('Both fixed delay and thermal threshold specified for specs.')\n\n        if (self.temperature_between_jobs is not None\n                and self.fixed_between_jobs is not None):\n            raise ConfigError('Both fixed delay and thermal threshold specified for jobs.')\n\n        if (self.temperature_before_start is not None\n                and self.fixed_before_start is not None):\n            raise ConfigError('Both fixed delay and thermal threshold specified before start.')\n\n        if not any([self.temperature_between_specs, self.fixed_between_specs,\n                    self.temperature_between_jobs, self.fixed_between_jobs,\n                    self.temperature_before_start, self.fixed_before_start]):\n            raise ConfigError('Delay instrument is enabled, but no delay is specified.')\n\n    def 
_discover_cooling_module(self):\n        cooling_module = None\n        for module in self.active_cooling_modules:\n            if self.target.has(module):\n                if not cooling_module:\n                    cooling_module = getattr(self.target, identifier(module))\n                else:\n                    msg = 'Multiple cooling modules found \"{}\" \"{}\".'\n                    raise InstrumentError(msg.format(cooling_module.name, module))\n        return cooling_module\n"
  },
  {
    "path": "wa/instruments/dmesg.py",
    "content": "#    Copyright 2014-2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\n\nimport os\n\nfrom wa import Instrument, Parameter\nfrom wa.framework.exception import InstrumentError\nfrom wa.framework.instrument import slow\nfrom wa.utils.misc import ensure_file_directory_exists as _f\n\n\nclass DmesgInstrument(Instrument):\n    # pylint: disable=no-member,attribute-defined-outside-init\n    \"\"\"\n    Collected dmesg output before and during the run.\n\n    \"\"\"\n\n    name = 'dmesg'\n\n    parameters = [\n        Parameter('loglevel', kind=int, allowed_values=list(range(8)),\n                  description='Set loglevel for console output.')\n    ]\n\n    loglevel_file = '/proc/sys/kernel/printk'\n\n    def initialize(self, context):  # pylint: disable=unused-argument\n        self.need_root = self.target.os == 'android'\n        if self.need_root and not self.target.is_rooted:\n            raise InstrumentError('Need root to collect dmesg on Android')\n\n    def setup(self, context):\n        if self.loglevel:\n            self.old_loglevel = self.target.read_int(self.loglevel_file)\n            self.target.write_value(self.loglevel_file, self.loglevel, verify=False)\n        self.before_file = _f(os.path.join(context.output_directory, 'dmesg', 'before'))\n        self.after_file = _f(os.path.join(context.output_directory, 'dmesg', 'after'))\n\n    @slow\n    def start(self, context):\n        with open(self.before_file, 'w') 
as wfh:\n            wfh.write(self.target.execute('dmesg', as_root=self.need_root))\n        context.add_artifact('dmesg_before', self.before_file, kind='data')\n        if self.target.is_rooted:\n            self.target.execute('dmesg -c', as_root=True)\n\n    @slow\n    def stop(self, context):\n        with open(self.after_file, 'w') as wfh:\n            wfh.write(self.target.execute('dmesg', as_root=self.need_root))\n        context.add_artifact('dmesg_after', self.after_file, kind='data')\n\n    def teardown(self, context):  # pylint: disable=unused-argument\n        if self.loglevel:\n            self.target.write_value(self.loglevel_file, self.old_loglevel, verify=False)\n"
  },
  {
    "path": "wa/instruments/energy_measurement.py",
    "content": "#    Copyright 2013-2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\n\n# pylint: disable=W0613,E1101\n\nfrom collections import defaultdict\nimport os\nimport shutil\n\n\nfrom devlib import DerivedEnergyMeasurements\nfrom devlib.instrument import CONTINUOUS\nfrom devlib.instrument.energy_probe import EnergyProbeInstrument\nfrom devlib.instrument.arm_energy_probe import ArmEnergyProbeInstrument\nfrom devlib.instrument.daq import DaqInstrument\nfrom devlib.instrument.acmecape import AcmeCapeInstrument\nfrom devlib.instrument.monsoon import MonsoonInstrument\nfrom devlib.platform.arm import JunoEnergyInstrument\nfrom devlib.utils.misc import which\n\nfrom wa import Instrument, Parameter\nfrom wa.framework import pluginloader\nfrom wa.framework.plugin import Plugin\nfrom wa.framework.exception import ConfigError, InstrumentError\nfrom wa.utils.types import (list_of_strings, list_of_ints, list_or_string,\n                            obj_dict, identifier, list_of_numbers)\n\n\nclass EnergyInstrumentBackend(Plugin):\n\n    name = None\n    kind = 'energy_instrument_backend'\n    parameters = []\n\n    instrument = None\n\n    def get_parameters(self):\n        return {p.name: p for p in self.parameters}\n\n    def validate_parameters(self, params):\n        pass\n\n    def get_instruments(self, target, metadir, **kwargs):\n        \"\"\"\n        Get a dict mapping device keys to an Instruments\n\n        Typically there is just a 
single device/instrument, in which case the\n        device key is arbitrary.\n        \"\"\"\n        return {None: self.instrument(target, **kwargs)}  # pylint: disable=not-callable\n\n\nclass DAQBackend(EnergyInstrumentBackend):\n\n    name = 'daq'\n    description = \"\"\"\n    National Instruments Data Acquisition device\n\n    For more information about the device, please see the NI website:\n\n    http://www.ni.com/data-acquisition/\n\n    This backend has been used with USB-62xx and USB-63xx devices, though other\n    models (e.g. the PCIe variants will most likely also work).\n\n    This backend relies on the daq-server running on a machine connected to a\n    DAQ device:\n\n        https://github.com/ARM-software/daq-server\n\n    The server is necessary because DAQ devices have drivers only for Windows\n    and very specific (old) Linux kernels, so the machine interfacing with the\n    DAQ is most likely going to be different from the machine running WA.\n\n    \"\"\"\n\n    parameters = [\n        Parameter('resistor_values', kind=list_of_numbers,\n                  global_alias='daq_resistor_values',\n                  description=\"\"\"\n                  The values of resistors (in Ohms) across which the voltages\n                  are measured on.\n                  \"\"\"),\n        Parameter('labels', kind=list_of_strings,\n                  global_alias='daq_labels',\n                  description=\"\"\"\n                  List of port labels. 
If specified, the length of the list\n                  must match the length of ``resistor_values``.\n                  \"\"\"),\n        Parameter('host', kind=str, default='localhost',\n                  global_alias='daq_server_host',\n                  description=\"\"\"\n                  The host address of the machine that runs the daq Server which\n                  the instrument communicates with.\n                  \"\"\"),\n        Parameter('port', kind=int, default=45677,\n                  global_alias='daq_server_port',\n                  description=\"\"\"\n                  The port number for daq Server in which daq instrument\n                  communicates with.\n                  \"\"\"),\n        Parameter('device_id', kind=str, default='Dev1',\n                  global_alias='daq_device_id',\n                  description=\"\"\"\n                  The ID under which the DAQ is registered with the driver.\n                  \"\"\"),\n        Parameter('v_range', kind=str, default=2.5,\n                  global_alias='daq_v_range',\n                  description=\"\"\"\n                  Specifies the voltage range for the SOC voltage channel on the\n                  DAQ (please refer to ``daq-server`` package documentation for\n                  details).\n                  \"\"\"),\n        Parameter('dv_range', kind=str, default=0.2,\n                  global_alias='daq_dv_range',\n                  description=\"\"\"\n                  Specifies the voltage range for the resistor voltage channel\n                  on the DAQ (please refer to ``daq-server`` package\n                  documentation for details).\n                  \"\"\"),\n        Parameter('sample_rate_hz', kind=int, default=10000,\n                  global_alias='daq_sampling_rate',\n                  description=\"\"\"\n                  Specify the sample rate in Hz.\n                  \"\"\"),\n        Parameter('channel_map', kind=list_of_ints,\n                  
default=(0, 1, 2, 3, 4, 5, 6, 7, 16, 17, 18, 19, 20, 21, 22, 23),\n                  global_alias='daq_channel_map',\n                  description=\"\"\"\n                  Represents mapping from logical AI channel number to physical\n                  connector on the DAQ (varies between DAQ models). The default\n                  assumes DAQ 6363 and similar with AI channels on connectors\n                  0-7 and 16-23.\n                  \"\"\"),\n        Parameter('keep_raw', kind=bool, default=False,\n                  description=\"\"\"\n                  If set to ``True``, this will prevent the raw files obtained\n                  from the device before processing from being deleted\n                  (this is mainly used for debugging).\n                  \"\"\"),\n    ]\n\n    instrument = DaqInstrument\n\n    def validate_parameters(self, params):\n        if not params.get('resistor_values'):\n            raise ConfigError('Mandatory parameter \"resistor_values\" is not set.')\n        if params.get('labels'):\n            if len(params.get('labels')) != len(params.get('resistor_values')):\n                msg = 'Number of DAQ port labels does not match the number of resistor values.'\n                raise ConfigError(msg)\n\n\nclass EnergyProbeBackend(EnergyInstrumentBackend):\n\n    name = 'energy_probe'\n    description = \"\"\"\n    Arm Energy Probe caiman version\n\n    This backend relies on caiman utility:\n\n        https://github.com/ARM-software/caiman\n\n    For more information about Arm Energy Probe please see\n\n        https://developer.arm.com/products/software-development-tools/ds-5-development-studio/streamline/arm-energy-probe\n\n    \"\"\"\n\n    parameters = [\n        Parameter('resistor_values', kind=list_of_ints,\n                  description=\"\"\"\n                  The values of resistors (in Ohms) across which the voltages\n                  are measured on.\n                  \"\"\"),\n        Parameter('labels', 
kind=list_of_strings,\n                  description=\"\"\"\n                  List of port labels. If specified, the length of the list\n                  must match the length of ``resistor_values``.\n                  \"\"\"),\n        Parameter('device_entry', kind=str, default='/dev/ttyACM0',\n                  description=\"\"\"\n                  Path to /dev entry for the energy probe (it should be /dev/ttyACMx)\n                  \"\"\"),\n        Parameter('keep_raw', kind=bool, default=False,\n                  description=\"\"\"\n                  If set to ``True``, this will prevent the raw files obtained\n                  from the device before processing from being deleted\n                  (this is mainly used for debugging).\n                  \"\"\"),\n    ]\n\n    instrument = EnergyProbeInstrument\n\n    def validate_parameters(self, params):\n        if not params.get('resistor_values'):\n            raise ConfigError('Mandatory parameter \"resistor_values\" is not set.')\n        if params.get('labels'):\n            if len(params.get('labels')) != len(params.get('resistor_values')):\n                msg = 'Number of Energy Probe port labels does not match the number of resistor values.'\n                raise ConfigError(msg)\n\n\nclass ArmEnergyProbeBackend(EnergyInstrumentBackend):\n\n    name = 'arm_energy_probe'\n    description = \"\"\"\n    Arm Energy Probe arm-probe version\n\n    An alternative Arm Energy Probe backend that relies on arm-probe utility:\n\n        https://git.linaro.org/tools/arm-probe.git\n\n    For more information about Arm Energy Probe please see\n\n        https://developer.arm.com/products/software-development-tools/ds-5-development-studio/streamline/arm-energy-probe\n\n\n    \"\"\"\n\n    parameters = [\n        Parameter('config_file', kind=str,\n                  description=\"\"\"\n                  Path to config file of the AEP\n                  \"\"\"),\n        Parameter('keep_raw', kind=bool, 
default=False,\n                  description=\"\"\"\n                  If set to ``True``, this will prevent the raw files obtained\n                  from the device before processing from being deleted\n                  (this is mainly used for debugging).\n                  \"\"\"),\n    ]\n\n    instrument = ArmEnergyProbeInstrument\n\n    def get_instruments(self, target, metadir, **kwargs):\n        \"\"\"\n        Get a dict mapping device keys to an Instruments\n\n        Typically there is just a single device/instrument, in which case the\n        device key is arbitrary.\n        \"\"\"\n\n        shutil.copy(self.config_file, metadir)\n\n        return {None: self.instrument(target, **kwargs)}\n\n    def validate_parameters(self, params):\n        if not params.get('config_file'):\n            raise ConfigError('Mandatory parameter \"config_file\" is not set.')\n        self.config_file = params.get('config_file')\n        if not os.path.exists(self.config_file):\n            raise ConfigError('\"config_file\" does not exist.')\n\n\nclass AcmeCapeBackend(EnergyInstrumentBackend):\n\n    name = 'acme_cape'\n    description = \"\"\"\n    BayLibre ACME cape\n\n    This backend relies on iio-capture utility:\n\n        https://github.com/BayLibre/iio-capture\n\n    For more information about ACME cape please see:\n\n        https://baylibre.com/acme/\n\n    \"\"\"\n\n    parameters = [\n        Parameter('iio-capture', default=which('iio-capture'),\n                  description=\"\"\"\n                  Path to the iio-capture binary will be taken from the\n                  environment, if not specified.\n                  \"\"\"),\n        Parameter('host', default='baylibre-acme.local',\n                  description=\"\"\"\n                  Host name (or IP address) of the ACME cape board.\n                  \"\"\"),\n        Parameter('iio-devices', default='iio:device0',\n                  kind=list_or_string,\n                  description=\"\"\"\n 
                 \"\"\"),\n        Parameter('buffer-size', kind=int, default=256,\n                  description=\"\"\"\n                  Size of the capture buffer (in KB).\n                  \"\"\"),\n        Parameter('keep_raw', kind=bool, default=False,\n                  description=\"\"\"\n                  If set to ``True``, this will prevent the raw files obtained\n                  from the device before processing from being deleted\n                  (this is mainly used for debugging).\n                  \"\"\"),\n    ]\n\n    # pylint: disable=arguments-differ\n    def get_instruments(self, target, metadir,\n                        iio_capture, host, iio_devices, buffer_size, keep_raw):\n\n        #\n        # Devlib's ACME instrument uses iio-capture under the hood, which can\n        # only capture data from one IIO device at a time. Devlib's instrument\n        # API expects to produce a single CSV file for the Instrument, with a\n        # single axis of sample timestamps. These two things cannot be correctly\n        # reconciled without changing the devlib Instrument API - get_data would\n        # need to be able to return two distinct sets of data.\n        #\n        # Instead, where required WA will instantiate the ACME instrument\n        # multiple times (once for each IIO device), producing two separate CSV\n        # files. 
Aggregated energy info _can_ be meaningfully combined from\n        # multiple IIO devices, so we will later sum the derived stats across\n        # each of the channels reported by the instruments.\n        #\n\n        ret = {}\n        for iio_device in iio_devices:\n            ret[iio_device] = AcmeCapeInstrument(\n                target, iio_capture=iio_capture, host=host,\n                iio_device=iio_device, buffer_size=buffer_size, keep_raw=keep_raw)\n        return ret\n\n\nclass MonsoonBackend(EnergyInstrumentBackend):\n\n    name = 'monsoon'\n    description = \"\"\"\n    Monsoon Solutions power monitor\n\n    To use this instrument, you need to install the monsoon.py script available\n    from the Android Open Source Project. As of May 2017 this is under the CTS\n    repository:\n\n        https://android.googlesource.com/platform/cts/+/master/tools/utils/monsoon.py\n\n    Collects power measurements only, from a selection of two channels, the USB\n    passthrough channel and the main output channel.\n\n    \"\"\"\n\n    parameters = [\n        Parameter('monsoon_bin', default=which('monsoon.py'),\n                  description=\"\"\"\n                  Path to monsoon.py executable. If not provided,\n                  ``PATH`` is searched.\n                  \"\"\"),\n        Parameter('tty_device', default='/dev/ttyACM0',\n                  description=\"\"\"\n                  TTY device to use to communicate with the Power\n                  Monitor. 
If not provided, /dev/ttyACM0 is used.\n                  \"\"\")\n    ]\n\n    instrument = MonsoonInstrument\n\n\nclass JunoEnergyBackend(EnergyInstrumentBackend):\n\n    name = 'juno_readenergy'\n    description = \"\"\"\n    Arm Juno development board on-board energy meters\n\n    For more information about Arm Juno board see:\n\n        https://developer.arm.com/products/system-design/development-boards/juno-development-board\n\n    \"\"\"\n\n    instrument = JunoEnergyInstrument\n\n\nclass EnergyMeasurement(Instrument):\n\n    name = 'energy_measurement'\n\n    description = \"\"\"\n    This instrument is designed to be used as an interface to the various\n    energy measurement instruments located in devlib.\n\n    This instrument should be used to provide configuration for any of the\n    Energy Instrument Backends rather than specifying configuration directly.\n    \"\"\"\n\n    parameters = [\n        Parameter('instrument', kind=str, mandatory=True,\n                  allowed_values=['daq', 'energy_probe', 'acme_cape', 'monsoon', 'juno_readenergy', 'arm_energy_probe'],\n                  description=\"\"\"\n                  Specify the energy instruments to be enabled.\n                  \"\"\"),\n        Parameter('instrument_parameters', kind=dict, default={},\n                  description=\"\"\"\n                  Specify the parameters used to initialize the desired\n                  instruments. 
To see parameters available for a particular\n                  instrument, run\n\n                        wa show <instrument name>\n\n                  See help for ``instrument`` parameter to see available\n                  options for <instrument name>.\n\n                   \"\"\"),\n        Parameter('sites', kind=list_or_string,\n                  description=\"\"\"\n                  Specify which sites measurements should be collected\n                  from, if not specified the measurements will be\n                  collected for all available sites.\n                  \"\"\"),\n        Parameter('kinds', kind=list_or_string,\n                  description=\"\"\"\n                  Specify the kinds of measurements should be collected,\n                  if not specified measurements will be\n                  collected for all available kinds.\n                  \"\"\"),\n        Parameter('channels', kind=list_or_string,\n                  description=\"\"\"\n                  Specify the channels to be collected,\n                  if not specified the measurements will be\n                  collected for all available channels.\n                  \"\"\"),\n    ]\n\n    def __init__(self, target, loader=pluginloader, **kwargs):\n        super(EnergyMeasurement, self).__init__(target, **kwargs)\n        self.instruments = None\n        self.measurement_csvs = {}\n        self.loader = loader\n        self.backend = self.loader.get_plugin(self.instrument)\n        self.params = obj_dict()\n\n        instrument_parameters = {identifier(k): v\n                                 for k, v in self.instrument_parameters.items()}\n        supported_params = self.backend.get_parameters()\n        for name, param in supported_params.items():\n            value = instrument_parameters.pop(name, None)\n            param.set_value(self.params, value)\n        if instrument_parameters:\n            msg = 'Unexpected parameters for backend \"{}\": {}'\n            
raise ConfigError(msg.format(self.instrument, instrument_parameters))\n        self.backend.validate_parameters(self.params)\n\n    def initialize(self, context):\n        self.instruments = self.backend.get_instruments(self.target, context.run_output.metadir, **self.params)\n\n        for instrument in self.instruments.values():\n            if not (instrument.mode & CONTINUOUS):  # pylint: disable=superfluous-parens\n                msg = '{} instrument does not support continuous measurement collection'\n                raise ConfigError(msg.format(self.instrument))\n            instrument.setup()\n\n        for channel in self.channels or []:\n            # Check that the expected channels exist.\n            # If there are multiple Instruments, they were all constructed with\n            # the same channels param, so check them all.\n            for instrument in self.instruments.values():\n                if not instrument.get_channels(channel):\n                    raise ConfigError('No channels found for \"{}\"'.format(channel))\n\n    def setup(self, context):\n        for instrument in self.instruments.values():\n            instrument.reset(sites=self.sites,\n                             kinds=self.kinds,\n                             channels=self.channels)\n\n    def start(self, context):\n        for instrument in self.instruments.values():\n            instrument.start()\n\n    def stop(self, context):\n        for instrument in self.instruments.values():\n            instrument.stop()\n\n    def update_output(self, context):\n        for device, instrument in self.instruments.items():\n            # Append the device key to the filename and artifact name, unless\n            # it's None (as it will be for backends with only 1\n            # device/instrument)\n            if len(self.instruments) > 1:\n                name = 'energy_instrument_output_{}'.format(device)\n            else:\n                name = 'energy_instrument_output'\n\n         
   outfile = os.path.join(context.output_directory, '{}.csv'.format(name))\n            measurements = instrument.get_data(outfile)\n            if not measurements:\n                raise InstrumentError(\"Failed to collect energy data from {}\"\n                                      .format(self.backend.name))\n\n            self.measurement_csvs[device] = measurements\n            context.add_artifact(name, measurements.path, 'data',\n                                 classifiers={'device': device})\n        self.extract_metrics(context)\n\n    def extract_metrics(self, context):\n        metrics_by_name = defaultdict(list)\n\n        for device in self.instruments:\n            csv = self.measurement_csvs[device]\n            derived_measurements = DerivedEnergyMeasurements.process(csv)\n            for meas in derived_measurements:\n                # Append the device key to the metric name, unless it's None (as\n                # it will be for backends with only 1 device/instrument)\n                if len(self.instruments) > 1:\n                    metric_name = '{}_{}'.format(meas.name, device)\n                else:\n                    metric_name = meas.name\n                context.add_metric(metric_name, meas.value, meas.units,\n                                   classifiers={'device': device})\n\n                metrics_by_name[meas.name].append(meas)\n\n        # Where we have multiple instruments, add up all the metrics with the\n        # same name. 
For instance with ACME we may have multiple IIO devices\n        # each reporting 'device_energy' and 'device_power', so sum them up to\n        # produce aggregated energy and power metrics.\n        # (Note that metrics_by_name uses the metric name originally reported by\n        #  the devlib instrument, before we potentially appended a device key to\n        #  it)\n        if len(self.instruments) > 1:\n            for name, metrics in metrics_by_name.items():\n                units = metrics[0].units\n                value = sum(m.value for m in metrics)\n                context.add_metric(name, value, units)\n\n    def teardown(self, context):\n        for instrument in self.instruments.values():\n            instrument.teardown()\n"
  },
  {
    "path": "wa/instruments/fps.py",
    "content": "#    Copyright 2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\nimport os\nimport shutil\n\nfrom devlib import SurfaceFlingerFramesInstrument, GfxInfoFramesInstrument\nfrom devlib import DerivedSurfaceFlingerStats, DerivedGfxInfoStats\n\nfrom wa import Instrument, Parameter, WorkloadError\nfrom wa.utils.types import numeric\n\n\nclass FpsInstrument(Instrument):\n\n    name = 'fps'\n    description = \"\"\"\n    Measures Frames Per Second (FPS) and associated metrics for a workload.\n\n    .. note:: This instrument depends on pandas Python library (which is not part of standard\n              WA dependencies), so you will need to install that first, before you can use it.\n\n    Android L and below use SurfaceFlinger to calculate the FPS data.\n    Android M and above use gfxinfo to calculate the FPS data.\n\n    SurfaceFlinger:\n    The view is specified by the workload as ``view`` attribute. 
This defaults\n    to ``'SurfaceView'`` for game workloads, and ``None`` for non-game\n    workloads (as for them FPS measurement usually doesn't make sense).\n    Individual workloads may override this.\n\n    gfxinfo:\n    The view is specified by the workload as ``package`` attribute.\n    This is because gfxinfo already processes for all views in a package.\n\n    \"\"\"\n\n    parameters = [\n        Parameter('drop_threshold', kind=numeric, default=5,\n                  description=\"\"\"\n                  Data points below this FPS will be dropped as they do not\n                  constitute \"real\" gameplay. The assumption being that while\n                  actually running, the FPS in the game will not drop below X\n                  frames per second, except on loading screens, menus, etc,\n                  which should not contribute to FPS calculation.\n                  \"\"\"),\n        Parameter('keep_raw', kind=bool, default=False,\n                  description=\"\"\"\n                  If set to ``True``, this will keep the raw dumpsys output in\n                  the results directory (this is mainly used for debugging)\n                  Note: frames.csv with collected frames data will always be\n                  generated regardless of this setting.\n                   \"\"\"),\n        Parameter('crash_threshold', kind=float, default=0.7,\n                  description=\"\"\"\n                  Specifies the threshold used to decide whether a\n                  measured/expected frames ratio indicates a content crash.\n                  E.g. 
a value of ``0.75`` means the number of actual frames\n                  counted is a quarter lower than expected, it will treated as\n                  a content crash.\n\n                  If set to zero, no crash check will be performed.\n                  \"\"\"),\n        Parameter('period', kind=float, default=2, constraint=lambda x: x > 0,\n                  description=\"\"\"\n                  Specifies the time period between polling frame data in\n                  seconds when collecting frame data. Using a lower value\n                  improves the granularity of timings when recording actions\n                  that take a short time to complete.  Note, this will produce\n                  duplicate frame data in the raw dumpsys output, however, this\n                  is filtered out in frames.csv.  It may also affect the\n                  overall load on the system.\n\n                  The default value of 2 seconds corresponds with the\n                  NUM_FRAME_RECORDS in\n                  android/services/surfaceflinger/FrameTracker.h (as of the\n                  time of writing currently 128) and a frame rate of 60 fps\n                  that is applicable to most devices.\n                  \"\"\"),\n        Parameter('force_surfaceflinger', kind=bool, default=False,\n                  description=\"\"\"\n                  By default, the method to capture fps data is based on\n                  Android version.  
If this is set to true, force the\n                  instrument to use the SurfaceFlinger method regardless of its\n                  Android version.\n                  \"\"\"),\n    ]\n\n    def __init__(self, target, **kwargs):\n        super(FpsInstrument, self).__init__(target, **kwargs)\n        self.collector = None\n        self.processor = None\n        self._is_enabled = None\n\n    def setup(self, context):\n        use_gfxinfo = self.target.get_sdk_version() >= 23 and not self.force_surfaceflinger\n        if use_gfxinfo:\n            collector_target_attr = 'package'\n        else:\n            collector_target_attr = 'view'\n        collector_target = getattr(context.workload, collector_target_attr, None)\n\n        if not collector_target:\n            self._is_enabled = False\n            msg = 'Workload {} does not define a {}; disabling frame collection and FPS evaluation.'\n            self.logger.info(msg.format(context.workload.name, collector_target_attr))\n            return\n\n        self._is_enabled = True\n        if use_gfxinfo:\n            self.collector = GfxInfoFramesInstrument(self.target, collector_target, self.period)\n            self.processor = DerivedGfxInfoStats(self.drop_threshold, filename='fps.csv')\n        else:\n            self.collector = SurfaceFlingerFramesInstrument(self.target, collector_target, self.period)\n            self.processor = DerivedSurfaceFlingerStats(self.drop_threshold, filename='fps.csv')\n        self.collector.reset()\n\n    def start(self, context):  # pylint: disable=unused-argument\n        if not self._is_enabled:\n            return\n        self.collector.start()\n\n    def stop(self, context):  # pylint: disable=unused-argument\n        if not self._is_enabled:\n            return\n        self.collector.stop()\n\n    def update_output(self, context):\n        if not self._is_enabled:\n            return\n        outpath = os.path.join(context.output_directory, 'frames.csv')\n        
frames_csv = self.collector.get_data(outpath)\n        raw_output = self.collector.get_raw()\n\n        processed = self.processor.process(frames_csv)\n        processed.extend(self.processor.process_raw(*raw_output))\n        fps, frame_count, fps_csv = processed[:3]\n        rest = processed[3:]\n\n        context.add_metric(fps.name, fps.value, fps.units)\n        context.add_metric(frame_count.name, frame_count.value, frame_count.units)\n        context.add_artifact('frames', frames_csv.path, kind='raw')\n        context.add_artifact('fps', fps_csv.path, kind='data')\n        for metric in rest:\n            context.add_metric(metric.name, metric.value, metric.units, lower_is_better=True)\n\n        if not self.keep_raw:\n            for entry in raw_output:\n                if os.path.isdir(entry):\n                    shutil.rmtree(entry)\n                elif os.path.isfile(entry):\n                    os.remove(entry)\n\n        if not frame_count.value:\n            context.add_event('Could not find frames data in gfxinfo output')\n            context.set_status('PARTIAL')\n\n        self.check_for_crash(context, fps.value, frame_count.value,\n                             context.current_job.run_time.total_seconds())\n\n    def check_for_crash(self, context, fps, frames, exec_time):\n        if not self.crash_threshold:\n            return\n        self.logger.debug('Checking for crashed content.')\n        if all([exec_time, fps, frames]):\n            expected_frames = fps * exec_time\n            ratio = frames / expected_frames\n            self.logger.debug('actual/expected frames: {:.2}'.format(ratio))\n            if ratio < self.crash_threshold:\n                msg = 'Content for {} appears to have crashed.\\n'.format(context.current_job.spec.label)\n                msg += 'Content crash detected (actual/expected frames: {:.2}).'.format(ratio)\n                raise WorkloadError(msg)\n"
  },
  {
    "path": "wa/instruments/hwmon.py",
    "content": "#    Copyright 2017-2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\nfrom devlib import HwmonInstrument as _Instrument\n\nfrom wa import Instrument\nfrom wa.framework.instrument import fast\n\nMOMENTARY_QUANTITIES = ['temperature', 'power', 'voltage', 'current', 'fps']\nCUMULATIVE_QUANTITIES = ['energy', 'tx', 'tx/rx', 'frames']\n\n\nclass HwmonInstrument(Instrument):\n    name = 'hwmon'\n\n    description = \"\"\"\n    Hardware Monitor (hwmon) is a generic Linux kernel subsystem,\n    providing access to hardware monitoring components like temperature or\n    voltage/current sensors.\n\n    Data from hwmon that are a snapshot of a fluctuating value, such as\n    temperature and voltage, are reported once at the beginning and once at the\n    end of the workload run. 
Data that are a cumulative total of a quantity,\n    such as energy (which is the cumulative total of power consumption), are\n    reported as the difference between the values at the beginning and at the\n    end of the workload run.\n\n    There is currently no functionality to filter sensors: all of the available\n    hwmon data will be reported.\n    \"\"\"\n\n    def initialize(self, context):  # pylint: disable=unused-argument\n        self.instrument = _Instrument(self.target)\n\n    def setup(self, context):  # pylint: disable=unused-argument\n        self.instrument.reset()\n\n    @fast\n    def start(self, context):  # pylint: disable=unused-argument\n        self.before = self.instrument.take_measurement()\n\n    @fast\n    def stop(self, context):  # pylint: disable=unused-argument\n        self.after = self.instrument.take_measurement()\n\n    def update_output(self, context):\n        measurements_before = {m.channel.label: m for m in self.before}\n        measurements_after = {m.channel.label: m for m in self.after}\n\n        if list(measurements_before.keys()) != list(measurements_after.keys()):\n            self.logger.warning(\n                'hwmon before/after measurements returned different entries!')\n\n        for label, measurement_after in measurements_after.items():\n            if label not in measurements_before:\n                continue  # We've already warned about this\n            measurement_before = measurements_before[label]\n\n            if measurement_after.channel.kind in MOMENTARY_QUANTITIES:\n                context.add_metric('{}_before'.format(label),\n                                   measurement_before.value,\n                                   measurement_before.channel.units)\n                context.add_metric('{}_after'.format(label),\n                                   measurement_after.value,\n                                   measurement_after.channel.units)\n\n            elif measurement_after.channel.kind 
in CUMULATIVE_QUANTITIES:\n                diff = measurement_after.value - measurement_before.value\n                context.add_metric(label, diff, measurement_after.channel.units)\n\n            else:\n                self.logger.warning(\n                    \"Don't know what to do with hwmon channel '{}'\"\n                    .format(measurement_after.channel))\n\n    def teardown(self, context):  # pylint: disable=unused-argument\n        self.instrument.teardown()\n"
  },
  {
    "path": "wa/instruments/misc.py",
    "content": "#    Copyright 2013-2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\n\n# pylint: disable=W0613,no-member,attribute-defined-outside-init\n\"\"\"\n\nSome \"standard\" instruments to collect additional info about workload execution.\n\n.. note:: The run() method of a Workload may perform some \"boilerplate\" as well as\n          the actual execution of the workload (e.g. it may contain UI automation\n          needed to start the workload). This \"boilerplate\" execution will also\n          be measured by these instruments. 
As such, they are not suitable for collecting\n          precise data about specific operations.\n\"\"\"\nimport os\nimport logging\nimport time\nimport tarfile\nfrom subprocess import CalledProcessError\n\nfrom devlib.exception import TargetError\n\nfrom wa import Instrument, Parameter, very_fast\nfrom wa.framework.exception import ConfigError\nfrom wa.framework.instrument import slow\nfrom wa.utils.diff import diff_sysfs_dirs, diff_interrupt_files\nfrom wa.utils.misc import as_relative, safe_extract\nfrom wa.utils.misc import ensure_file_directory_exists as _f\nfrom wa.utils.misc import ensure_directory_exists as _d\nfrom wa.utils.types import list_of_strings\nfrom wa.utils.android import get_cacheable_apk_info\n\n\nlogger = logging.getLogger(__name__)\n\n\nclass SysfsExtractor(Instrument):\n\n    name = 'sysfs_extractor'\n    description = \"\"\"\n    Collects the contents of a set of directories, before and after workload execution\n    and diffs the result.\n\n    \"\"\"\n\n    mount_command = 'mount -t tmpfs -o size={} tmpfs {}'\n    extract_timeout = 30\n    tarname = 'sysfs.tar.gz'\n    DEVICE_PATH = 0\n    BEFORE_PATH = 1\n    AFTER_PATH = 2\n    DIFF_PATH = 3\n\n    parameters = [\n        Parameter('paths', kind=list_of_strings, mandatory=True,\n                  description=\"\"\"A list of paths to be pulled from the device. These could be directories\n                                as well as files.\"\"\",\n                  global_alias='sysfs_extract_dirs'),\n        Parameter('use_tmpfs', kind=bool, default=None,\n                  description=\"\"\"\n                  Specifies whether tmpfs should be used to cache sysfile trees and then pull them down\n                  as a tarball. This is significantly faster than just copying the directory trees from\n                  the device directly, but requires root and may not work on all devices. 
Defaults to\n                  ``True`` if the device is rooted and ``False`` if it is not.\n                  \"\"\"),\n        Parameter('tmpfs_mount_point', default=None,\n                  description=\"\"\"Mount point for tmpfs partition used to store snapshots of paths.\"\"\"),\n        Parameter('tmpfs_size', default='32m',\n                  description=\"\"\"Size of the tempfs partition.\"\"\"),\n    ]\n\n    def initialize(self, context):\n        if not self.target.is_rooted and self.use_tmpfs:  # pylint: disable=access-member-before-definition\n            raise ConfigError('use_tempfs must be False for an unrooted device.')\n        elif self.use_tmpfs is None:  # pylint: disable=access-member-before-definition\n            self.use_tmpfs = self.target.is_rooted\n\n        if self.use_tmpfs:\n            self.on_device_before = self.target.path.join(self.tmpfs_mount_point, 'before')\n            self.on_device_after = self.target.path.join(self.tmpfs_mount_point, 'after')\n\n            if not self.target.file_exists(self.tmpfs_mount_point):\n                self.target.execute('mkdir -p {}'.format(self.tmpfs_mount_point), as_root=True)\n                self.target.execute(self.mount_command.format(self.tmpfs_size, self.tmpfs_mount_point),\n                                    as_root=True)\n\n    def setup(self, context):\n        before_dirs = [\n            _d(os.path.join(context.output_directory, 'before', self._local_dir(d)))\n            for d in self.paths\n        ]\n        after_dirs = [\n            _d(os.path.join(context.output_directory, 'after', self._local_dir(d)))\n            for d in self.paths\n        ]\n        diff_dirs = [\n            _d(os.path.join(context.output_directory, 'diff', self._local_dir(d)))\n            for d in self.paths\n        ]\n        self.device_and_host_paths = list(zip(self.paths, before_dirs, after_dirs, diff_dirs))\n\n        if self.use_tmpfs:\n            for d in self.paths:\n                
before_dir = self.target.path.join(self.on_device_before,\n                                                   self.target.path.dirname(as_relative(d)))\n                after_dir = self.target.path.join(self.on_device_after,\n                                                  self.target.path.dirname(as_relative(d)))\n                if self.target.file_exists(before_dir):\n                    self.target.execute('rm -rf  {}'.format(before_dir), as_root=True)\n                self.target.execute('mkdir -p {}'.format(before_dir), as_root=True)\n                if self.target.file_exists(after_dir):\n                    self.target.execute('rm -rf  {}'.format(after_dir), as_root=True)\n                self.target.execute('mkdir -p {}'.format(after_dir), as_root=True)\n\n    @slow\n    def start(self, context):\n        if self.use_tmpfs:\n            for d in self.paths:\n                dest_dir = self.target.path.join(self.on_device_before, as_relative(d))\n                if '*' in dest_dir:\n                    dest_dir = self.target.path.dirname(dest_dir)\n                self.target.execute('{} cp -Hr {} {}'.format(self.target.busybox, d, dest_dir),\n                                    as_root=True, check_exit_code=False)\n        else:  # not rooted\n            for dev_dir, before_dir, _, _ in self.device_and_host_paths:\n                self.target.pull(dev_dir, before_dir)\n\n    @slow\n    def stop(self, context):\n        if self.use_tmpfs:\n            for d in self.paths:\n                dest_dir = self.target.path.join(self.on_device_after, as_relative(d))\n                if '*' in dest_dir:\n                    dest_dir = self.target.path.dirname(dest_dir)\n                self.target.execute('{} cp -Hr {} {}'.format(self.target.busybox, d, dest_dir),\n                                    as_root=True, check_exit_code=False)\n        else:  # not using tmpfs\n            for dev_dir, _, after_dir, _ in self.device_and_host_paths:\n                
self.target.pull(dev_dir, after_dir)\n\n    def update_output(self, context):\n        if self.use_tmpfs:\n            on_device_tarball = self.target.path.join(self.target.working_directory, self.tarname)\n            on_host_tarball = self.target.path.join(context.output_directory, self.tarname)\n            self.target.execute('{} tar czf {} -C {} .'.format(self.target.busybox,\n                                                               on_device_tarball,\n                                                               self.tmpfs_mount_point),\n                                as_root=True)\n            self.target.execute('chmod 0777 {}'.format(on_device_tarball), as_root=True)\n            self.target.pull(on_device_tarball, on_host_tarball)\n            with tarfile.open(on_host_tarball, 'r:gz') as tf:\n                safe_extract(tf, context.output_directory)\n            self.target.remove(on_device_tarball)\n            os.remove(on_host_tarball)\n\n        for paths in self.device_and_host_paths:\n            after_dir = paths[self.AFTER_PATH]\n            dev_dir = paths[self.DEVICE_PATH].strip('*')  # remove potential trailing '*'\n            if (not os.listdir(after_dir)\n                    and self.target.file_exists(dev_dir)\n                    and self.target.list_directory(dev_dir)):\n                self.logger.error('sysfs files were not pulled from the device.')\n                self.device_and_host_paths.remove(paths)  # Path is removed to skip diffing it\n        for dev_dir, before_dir, after_dir, diff_dir in self.device_and_host_paths:\n            diff_sysfs_dirs(before_dir, after_dir, diff_dir)\n            context.add_artifact('{} [before]'.format(dev_dir), before_dir,\n                                 kind='data', classifiers={'stage': 'before'})\n            context.add_artifact('{} [after]'.format(dev_dir), after_dir,\n                                 kind='data', classifiers={'stage': 'after'})\n            
context.add_artifact('{} [diff]'.format(dev_dir), diff_dir,\n                                 kind='data', classifiers={'stage': 'diff'})\n\n    def teardown(self, context):\n        self._one_time_setup_done = []\n\n    def finalize(self, context):\n        if self.use_tmpfs:\n            try:\n                self.target.execute('umount {}'.format(self.tmpfs_mount_point), as_root=True)\n            except (TargetError, CalledProcessError):\n                # assume a directory but not mount point\n                pass\n            self.target.execute('rm -rf {}'.format(self.tmpfs_mount_point),\n                                as_root=True, check_exit_code=False)\n\n    def validate(self):\n        if not self.tmpfs_mount_point:  # pylint: disable=access-member-before-definition\n            self.tmpfs_mount_point = self.target.get_workpath('temp-fs')\n\n    def _local_dir(self, directory):\n        return os.path.dirname(as_relative(directory).replace(self.target.path.sep, os.sep))\n\n\nclass ExecutionTimeInstrument(Instrument):\n\n    name = 'execution_time'\n    description = \"\"\"\n    Measure how long it took to execute the run() methods of a Workload.\n\n    \"\"\"\n\n    def __init__(self, target, **kwargs):\n        super(ExecutionTimeInstrument, self).__init__(target, **kwargs)\n        self.start_time = None\n        self.end_time = None\n\n    @very_fast\n    def start(self, context):\n        self.start_time = time.time()\n\n    @very_fast\n    def stop(self, context):\n        self.end_time = time.time()\n\n    def update_output(self, context):\n        execution_time = self.end_time - self.start_time\n        context.add_metric('execution_time', execution_time, 'seconds')\n\n\nclass ApkVersion(Instrument):\n\n    name = 'apk_version'\n    description = \"\"\"\n    Extracts APK versions for workloads that have them.\n\n    \"\"\"\n\n    def __init__(self, device, **kwargs):\n        super(ApkVersion, self).__init__(device, **kwargs)\n        
self.apk_info = None\n\n    def setup(self, context):\n        if hasattr(context.workload, 'apk_file'):\n            self.apk_info = get_cacheable_apk_info(context.workload.apk_file)\n        else:\n            self.apk_info = None\n\n    def update_output(self, context):\n        if self.apk_info:\n            context.result.add_metric(self.name, self.apk_info.version_name)\n\n\nclass InterruptStatsInstrument(Instrument):\n\n    name = 'interrupts'\n    description = \"\"\"\n    Pulls the ``/proc/interrupts`` file before and after workload execution and diffs them\n    to show what interrupts  occurred during that time.\n\n    \"\"\"\n\n    def __init__(self, target, **kwargs):\n        super(InterruptStatsInstrument, self).__init__(target, **kwargs)\n        self.before_file = None\n        self.after_file = None\n        self.diff_file = None\n\n    def setup(self, context):\n        self.before_file = os.path.join(context.output_directory, 'before', 'proc', 'interrupts')\n        self.after_file = os.path.join(context.output_directory, 'after', 'proc', 'interrupts')\n        self.diff_file = os.path.join(context.output_directory, 'diff', 'proc', 'interrupts')\n\n    def start(self, context):\n        with open(_f(self.before_file), 'w') as wfh:\n            wfh.write(self.target.execute('cat /proc/interrupts'))\n\n    def stop(self, context):\n        with open(_f(self.after_file), 'w') as wfh:\n            wfh.write(self.target.execute('cat /proc/interrupts'))\n\n    def update_output(self, context):\n        context.add_artifact('interrupts [before]', self.before_file, kind='data',\n                             classifiers={'stage': 'before'})\n        # If workload execution failed, the after_file may not have been created.\n        if os.path.isfile(self.after_file):\n            diff_interrupt_files(self.before_file, self.after_file, _f(self.diff_file))\n            context.add_artifact('interrupts [after]', self.after_file, kind='data',\n                 
                classifiers={'stage': 'after'})\n            context.add_artifact('interrupts [diff]', self.diff_file, kind='data',\n                                 classifiers={'stage': 'diff'})\n\n\nclass DynamicFrequencyInstrument(SysfsExtractor):\n\n    name = 'cpufreq'\n    description = \"\"\"\n    Collects dynamic frequency (DVFS) settings before and after workload execution.\n\n    \"\"\"\n\n    tarname = 'cpufreq.tar.gz'\n\n    parameters = [\n        Parameter('paths', mandatory=False, override=True),\n    ]\n\n    def setup(self, context):\n        self.paths = ['/sys/devices/system/cpu']\n        if self.use_tmpfs:\n            self.paths.append('/sys/class/devfreq/*')  # the '*' would cause problems for adb pull.\n        super(DynamicFrequencyInstrument, self).setup(context)\n\n    def validate(self):\n        super(DynamicFrequencyInstrument, self).validate()\n        if not self.tmpfs_mount_point.endswith('-cpufreq'):  # pylint: disable=access-member-before-definition\n            self.tmpfs_mount_point += '-cpufreq'\n"
  },
  {
    "path": "wa/instruments/perf.py",
    "content": "#    Copyright 2013-2015 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\n\n# pylint: disable=unused-argument\nimport csv\nimport os\nimport re\n\nfrom devlib.collector.perf import PerfCollector\n\nfrom wa import Instrument, Parameter, ConfigError\nfrom wa.utils.types import list_or_string, list_of_strs, numeric\n\nPERF_COUNT_REGEX = re.compile(r'^(CPU\\d+)?\\s*(\\d+)\\s*(.*?)\\s*(\\[\\s*\\d+\\.\\d+%\\s*\\])?\\s*$')\n\n\nclass PerfInstrument(Instrument):\n\n    name = 'perf'\n    description = \"\"\"\n    Perf is a Linux profiling tool with performance counters.\n    Simpleperf is an Android profiling tool with performance counters.\n\n    It is highly recomended to use perf_type = simpleperf when using this instrument\n    on android devices since it recognises android symbols in record mode and is much more stable\n    when reporting record .data files. For more information see simpleperf documentation at:\n    https://android.googlesource.com/platform/system/extras/+/master/simpleperf/doc/README.md\n\n    Performance counters are CPU hardware registers that count hardware events\n    such as instructions executed, cache-misses suffered, or branches\n    mispredicted. They form a basis for profiling applications to trace dynamic\n    control flow and identify hotspots.\n\n    perf accepts options and events. If no option is given the default '-a' is\n    used. For events, the default events for perf are migrations and cs. 
The default\n    events for simpleperf are raw-cpu-cycles, raw-l1-dcache, raw-l1-dcache-refill, raw-instructions-retired.\n    They both can be specified in the config file.\n\n    Events must be provided as a list that contains them and they will look like\n    this ::\n\n        (for perf_type = perf ) perf_events = ['migrations', 'cs']\n        (for perf_type = simpleperf) perf_events = ['raw-cpu-cycles', 'raw-l1-dcache']\n\n\n    Events can be obtained by typing the following in the command line on the\n    device ::\n\n        perf list\n        simpleperf list\n\n    Whereas options, they can be provided as a single string as following ::\n\n        perf_options = '-a -i'\n        perf_options = '--app com.adobe.reader'\n\n    Options can be obtained by running the following in the command line ::\n\n        man perf-stat\n    \"\"\"\n\n    parameters = [\n        Parameter('perf_type', kind=str, allowed_values=['perf', 'simpleperf'], default='perf',\n                  global_alias='perf_type', description=\"\"\"Specifies which type of perf binaries\n                  to install. Use simpleperf for collecting perf data on android systems.\"\"\"),\n        Parameter('command', kind=str, default='stat', allowed_values=['stat', 'record'],\n                  global_alias='perf_command', description=\"\"\"Specifies which perf command to use. If in record mode\n                  report command will also be executed and results pulled from target along with raw data\n                  file\"\"\"),\n        Parameter('events', kind=list_of_strs, global_alias='perf_events',\n                  description=\"\"\"Specifies the events to be counted.\"\"\"),\n        Parameter('optionstring', kind=list_or_string, default='-a',\n                  global_alias='perf_options',\n                  description=\"\"\"Specifies options to be used for the perf command. 
This\n                  may be a list of option strings, in which case, multiple instances of perf\n                  will be kicked off -- one for each option string. This may be used to e.g.\n                  collected different events from different big.LITTLE clusters. In order to\n                  profile a particular application process for android with simpleperf use\n                  the --app option e.g. --app com.adobe.reader\n                  \"\"\"),\n        Parameter('report_option_string', kind=str, global_alias='perf_report_options', default=None,\n                  description=\"\"\"Specifies options to be used to gather report when record command\n                  is used. It's highly recommended to use perf_type simpleperf when running on\n                  android devices as reporting options are unstable with perf\"\"\"),\n        Parameter('run_report_sample', kind=bool, default=False, description=\"\"\"If true, run\n                  'perf/simpleperf report-sample'. 
It only works with the record command.\"\"\"),\n        Parameter('report_sample_options', kind=str, default=None,\n                  description=\"\"\"Specifies options to pass to report-samples when run_report_sample\n                  is true.\"\"\"),\n        Parameter('labels', kind=list_of_strs, default=None,\n                  global_alias='perf_labels',\n                  description=\"\"\"Provides labels for perf/simpleperf output for each optionstring.\n                  If specified, the number of labels must match the number of ``optionstring``\\ s.\n                  \"\"\"),\n        Parameter('force_install', kind=bool, default=False,\n                  description=\"\"\"\n                  always install perf binary even if perf is already present on the device.\n                  \"\"\"),\n        Parameter('validate_pmu_events', kind=bool, default=True,\n                  description=\"\"\"\n                  Query the hardware capabilities to verify the specified PMU events.\n                  \"\"\"),\n    ]\n\n    def __init__(self, target, **kwargs):\n        super(PerfInstrument, self).__init__(target, **kwargs)\n        self.collector = None\n        self.outdir = None\n\n    def validate(self):\n        if self.report_option_string and (self.command != \"record\"):\n            raise ConfigError(\"report_option_string only works with perf/simpleperf record. Set command to record or remove report_option_string\")\n        if self.report_sample_options and (self.command != \"record\"):\n            raise ConfigError(\"report_sample_options only works with perf/simpleperf record. Set command to record or remove report_sample_options\")\n        if self.run_report_sample and (self.command != \"record\"):\n            raise ConfigError(\"run_report_sample only works with perf/simpleperf record. 
Set command to record or remove run_report_sample\")\n\n    def initialize(self, context):\n        if self.report_sample_options:\n            self.run_report_sample = True\n\n        self.collector = PerfCollector(self.target,\n                                       self.perf_type,\n                                       self.command,\n                                       self.events,\n                                       self.optionstring,\n                                       self.report_option_string,\n                                       self.run_report_sample,\n                                       self.report_sample_options,\n                                       self.labels,\n                                       self.force_install,\n                                       self.validate_pmu_events)\n\n    def setup(self, context):\n        self.outdir = os.path.join(context.output_directory, self.perf_type)\n        self.collector.set_output(self.outdir)\n        self.collector.reset()\n\n    def start(self, context):\n        self.collector.start()\n\n    def stop(self, context):\n        self.collector.stop()\n\n    def update_output(self, context):\n        self.logger.info('Extracting reports from target...')\n        self.collector.get_data()\n\n        if self.perf_type == 'perf':\n            self._process_perf_output(context)\n        else:\n            self._process_simpleperf_output(context)\n\n    def teardown(self, context):\n        self.collector.reset()\n\n    def _process_perf_output(self, context):\n        if self.command == 'stat':\n            self._process_perf_stat_output(context)\n        elif self.command == 'record':\n            self._process_perf_record_output(context)\n\n    def _process_simpleperf_output(self, context):\n        if self.command == 'stat':\n            self._process_simpleperf_stat_output(context)\n        elif self.command == 'record':\n            self._process_simpleperf_record_output(context)\n\n   
 def _process_perf_stat_output(self, context):\n        for host_file in os.listdir(self.outdir):\n            label = host_file.split('.out')[0]\n            host_file_path = os.path.join(self.outdir, host_file)\n            context.add_artifact(label, host_file_path, 'raw')\n            with open(host_file_path) as fh:\n                in_results_section = False\n                for line in fh:\n                    if 'Performance counter stats' in line:\n                        in_results_section = True\n                        next(fh)  # skip the following blank line\n                    if not in_results_section:\n                        continue\n                    if not line.strip():  # blank line\n                        in_results_section = False\n                        break\n                    else:\n                        self._add_perf_stat_metric(line, label, context)\n\n    @staticmethod\n    def _add_perf_stat_metric(line, label, context):\n        line = line.split('#')[0]  # comment\n        match = PERF_COUNT_REGEX.search(line)\n        if not match:\n            return\n        classifiers = {}\n        cpu = match.group(1)\n        if cpu is not None:\n            classifiers['cpu'] = int(cpu.replace('CPU', ''))\n        count = int(match.group(2))\n        metric = '{}_{}'.format(label, match.group(3))\n        context.add_metric(metric, count, classifiers=classifiers)\n\n    def _process_perf_record_output(self, context):\n        for host_file in os.listdir(self.outdir):\n            label, ext = os.path.splitext(host_file)\n            context.add_artifact(label, os.path.join(self.outdir, host_file), 'raw')\n            column_headers = []\n            column_header_indeces = []\n            event_type = ''\n            if ext == '.rpt':\n                with open(os.path.join(self.outdir, host_file)) as fh:\n                    for line in fh:\n                        words = line.split()\n                        if not words:\n      
                      continue\n                        event_type = self._get_report_event_type(words, event_type)\n                        column_headers = self._get_report_column_headers(column_headers, words, 'perf')\n                        for column_header in column_headers:\n                            column_header_indeces.append(line.find(column_header))\n                        self._add_report_metric(column_headers,\n                                                column_header_indeces,\n                                                line,\n                                                words,\n                                                context,\n                                                event_type,\n                                                label)\n\n    @staticmethod\n    def _get_report_event_type(words, event_type):\n        if words[0] != '#':\n            return event_type\n        if len(words) == 6 and words[4] == 'event':\n            event_type = words[5]\n            event_type = event_type.strip(\"'\")\n        return event_type\n\n    def _process_simpleperf_stat_output(self, context):\n        labels = []\n        for host_file in os.listdir(self.outdir):\n            labels.append(host_file.split('.out')[0])\n        for opts, label in zip(self.optionstring, labels):\n            stat_file = os.path.join(self.outdir, '{}{}'.format(label, '.out'))\n            if '--csv' in opts:\n                self._process_simpleperf_stat_from_csv(stat_file, context, label)\n            else:\n                self._process_simpleperf_stat_from_raw(stat_file, context, label)\n\n    @staticmethod\n    def _process_simpleperf_stat_from_csv(stat_file, context, label):\n        with open(stat_file) as csv_file:\n            readCSV = csv.reader(csv_file, delimiter=',')\n            line_num = 0\n            for row in readCSV:\n                if 'Performance counter statistics' not in row and 'Total test time' not in row:\n               
     classifiers = {}\n                    if '%' in row:\n                        classifiers['scaled from(%)'] = row[len(row) - 2].replace('(', '').replace(')', '').replace('%', '')\n                    context.add_metric('{}_{}'.format(label, row[1]), row[0], 'count', classifiers=classifiers)\n                line_num += 1\n\n    @staticmethod\n    def _process_simpleperf_stat_from_raw(stat_file, context, label):\n        with open(stat_file) as fh:\n            for line in fh:\n                if '#' in line and not line.startswith('#'):\n                    units = 'count'\n                    if \"(ms)\" in line:\n                        line = line.replace(\"(ms)\", \"\")\n                        units = 'ms'\n                    tmp_line = line.split('#')[0]\n                    tmp_line = tmp_line.strip()\n                    count, metric = tmp_line.split(' ')[0], tmp_line.split(' ')[2]\n                    count = float(count) if \".\" in count else int(count.replace(',', ''))\n                    classifiers = {}\n                    if '%' in line:\n                        scaled_percentage = line.split('(')[1].strip().replace(')', '').replace('%', '')\n                        classifiers['scaled from(%)'] = int(scaled_percentage)\n                    metric = '{}_{}'.format(label, metric)\n                    context.add_metric(metric, count, units, classifiers=classifiers)\n\n    def _process_simpleperf_record_output(self, context):\n        for host_file in os.listdir(self.outdir):\n            label, ext = os.path.splitext(host_file)\n            context.add_artifact(label, os.path.join(self.outdir, host_file), 'raw')\n            if ext != '.rpt':\n                continue\n            column_headers = []\n            column_header_indeces = []\n            event_type = ''\n            with open(os.path.join(self.outdir, host_file)) as fh:\n                for line in fh:\n                    words = line.split()\n                    if not words:\n   
                     continue\n                    if words[0] == 'Event:':\n                        event_type = words[1]\n                    column_headers = self._get_report_column_headers(column_headers,\n                                                                     words,\n                                                                     'simpleperf')\n                    for column_header in column_headers:\n                        column_header_indeces.append(line.find(column_header))\n                    self._add_report_metric(column_headers,\n                                            column_header_indeces,\n                                            line,\n                                            words,\n                                            context,\n                                            event_type,\n                                            label)\n\n    @staticmethod\n    def _get_report_column_headers(column_headers, words, perf_type):\n        if 'Overhead' not in words:\n            return column_headers\n        if perf_type == 'perf':\n            words.remove('#')\n        column_headers = words\n        # Concatenate Shared Objects header\n        if 'Shared' in column_headers:\n            shared_index = column_headers.index('Shared')\n            column_headers[shared_index:shared_index + 2] = ['{} {}'.format(column_headers[shared_index],\n                                                                            column_headers[shared_index + 1])]\n        return column_headers\n\n    @staticmethod\n    def _add_report_metric(column_headers, column_header_indeces, line, words, context, event_type, label):\n        if '%' not in words[0]:\n            return\n        classifiers = {}\n        for i in range(1, len(column_headers)):\n            classifiers[column_headers[i]] = line[column_header_indeces[i]:column_header_indeces[i + 1]].strip()\n\n        context.add_metric('{}_{}_Overhead'.format(label, 
event_type),\n                           numeric(words[0].strip('%')),\n                           'percent',\n                           classifiers=classifiers)\n"
  },
  {
    "path": "wa/instruments/perfetto.py",
    "content": "#    Copyright 2023 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\nimport os\n\nfrom devlib import PerfettoCollector\n\nfrom wa import Instrument, Parameter\nfrom wa.framework.instrument import very_slow, is_installed\nfrom wa.framework.exception import InstrumentError\n\nOUTPUT_PERFETTO_TRACE = 'devlib-trace.perfetto-trace'\nPERFETTO_CONFIG_FILE = 'config.pbtx'\n\n\nclass PerfettoInstrument(Instrument):\n    name = 'perfetto'\n    description = \"\"\"\n        perfetto is an instrument that interacts with Google's Perfetto tracing\n        infrastructure.\n\n        From Perfetto's website:\n        Perfetto is a production-grade open-source stack for performance instrumentation and trace analysis.\n        It offers services and libraries for recording system-level and app-level traces, native + java heap profiling,\n        a library for analyzing traces using SQL and a web-based UI to visualize and explore multi-GB traces.\n\n        The instrument either requires Perfetto to be present on the target device or the standalone tracebox binary\n        to be built from source and included in devlib's Package Bin directory.\n        For more information, consult the PerfettoCollector documentation in devlib.\n\n        More information can be found on https://perfetto.dev/\n    \"\"\"\n\n    parameters = [\n        Parameter('config', kind=str, mandatory=True,\n                  description=\"\"\"\n                  Path to 
the Perfetto trace config file.\n\n                  All the Perfetto-specific tracing configuration should be done inside\n                  that file. This config option should just take a full\n                  filesystem path to where the config can be found.\n                  \"\"\"),\n        Parameter('force_tracebox', kind=bool, default=False,\n                  description=\"\"\"\n                  Install tracebox even if traced is already running on the target device.\n                  If set to true, the tracebox binary needs to be placed in devlib's Package Bin directory.\n                  \"\"\")\n    ]\n\n    def __init__(self, target, **kwargs):\n        super(PerfettoInstrument, self).__init__(target, **kwargs)\n        self.collector = None\n\n    def initialize(self, context):  # pylint: disable=unused-argument\n        self.target_config = self.target.path.join(self.target.working_directory, PERFETTO_CONFIG_FILE)\n        # push the config file to target\n        self.target.push(self.config, self.target_config)\n        collector_params = dict(\n            config=self.target_config,\n            force_tracebox=self.force_tracebox\n        )\n        self.collector = PerfettoCollector(self.target, **collector_params)\n\n    @very_slow\n    def start(self, context):  # pylint: disable=unused-argument\n        self.collector.start()\n\n    @very_slow\n    def stop(self, context):  # pylint: disable=unused-argument\n        self.collector.stop()\n\n    def update_output(self, context):\n        self.logger.info('Extracting Perfetto trace from target...')\n        outfile = os.path.join(context.output_directory, OUTPUT_PERFETTO_TRACE)\n        self.collector.set_output(outfile)\n        self.collector.get_data()\n        context.add_artifact('perfetto-bin', outfile, 'data')\n\n    def teardown(self, context):  # pylint: disable=unused-argument\n        self.target.remove(self.collector.target_output_file)\n\n    def finalize(self, context):  # 
pylint: disable=unused-argument\n        self.target.remove(self.target_config)\n\n    def validate(self):\n        if is_installed('trace-cmd'):\n            raise InstrumentError('perfetto cannot be used at the same time as trace-cmd')\n        if not os.path.isfile(self.config):\n            raise InstrumentError('perfetto config file not found at \"{}\"'.format(self.config))\n"
  },
  {
    "path": "wa/instruments/poller/Makefile",
    "content": "# CROSS_COMPILE=aarch64-linux-gnu- make\n#\nCC=gcc\n\nifdef DEBUG\n\tCFLAGS=-static -lc -g\nelse\n\tCFLAGS=-static -s -lc -O2\nendif\n\npoller: poller.c\n\t$(CROSS_COMPILE)$(CC) $(CFLAGS) poller.c -o poller\n\nclean:\n\trm -rf poller\n\n.PHONY: clean\n"
  },
  {
    "path": "wa/instruments/poller/__init__.py",
    "content": "#    Copyright 2015-2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n# pylint: disable=access-member-before-definition,attribute-defined-outside-init,unused-argument\nimport os\n\nimport pandas as pd\n\nfrom wa import Instrument, Parameter, Executable\nfrom wa.framework import signal\nfrom wa.framework.exception import ConfigError, InstrumentError\nfrom wa.utils.trace_cmd import TraceCmdParser\nfrom wa.utils.types import list_or_string\n\n\nclass FilePoller(Instrument):\n    name = 'file_poller'\n    description = \"\"\"\n    Polls the given files at a set sample interval. 
The values are output in CSV format.\n\n    This instrument places a file called poller.csv in each iteration's result directory.\n    This file will contain a timestamp column which will be in uS, the rest of the columns\n    will be the contents of the polled files at that time.\n\n    This instrument will strip any commas or new lines for the files' values\n    before writing them.\n    \"\"\"\n\n    parameters = [\n        Parameter('sample_interval', kind=int, default=1000,\n                  description=\"\"\"The interval between samples in mS.\"\"\"),\n        Parameter('files', kind=list_or_string, mandatory=True,\n                  description=\"\"\"A list of paths to the files to be polled\"\"\"),\n        Parameter('labels', kind=list_or_string,\n                  description=\"\"\"\n                  A list of labels to be used in the CSV output for the\n                  corresponding files. This cannot be used if a `*` wildcard is\n                  used in a path.\n                  \"\"\"),\n        Parameter('align_with_ftrace', kind=bool, default=False,\n                  description=\"\"\"\n                  Insert a marker into ftrace that aligns with the first\n                  timestamp. During output processing, extract the marker\n                  and use its timestamp to adjust the timestamps in the collected\n                  csv so that they align with ftrace.\n                  \"\"\"),\n        Parameter('as_root', kind=bool, default=False,\n                  description=\"\"\"\n                  Whether or not the poller will be run as root. This should be\n                  used when the file you need to poll can only be accessed by root.\n                  \"\"\"),\n        Parameter('reopen', kind=bool, default=False,\n                  description=\"\"\"\n                  When enabled files will be re-opened with each read. 
This is\n                  useful for some sysfs/debugfs entries that only generate a\n                  value when opened.\n                  \"\"\"),\n    ]\n\n    def validate(self):\n        if not self.files:\n            raise ConfigError('You must specify at least one file to poll')\n        if self.labels and any(['*' in f for f in self.files]):\n            raise ConfigError('You cannot use manual labels with `*` wildcards')\n\n    def initialize(self, context):\n        if not self.target.is_rooted and self.as_root:\n            raise ConfigError('The target is not rooted, cannot run poller as root.')\n        host_poller = context.get_resource(Executable(self, self.target.abi,\n                                                      \"poller\"))\n        target_poller = self.target.install(host_poller)\n\n        expanded_paths = []\n        for path in self.files:\n            if \"*\" in path:\n                for p in self.target.list_directory(path):\n                    expanded_paths.append(p)\n            else:\n                expanded_paths.append(path)\n        self.files = expanded_paths\n        if not self.labels:\n            self.labels = self._generate_labels()\n\n        self.target_output_path = self.target.path.join(self.target.working_directory, 'poller.csv')\n        self.target_log_path = self.target.path.join(self.target.working_directory, 'poller.log')\n        marker_option = ''\n        if self.align_with_ftrace:\n            marker_option = '-m'\n            signal.connect(self._adjust_timestamps, signal.AFTER_JOB_OUTPUT_PROCESSED)\n        reopen_option = ''\n        if self.reopen:\n            reopen_option = '-r'\n        self.command = '{} {} -t {} {} -l {} {} > {} 2>{}'.format(target_poller,\n                                                                  reopen_option,\n                                                                  self.sample_interval * 1000,\n                                                         
        marker_option,\n                                                                  ','.join(self.labels),\n                                                                  ' '.join(self.files),\n                                                                  self.target_output_path,\n                                                                  self.target_log_path)\n\n    def start(self, context):\n        self.target.kick_off(self.command, as_root=self.as_root)\n\n    def stop(self, context):\n        self.target.killall('poller', signal='TERM', as_root=self.as_root)\n\n    def update_output(self, context):\n        host_output_file = os.path.join(context.output_directory, 'poller.csv')\n        self.target.pull(self.target_output_path, host_output_file)\n        context.add_artifact('poller-output', host_output_file, kind='data')\n\n        host_log_file = os.path.join(context.output_directory, 'poller.log')\n        self.target.pull(self.target_log_path, host_log_file)\n        context.add_artifact('poller-log', host_log_file, kind='log')\n\n        with open(host_log_file) as fh:\n            for line in fh:\n                if 'ERROR' in line:\n                    raise InstrumentError(line.strip())\n                if 'WARNING' in line:\n                    self.logger.warning(line.strip())\n\n    def teardown(self, context):\n        self.target.remove(self.target_output_path)\n        self.target.remove(self.target_log_path)\n\n    def _generate_labels(self):\n        # Split paths into their parts\n        path_parts = [f.split(self.target.path.sep) for f in self.files]\n        # Identify which parts differ between at least two of the paths\n        differ_map = [len(set(x)) > 1 for x in zip(*path_parts)]\n\n        # compose labels from path parts that differ\n        labels = []\n        for pp in path_parts:\n            label_parts = [p for i, p in enumerate(pp[:-1])\n                           if i >= len(differ_map) or 
differ_map[i]]\n            label_parts.append(pp[-1])  # always use file name even if same for all\n            labels.append('-'.join(label_parts))\n        return labels\n\n    def _adjust_timestamps(self, context):\n        output_file = context.get_artifact_path('poller-output')\n        message = 'Adjusting timestamps inside \"{}\" to align with ftrace'\n        self.logger.debug(message.format(output_file))\n\n        trace_txt = context.get_artifact_path('trace-cmd-txt')\n        trace_parser = TraceCmdParser(filter_markers=False)\n        marker_timestamp = None\n        for event in trace_parser.parse(trace_txt):\n            if event.name == 'print' and 'POLLER_START' in event.text:\n                marker_timestamp = event.timestamp\n                break\n\n        if marker_timestamp is None:\n            raise InstrumentError('Did not see poller marker in ftrace')\n\n        df = pd.read_csv(output_file)\n        df.time -= df.time[0]\n        df.time += marker_timestamp\n        df.to_csv(output_file, index=False)\n"
  },
  {
    "path": "wa/instruments/poller/poller.c",
    "content": "/*    Copyright 2018 ARM Limited\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n*/\n\n#include <fcntl.h>\n#include <stdio.h>\n#include <sys/poll.h>\n#include <time.h>\n#include <unistd.h>\n#include <errno.h>\n#include <signal.h>\n#include <string.h>\n#include <stdlib.h>\n\nvolatile sig_atomic_t done = 0;\nvoid term(int signum)\n{\n    done = 1;\n}\n\nvoid strip(char *s) {\n    char *stripped_s = s;\n    while(*s != '\\0') {\n        if(*s != ',' && *s != '\\n') {\n            *stripped_s++ = *s++;\n        } else {\n            ++s;\n        }\n    }\n    *stripped_s = '\\0';\n}\n\ntypedef struct {\n        int fd;\n        char *path;\n} poll_source_t;\n\n\nint write_trace_marker(char *marker, int size)\n{\n        int ret;\n        FILE *file;\n\n        file = fopen(\"/sys/kernel/debug/tracing/trace_marker\", \"w\");\n        if (file == NULL) {\n                return -errno;\n        }\n\n        ret = fwrite(marker, sizeof(char), size, file);\n\n        fclose(file);\n        return ret;\n}\n\nint main(int argc, char ** argv) {\n\n    extern char *optarg;\n    extern int optind;\n    int c = 0;\n    int show_help = 0;\n    useconds_t interval = 1000000;\n    char buf[1024];\n    memset(buf, 0, sizeof(buf));\n    struct timespec current_time;\n    double time_float;\n    char *labels;\n    int labelCount = 0;\n    int should_write_marker = 0;\n    int reopen_files = 0;\n    int ret;\n\n    static char usage[] = \"usage: %s 
[-h] [-m] [-r] [-t INTERVAL] FILE [FILE ...]\\n\"\n                          \"polls FILE(s) every INTERVAL microseconds and outputs\\n\"\n                          \"the results in CSV format including a timestamp to STDOUT\\n\"\n                          \"\\n\"\n                          \"    -h     Display this message\\n\"\n                          \"    -m     Insert a marker into ftrace at the time of the first\\n\"\n                          \"           sample. This marker may be used to align the timestamps\\n\"\n                          \"           produced by the poller with those of ftrace events.\\n\"\n                          \"    -r     Reopen files on each read (needed for some sysfs/debugfs files)\\n\"\n                          \"    -t     The polling sample interval in microseconds\\n\"\n                          \"           Defaults to 1000000 (1 second)\\n\"\n                          \"    -l     Comma separated list of labels to use in the CSV\\n\"\n                          \"           output. 
This should match the number of files\\n\";\n\n\n    //Handling command line arguments\n    while ((c = getopt(argc, argv, \"hmrt:l:\")) != -1)\n    {\n        switch(c) {\n            case 'h':\n            case '?':\n            default:\n                show_help = 1;\n                break;\n            case 'm':\n                should_write_marker = 1;\n                break;\n            case 'r':\n                reopen_files = 1;\n                break;\n            case 't':\n                interval = (useconds_t)atoi(optarg);\n                break;\n            case 'l':\n                labels = optarg;\n                labelCount = 1;\n                int i;\n                for (i=0; labels[i]; i++)\n                    labelCount += (labels[i] == ',');\n        }\n    }\n\n    if (show_help) {\n        fprintf(stderr, usage, argv[0]);\n        exit(1);\n    }\n\n    if (optind >= argc) {\n        fprintf(stderr, \"ERROR: %s: missing file path(s)\\n\", argv[0]);\n        fprintf(stderr, usage, argv[0]);\n        exit(1);\n    }\n\n    int num_files = argc - optind;\n    poll_source_t files_to_poll[num_files];\n\n    if (labelCount && labelCount != num_files)\n    {\n        fprintf(stderr, \"ERROR: %s: %d labels specified but %d files specified\\n\",\n                argv[0], labelCount, num_files);\n        fprintf(stderr, usage, argv[0]);\n        exit(1);\n    }\n\n    //Print headers and open files to poll\n    printf(\"time\");\n    if(labelCount)\n    {\n        printf(\",%s\", labels);\n    }\n    int i;\n    for (i = 0; i < num_files; i++)\n    {\n        files_to_poll[i].path = argv[optind + i];\n        files_to_poll[i].fd = open(files_to_poll[i].path, O_RDONLY);\n        if (files_to_poll[i].fd == -1) {\n            fprintf(stderr, \"ERROR: Could not open \\\"%s\\\", got: %s\\n\",\n                    files_to_poll[i].path, strerror(errno));\n            exit(2);\n        }\n\n        if(!labelCount) {\n            printf(\",%s\", 
argv[optind + i]);\n        }\n    }\n    printf(\"\\n\");\n\n    //Setup SIGTERM handler\n    struct sigaction action;\n    memset(&action, 0, sizeof(struct sigaction));\n    action.sa_handler = term;\n    sigaction(SIGTERM, &action, NULL);\n\n    //Poll files \n    int bytes_read = 0;\n    while (!done) {\n        clock_gettime(CLOCK_BOOTTIME, &current_time);\n        if (should_write_marker) {\n            ret = write_trace_marker(\"POLLER_START\", 12);\n            if (ret < 0) {\n                fprintf(stderr, \"ERROR writing trace marker: %s\\n\", strerror(ret));\n                exit(ret);\n            }\n        }\n\n        time_float = (double)current_time.tv_sec;\n        time_float += ((double)current_time.tv_nsec)/1000/1000/1000;\n        printf(\"%f\", time_float);\n        for (i = 0; i < num_files; i++) {\n            if (reopen_files) {\n                // Close and reopen the file to get fresh data\n                close(files_to_poll[i].fd);\n                files_to_poll[i].fd = open(files_to_poll[i].path, O_RDONLY);\n                if (files_to_poll[i].fd == -1) {\n                    fprintf(stderr, \"WARNING: Could not reopen \\\"%s\\\", got: %s\\n\",\n                            files_to_poll[i].path, strerror(errno));\n                    printf(\",\");\n                    continue;\n                }\n            } else {\n                lseek(files_to_poll[i].fd, 0, SEEK_SET);\n            }\n\n            bytes_read = read(files_to_poll[i].fd, buf, 1024);\n\n            if (bytes_read < 0) {\n                fprintf(stderr, \"WARNING: Read nothing from \\\"%s\\\"\\n\",\n                        files_to_poll[i].path);\n                printf(\",\");\n                continue;\n            }\n\n            strip(buf);\n            printf(\",%s\", buf);\n            memset(buf, 0, sizeof(buf)); // \"Empty\" buffer\n        }\n        printf(\"\\n\");\n        usleep(interval);\n    }\n\n    //Close files\n    for (i = 0; i < num_files; 
i++)\n    {\n        close(files_to_poll[i].fd);\n    }\n    exit(0);\n}\n"
  },
  {
    "path": "wa/instruments/proc_stat/__init__.py",
    "content": "#    Copyright 2020 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\nimport os\nimport time\nfrom datetime import datetime, timedelta\n\nimport pandas as pd\n\nfrom wa import Instrument, Parameter, File, InstrumentError\n\n\nclass ProcStatCollector(Instrument):\n\n    name = 'proc_stat'\n    description = '''\n    Collect CPU load information from /proc/stat.\n    '''\n\n    parameters = [\n        Parameter('period', int, default=5,\n                  constraint=lambda x: x > 0,\n                  description='''\n                  Time (in seconds) between collections.\n                  '''),\n    ]\n\n    def initialize(self, context):  # pylint: disable=unused-argument\n        self.host_script = context.get_resource(File(self, 'gather-load.sh'))\n        self.target_script = self.target.install(self.host_script)\n        self.target_output = self.target.get_workpath('proc-stat-raw.csv')\n        self.stop_file = self.target.get_workpath('proc-stat-stop.signal')\n\n    def setup(self, context):  # pylint: disable=unused-argument\n        self.command = '{} sh {} {} {} {} {}'.format(\n            self.target.busybox,\n            self.target_script,\n            self.target.busybox,\n            self.target_output,\n            self.period,\n            self.stop_file,\n        )\n        self.target.remove(self.target_output)\n        self.target.remove(self.stop_file)\n\n    def start(self, context):  # pylint: 
disable=unused-argument\n        self.target.kick_off(self.command)\n\n    def stop(self, context):  # pylint: disable=unused-argument\n        self.target.execute('{} touch {}'.format(self.target.busybox, self.stop_file))\n\n    def update_output(self, context):\n        self.logger.debug('Waiting for collector script to terminate...')\n        self._wait_for_script()\n        self.logger.debug('Waiting for collector script to terminate...')\n        host_output = os.path.join(context.output_directory, 'proc-stat-raw.csv')\n        self.target.pull(self.target_output, host_output)\n        context.add_artifact('proc-stat-raw', host_output, kind='raw')\n\n        df = pd.read_csv(host_output)\n        no_ts = df[df.columns[1:]]\n        deltas = (no_ts - no_ts.shift())\n        total = deltas.sum(axis=1)\n        util = (total - deltas.idle) / total * 100\n        out_df = pd.concat([df.timestamp, util], axis=1).dropna()\n        out_df.columns = ['timestamp', 'cpu_util']\n\n        util_file = os.path.join(context.output_directory, 'proc-stat.csv')\n        out_df.to_csv(util_file, index=False)\n        context.add_artifact('proc-stat', util_file, kind='data')\n\n    def finalize(self, context):  # pylint: disable=unused-argument\n        if self.cleanup_assets and getattr(self, 'target_output'):\n            self.target.remove(self.target_output)\n            self.target.remove(self.target_script)\n\n    def _wait_for_script(self):\n        start_time = datetime.utcnow()\n        timeout = timedelta(seconds=300)\n        while self.target.file_exists(self.stop_file):\n            delta = datetime.utcnow() - start_time\n            if delta > timeout:\n                raise InstrumentError('Timed out waiting for /proc/stat collector to terminate..')\n"
  },
  {
    "path": "wa/instruments/proc_stat/gather-load.sh",
    "content": "#!/bin/sh\nBUSYBOX=$1\nOUTFILE=$2\nPERIOD=$3\nSTOP_SIGNAL_FILE=$4\n\nif [ \"$#\" != \"4\" ]; then\n    echo \"USAGE: gather-load.sh BUSYBOX OUTFILE PERIOD STOP_SIGNAL_FILE\"\n    exit 1\nfi\n\necho \"timestamp,user,nice,system,idle,iowait,irq,softirq,steal,guest,guest_nice\" > $OUTFILE\nwhile true; do\n    echo -n $(${BUSYBOX} date -Iseconds) >> $OUTFILE\n    ${BUSYBOX} cat /proc/stat | ${BUSYBOX} head -n 1 | \\\n        ${BUSYBOX} cut -d ' ' -f 2- | ${BUSYBOX} sed 's/ /,/g' >> $OUTFILE\n    if [ -f $STOP_SIGNAL_FILE ]; then\n        rm $STOP_SIGNAL_FILE\n        break\n    else\n        sleep $PERIOD\n    fi\ndone\n"
  },
  {
    "path": "wa/instruments/screencap.py",
    "content": "#    Copyright 2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\nimport os\n\nfrom devlib.collector.screencapture import ScreenCaptureCollector\n\nfrom wa import Instrument, Parameter\n\n\nclass ScreenCaptureInstrument(Instrument):\n\n    name = 'screen_capture'\n    description = \"\"\"\n    A simple instrument which captures the screen on the target devices with a user-specified period.\n\n    Please note that if a too short period is specified, then this\n    instrument will capture the screen as fast as possible, rather\n    than at the specified periodicity.\n    \"\"\"\n\n    parameters = [\n        Parameter('period', kind=int, default=10,\n                  description=\"\"\"\n                  Period (in seconds) at which to capture the screen on the target.\n                  \"\"\"),\n    ]\n\n    def __init__(self, target, **kwargs):\n        super(ScreenCaptureInstrument, self).__init__(target, **kwargs)\n        self.collector = None\n\n    def setup(self, context):\n        # We need to create a directory for the captured screenshots\n        output_path = os.path.join(context.output_directory, \"screen-capture\")\n        os.mkdir(output_path)\n        self.collector = ScreenCaptureCollector(self.target,\n                                                self.period)\n        self.collector.set_output(output_path)\n        self.collector.reset()\n\n    def start(self, context):  # pylint: 
disable=unused-argument\n        self.collector.start()\n\n    def stop(self, context):  # pylint: disable=unused-argument\n        self.collector.stop()\n"
  },
  {
    "path": "wa/instruments/serialmon.py",
    "content": "#    Copyright 2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\nimport os\n\nfrom devlib import SerialTraceCollector\n\nfrom wa import Instrument, Parameter, hostside\n\n\nclass SerialMon(Instrument):\n\n    name = 'serialmon'\n    description = \"\"\"\n    Records the traffic on a serial connection\n\n    The traffic on a serial connection is monitored and logged to a\n    file. In the event that the device is reset, the instrument will\n    stop monitoring during the reset, and will reconnect once the\n    reset has completed. 
This is to account for devices (i.e., the\n    Juno) which utilise the serial connection to reset the board.\n    \"\"\"\n\n    parameters = [\n        Parameter('serial_port', kind=str, default=\"/dev/ttyS0\",\n                  description=\"\"\"\n                  The serial device to monitor.\n                  \"\"\"),\n        Parameter('baudrate', kind=int, default=115200,\n                  description=\"\"\"\n                  The baud-rate to use when connecting to the serial connection.\n                  \"\"\"),\n    ]\n\n    def __init__(self, target, **kwargs):\n        super(SerialMon, self).__init__(target, **kwargs)\n        self._collector = SerialTraceCollector(target, self.serial_port, self.baudrate)\n\n    def start_logging(self, context, filename=\"serial.log\"):\n        outpath = os.path.join(context.output_directory, filename)\n        self._collector.set_output(outpath)\n        self._collector.reset()\n        self.logger.debug(\"Acquiring serial port ({})\".format(self.serial_port))\n        if self._collector.collecting:\n            self.stop_logging(context)\n        self._collector.start()\n\n    def stop_logging(self, context, identifier=\"job\"):\n        self.logger.debug(\"Releasing serial port ({})\".format(self.serial_port))\n        if self._collector.collecting:\n            self._collector.stop()\n            data = self._collector.get_data()\n            for l in data:  # noqa: E741\n                context.add_artifact(\"{}_serial_log\".format(identifier),\n                                     l.path, kind=\"log\")\n\n    def on_run_start(self, context):\n        self.start_logging(context, \"preamble_serial.log\")\n\n    def before_job_queue_execution(self, context):\n        self.stop_logging(context, \"preamble\")\n\n    def after_job_queue_execution(self, context):\n        self.start_logging(context, \"postamble_serial.log\")\n\n    def on_run_end(self, context):\n        self.stop_logging(context, \"postamble\")\n\n 
   def on_job_start(self, context):\n        self.start_logging(context)\n\n    def on_job_end(self, context):\n        self.stop_logging(context)\n\n    @hostside\n    def before_reboot(self, context):\n        self.stop_logging(context)\n"
  },
  {
    "path": "wa/instruments/trace_cmd.py",
    "content": "#    Copyright 2013-2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\n\n# pylint: disable=W0613,E1101\n\nimport os\n\nfrom devlib import FtraceCollector\n\nfrom wa import Instrument, Parameter\nfrom wa.framework import signal\nfrom wa.framework.instrument import very_slow, is_installed\nfrom wa.framework.exception import InstrumentError\nfrom wa.utils.types import list_of_strings\nfrom wa.utils.misc import which\n\n\nOUTPUT_TRACE_FILE = 'trace.dat'\nOUTPUT_TEXT_FILE = '{}.txt'.format(os.path.splitext(OUTPUT_TRACE_FILE)[0])\nTIMEOUT = 180\n\n\nclass TraceCmdInstrument(Instrument):\n\n    name = 'trace-cmd'\n    description = \"\"\"\n    trace-cmd is an instrument which interacts with ftrace Linux kernel internal\n    tracer\n\n    From trace-cmd man page:\n\n    trace-cmd command interacts with the ftrace tracer that is built inside the\n    Linux kernel. 
It interfaces with the ftrace specific files found in the\n    debugfs file system under the tracing directory.\n\n    trace-cmd reads a list of events it will trace, which can be specified in\n    the config file as follows ::\n\n        trace_events = ['irq*', 'power*']\n\n    If no event is specified, a default set of events that are generally considered useful\n    for debugging/profiling purposes will be enabled.\n\n    The list of available events can be obtained by rooting and running the\n    following command line on the device ::\n\n       trace-cmd list\n\n    You may also specify ``trace_buffer_size`` setting which must be an integer\n    that will be used to set the ftrace buffer size. It will be interpreted as\n    KB::\n\n        trace_cmd_buffer_size = 8000\n\n    The maximum buffer size varies from device to device, but there is a\n    maximum and trying to set buffer size beyond that will fail. If you plan\n    on collecting a lot of trace over long periods of time, the buffer size\n    will not be enough and you will only get trace for the last portion of your\n    run. To deal with this you can set the ``trace_mode`` setting to\n    ``'record'`` (the default is ``'start'``)::\n\n        trace_cmd_mode = 'record'\n\n    This will cause trace-cmd to trace into file(s) on disk, rather than the\n    buffer, and so the limit for the max size of the trace is set by the\n    storage available on device. Bear in mind that ``'record'`` mode *is* more\n    intrusive than the default, so if you do not plan on generating a lot of\n    trace, it is best to use the default ``'start'`` mode.\n\n    .. note:: Mode names correspond to the underlying trace-cmd executable's\n              command used to implement them. 
You can find out more about what\n              is happening in each case from trace-cmd documentation:\n              https://lwn.net/Articles/341902/.\n\n    This instrument comes with a trace-cmd binary that will be copied and used\n    on the device, however post-processing will be, by default, done on-host and you must\n    have trace-cmd installed and in your path. On Ubuntu systems, this may be\n    done with::\n\n        sudo apt-get install trace-cmd\n\n    Alternatively, you may set ``report_on_target`` parameter to ``True`` to enable on-target\n    processing (this is useful when running on non-Linux hosts, but is likely to take longer\n    and may fail on particularly resource-constrained targets).\n\n    \"\"\"\n\n    parameters = [\n        Parameter('events', kind=list_of_strings,\n                  default=['sched*', 'irq*', 'power*', 'thermal*'],\n                  global_alias='trace_events',\n                  description=\"\"\"\n                  Specifies the list of events to be traced. Each event in the\n                  list will be passed to trace-cmd with -e parameter and must\n                  be in the format accepted by trace-cmd.\n                  \"\"\"),\n        Parameter('functions', kind=list_of_strings,\n                  global_alias='trace_functions',\n                  description=\"\"\"\n                  Specifies the list of functions to be traced.\n                  \"\"\"),\n        Parameter('buffer_size', kind=int, default=None,\n                  global_alias='trace_buffer_size',\n                  description=\"\"\"\n                  Attempt to set ftrace buffer size to the specified value (in\n                  KB). Default buffer size may need to be increased for\n                  long-running workloads, or if a large number of events have\n                  been enabled. Note: there is a maximum size that the buffer\n                  can be set, and that varies from device to device. 
Attempting\n                  to set buffer size higher than this will fail. In that case,\n                  this instrument will set the size to the highest possible\n                  value by going down from the specified size in\n                  ``buffer_size_step`` intervals.\n                  \"\"\"),\n        Parameter('top_buffer_size', kind=int, default=None,\n                  global_alias='trace_top_buffer_size',\n                  description=\"\"\"\n                  The same as buffer_size except it sets the size of the\n                  top-level buffer instead of the devlib one. If left unset,\n                  it will default to the same as the devlib buffer size.\n                  \"\"\"),\n        Parameter('buffer_size_step', kind=int, default=1000,\n                  global_alias='trace_buffer_size_step',\n                  description=\"\"\"\n                  Defines the decremental step used if the specified\n                  ``buffer_size`` could not be set.  This will be subtracted\n                  from the buffer size until set succeeds or size is reduced to\n                  1MB.\n                  \"\"\"),\n        Parameter('report', kind=bool, default=True,\n                  description=\"\"\"\n                  Specifies whether reporting should be performed once the\n                  binary trace has been generated.\n                  \"\"\"),\n        Parameter('no_install', kind=bool, default=False,\n                  description=\"\"\"\n                  Do not install the bundled trace-cmd  and use the one on the\n                  device instead. If there is not already a trace-cmd on the\n                  device, an error is raised.\n                  \"\"\"),\n        Parameter('report_on_target', kind=bool, default=False,\n                  description=\"\"\"\n                  When enabled generation of reports will be done host-side\n                  because the generated file is very large. 
If trace-cmd is not\n                  available on the host device this setting can be disabled and\n                  the report will be generated on the target device.\n\n                  .. note:: This requires the latest version of trace-cmd to be\n                            installed on the host (the one in your\n                            distribution's repos may be too old).\n                  \"\"\"),\n        Parameter('mode', kind=str, default='write-to-memory',\n                  allowed_values=['write-to-disk', 'write-to-memory'],\n                  description=\"\"\"\n                  Specifies whether collected traces should be saved in memory or disk.\n                  Extensive workloads may hit out of memory issue. Hence, write-to-disk\n                  mode can help in such cases.\n                  \"\"\"),\n    ]\n\n    def __init__(self, target, **kwargs):\n        super(TraceCmdInstrument, self).__init__(target, **kwargs)\n        self.collector = None\n\n    def initialize(self, context):\n        if not self.target.is_rooted:\n            raise InstrumentError('trace-cmd instrument cannot be used on an unrooted device.')\n        collector_params = dict(\n            events=self.events,\n            functions=self.functions,\n            buffer_size=self.buffer_size,\n            top_buffer_size=self.top_buffer_size,\n            buffer_size_step=1000,\n            automark=False,\n            autoreport=True,\n            autoview=False,\n            no_install=self.no_install,\n            strict=False,\n            report_on_target=False,\n            mode=self.mode,\n        )\n        if self.report and self.report_on_target:\n            collector_params['autoreport'] = True\n            collector_params['report_on_target'] = True\n        else:\n            collector_params['autoreport'] = False\n            collector_params['report_on_target'] = False\n        self.collector = FtraceCollector(self.target, 
**collector_params)\n\n        # Register ourselves as absolute last event before and\n        #   first after so we can mark the trace at the right time\n        signal.connect(self.mark_start, signal.BEFORE_WORKLOAD_EXECUTION, priority=11)\n        signal.connect(self.mark_stop, signal.AFTER_WORKLOAD_EXECUTION, priority=11)\n\n    def setup(self, context):\n        if self.collector:\n            self.collector.reset()\n\n    @very_slow\n    def start(self, context):\n        if self.collector:\n            self.collector.start()\n\n    @very_slow\n    def stop(self, context):\n        if self.collector:\n            self.collector.stop()\n\n    def update_output(self, context):  # NOQA pylint: disable=R0912\n        if not self.collector:\n            return\n        self.logger.info('Extracting trace from target...')\n        outfile = os.path.join(context.output_directory, OUTPUT_TRACE_FILE)\n\n        self.collector.set_output(outfile)\n        self.collector.get_data()\n        context.add_artifact('trace-cmd-bin', outfile, 'data')\n        if self.report:\n            textfile = os.path.join(context.output_directory, OUTPUT_TEXT_FILE)\n\n            if not self.report_on_target:\n                self.collector.report(outfile, textfile)\n            context.add_artifact('trace-cmd-txt', textfile, 'export')\n\n    def teardown(self, context):\n        path = self.target.path.join(self.target.working_directory, OUTPUT_TRACE_FILE)\n        self.target.remove(path)\n        if self.report_on_target:\n            path = self.target.path.join(self.target.working_directory, OUTPUT_TEXT_FILE)\n            self.target.remove(path)\n\n    def validate(self):\n        if self.report and not self.report_on_target and not which('trace-cmd'):\n            raise InstrumentError('trace-cmd is not in PATH; is it installed?')\n        if is_installed('perfetto'):\n            raise InstrumentError('trace-cmd cannot be used at the same time as perfetto')\n\n    def 
mark_start(self, context):\n        if self.is_enabled:\n            self.collector.mark_start()\n\n    def mark_stop(self, context):\n        if self.is_enabled:\n            self.collector.mark_stop()\n"
  },
  {
    "path": "wa/output_processors/__init__.py",
    "content": ""
  },
  {
    "path": "wa/output_processors/cpustates.py",
    "content": "#    Copyright 2015-2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\nfrom collections import OrderedDict\n\nfrom devlib.utils.csvutil import csvwriter\n\nfrom wa import OutputProcessor, Parameter\nfrom wa.utils.cpustates import report_power_stats\n\n\ndef _get_cpustates_description():\n    \"\"\"\n    Reuse the description for report_power_stats() but strip away it's\n    parameter docs, as they are not relevant to the OuputProcessor.\n    \"\"\"\n    output_lines = []\n    lines = iter(report_power_stats.__doc__.split('\\n'))\n    line = next(lines)\n    while True:\n        try:\n            if line.strip().startswith(':param'):\n                while line.strip():\n                    line = next(lines)\n            output_lines.append(line)\n            line = next(lines)\n        except StopIteration:\n            break\n    return '\\n'.join(output_lines)\n\n\nclass CpuStatesProcessor(OutputProcessor):\n\n    name = 'cpustates'\n\n    description = _get_cpustates_description()\n\n    parameters = [\n        Parameter('use_ratios', kind=bool, default=False,\n                  description=\"\"\"\n                  By default proportional values will be reported as\n                  percentages, if this flag is enabled, they will be reported\n                  as ratios instead.\n                  \"\"\"),\n        Parameter('no_idle', kind=bool, default=False,\n                  description=\"\"\"\n                  
Indicate that there will be no idle transitions in the trace.\n                  By default, a core will be reported as being in an \"unknown\"\n                  state until the first idle transition for that core. Normally,\n                  this is not an issue, as cores are \"nudged\" as part of the\n                  setup to ensure that there is an idle transition before the\n                  measured region. However, if all idle states for the core\n                  have been disabled, or if the kernel does not have cpuidle,\n                  the nudge will not result in an idle transition, which would\n                  cause the cores to be reported to be in \"unknown\" state for\n                  the entire execution.\n\n                  If this parameter is set to ``True``, the processor will\n                  assume that cores are running prior to the beginning of the\n                  issue, and they will leave unknown state on the first\n                  frequency transition.\n                  \"\"\"),\n        Parameter('split_wfi_states', kind=bool, default=False,\n                  description=\"\"\"\n                  WFI is a very shallow idle state. 
The core remains powered on\n                  when in this state, which means the power usage while in this\n                  state will depend on the current voltage, and therefore current\n                  frequency.\n\n                  Setting this to ``True`` will track time spent in WFI at\n                  each frequency separately, allowing to gain the most accurate\n                  picture of energy usage.\n                  \"\"\"),\n    ]\n\n    def __init__(self, *args, **kwargs):\n        super(CpuStatesProcessor, self).__init__(*args, **kwargs)\n        self.iteration_reports = OrderedDict()\n\n    def process_job_output(self, output, target_info, run_output):  # pylint: disable=unused-argument\n        trace_file = output.get_artifact_path('trace-cmd-txt')\n        if not trace_file:\n            self.logger.warning('Text trace does not appear to have been generated; skipping this iteration.')\n            return\n        if 'cpufreq' not in target_info.modules:\n            msg = '\"cpufreq\" module not detected on target, cpu frequency information may be missing.'\n            self.logger.warning(msg)\n        if 'cpuidle' not in target_info.modules:\n            msg = '\"cpuidle\" module not detected on target, cpu idle information may be missing.'\n            self.logger.debug(msg)\n\n        self.logger.info('Generating power state reports from trace...')\n        reports = report_power_stats(  # pylint: disable=unbalanced-tuple-unpacking\n            trace_file=trace_file,\n            output_basedir=output.basepath,\n            cpus=target_info.cpus,\n            use_ratios=self.use_ratios,\n            no_idle=self.no_idle,\n            split_wfi_states=self.split_wfi_states,\n        )\n\n        for report in reports.values():\n            output.add_artifact(report.name, report.filepath, kind='data')\n\n        iteration_id = (output.id, output.label, output.iteration)\n        self.iteration_reports[iteration_id] = reports\n\n    
# pylint: disable=too-many-locals,unused-argument\n    def process_run_output(self, output, target_info):\n        if not self.iteration_reports:\n            self.logger.warning('No power state reports generated.')\n            return\n\n        parallel_rows = []\n        powerstate_rows = []\n        for iteration_id, reports in self.iteration_reports.items():\n            job_id, workload, iteration = iteration_id\n            parallel_report = reports['parallel-stats']\n            powerstate_report = reports['power-state-stats']\n\n            for record in parallel_report.values:\n                parallel_rows.append([job_id, workload, iteration] + record)\n            for state in sorted(powerstate_report.state_stats):\n                stats = powerstate_report.state_stats[state]\n                powerstate_rows.append([job_id, workload, iteration, state]\n                                       + ['{:.3f}'.format(s if s is not None else 0)\n                                           for s in stats])\n\n        outpath = output.get_path('parallel-stats.csv')\n        with csvwriter(outpath) as writer:\n            writer.writerow(['id', 'workload', 'iteration', 'cluster',\n                             'number_of_cores', 'total_time',\n                             '%time', '%running_time'])\n            writer.writerows(parallel_rows)\n        output.add_artifact('run-parallel-stats', outpath, kind='export')\n\n        outpath = output.get_path('power-state-stats.csv')\n        with csvwriter(outpath) as writer:\n            headers = ['id', 'workload', 'iteration', 'state']\n            headers += ['{} CPU{}'.format(c, i)\n                        for i, c in enumerate(powerstate_report.core_names)]\n            writer.writerow(headers)\n            writer.writerows(powerstate_rows)\n        output.add_artifact('run-power-state-stats', outpath, kind='export')\n"
  },
  {
    "path": "wa/output_processors/csvproc.py",
    "content": "#    Copyright 2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\nfrom devlib.utils.csvutil import csvwriter\n\nfrom wa import OutputProcessor, Parameter\nfrom wa.framework.exception import ConfigError\nfrom wa.utils.types import list_of_strings\n\n\nclass CsvReportProcessor(OutputProcessor):\n\n    name = 'csv'\n    description = \"\"\"\n    Creates a ``results.csv`` in the output directory containing results for\n    all iterations in CSV format, each line containing a single metric.\n\n    \"\"\"\n\n    parameters = [\n        Parameter('use_all_classifiers', kind=bool, default=False,\n                  global_alias='use_all_classifiers',\n                  description=\"\"\"\n                  If set to ``True``, this will add a column for every classifier\n                  that features in at least one collected metric.\n\n                  .. note:: This cannot be ``True`` if ``extra_columns`` is set.\n\n                  \"\"\"),\n        Parameter('extra_columns', kind=list_of_strings,\n                  description=\"\"\"\n                  List of classifiers to use as columns.\n\n                   .. 
note:: This cannot be set if ``use_all_classifiers`` is\n                             ``True``.\n\n                  \"\"\"),\n    ]\n\n    def __init__(self, *args, **kwargs):\n        super(CsvReportProcessor, self).__init__(*args, **kwargs)\n        self.outputs_so_far = []\n        self.artifact_added = False\n\n    def validate(self):\n        super(CsvReportProcessor, self).validate()\n        if self.use_all_classifiers and self.extra_columns:\n            msg = 'extra_columns cannot be specified when '\\\n                  'use_all_classifiers is True'\n            raise ConfigError(msg)\n\n    # pylint: disable=unused-argument\n    def process_job_output(self, output, target_info, run_output):\n        self.outputs_so_far.append(output)\n        self._write_outputs(self.outputs_so_far, run_output)\n        if not self.artifact_added:\n            run_output.add_artifact('run_result_csv', 'results.csv', 'export')\n            self.artifact_added = True  # pylint: disable=attribute-defined-outside-init\n\n    def process_run_output(self, output, target_info):  # pylint: disable=unused-argument\n        self.outputs_so_far.append(output)\n        self._write_outputs(self.outputs_so_far, output)\n        if not self.artifact_added:\n            output.add_artifact('run_result_csv', 'results.csv', 'export')\n            self.artifact_added = True  # pylint: disable=attribute-defined-outside-init\n\n    def _write_outputs(self, outputs, output):\n        if self.use_all_classifiers:\n            classifiers = set([])\n            for out in outputs:\n                for metric in out.metrics:\n                    classifiers.update(list(metric.classifiers.keys()))\n            extra_columns = list(classifiers)\n        elif self.extra_columns:\n            extra_columns = self.extra_columns\n        else:\n            extra_columns = []\n\n        outfile = output.get_path('results.csv')\n        with csvwriter(outfile) as writer:\n            
writer.writerow(['id', 'workload', 'iteration', 'metric', ]\n                            + extra_columns + ['value', 'units'])\n\n            for o in outputs:\n                if o.kind == 'job':\n                    header = [o.id, o.label, o.iteration]\n                elif o.kind == 'run':\n                    # Should be a RunOutput. Run-level metrics aren't attached\n                    # to any job so we leave 'id' and 'iteration' blank, and use\n                    # the run name for the 'label' field.\n                    header = [None, o.info.run_name, None]\n                else:\n                    raise RuntimeError(\n                        'Output of kind \"{}\" unrecognised by csvproc'.format(o.kind))\n\n                for metric in o.result.metrics:\n                    row = (header + [metric.name]\n                           + [str(metric.classifiers.get(c, ''))\n                           for c in extra_columns]\n                           + [str(metric.value), metric.units or ''])\n                    writer.writerow(row)\n"
  },
  {
    "path": "wa/output_processors/postgresql.py",
    "content": "#    Copyright 2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\nimport os\nimport uuid\nimport collections\nimport tarfile\n\ntry:\n    import psycopg2\n    from psycopg2 import (connect, extras)\n    from psycopg2 import Error as Psycopg2Error\nexcept ImportError as e:\n    psycopg2 = None\n    import_error_msg = e.args[0] if e.args else str(e)\n\nfrom devlib.target import KernelVersion, KernelConfig\n\nfrom wa import OutputProcessor, Parameter, OutputProcessorError\nfrom wa.framework.target.info import CpuInfo\nfrom wa.utils.postgres import (POSTGRES_SCHEMA_DIR, cast_level, cast_vanilla,\n                               adapt_vanilla, return_as_is, adapt_level,\n                               ListOfLevel, adapt_ListOfX, create_iterable_adapter,\n                               get_schema_versions)\nfrom wa.utils.serializer import json\nfrom wa.utils.types import level\n\n\nclass PostgresqlResultProcessor(OutputProcessor):\n\n    name = 'postgres'\n    description = \"\"\"\n    Stores results in a Postgresql database.\n\n    The structure of this database can easily be understood by examining\n    the postgres_schema.sql file (the schema used to generate it):\n    {}\n    \"\"\".format(os.path.join(POSTGRES_SCHEMA_DIR, 'postgres_schema.sql'))\n\n    parameters = [\n        Parameter('username', default='postgres',\n                  description=\"\"\"\n                  This is the username that will be used to connect to 
the\n                  Postgresql database. Note that depending on whether the user\n                  has privileges to modify the database (normally only possible\n                  on localhost), the user may only be able to append entries.\n                  \"\"\"),\n        Parameter('password', default=None,\n                  description=\"\"\"\n                  The password to be used to connect to the specified database\n                  with the specified username.\n                  \"\"\"),\n        Parameter('dbname', default='wa',\n                  description=\"\"\"\n                  Name of the database that will be created or added to. Note,\n                  to override this, you can specify a value in your user\n                  wa configuration file.\n                  \"\"\"),\n        Parameter('host', kind=str, default='localhost',\n                  description=\"\"\"\n                  The host where the Postgresql server is running. The default\n                  is localhost (i.e. 
the machine that wa is running on).\n                  This is useful for complex systems where multiple machines\n                  may be executing workloads and uploading their results to\n                  a remote, centralised database.\n                  \"\"\"),\n        Parameter('port', kind=str, default='5432',\n                  description=\"\"\"\n                  The port the Postgresql server is running on, on the host.\n                  The default is Postgresql's default, so do not change this\n                  unless you have modified the default port for Postgresql.\n                  \"\"\"),\n    ]\n\n    # Commands\n    sql_command = {\n        \"create_run\": \"INSERT INTO Runs (oid, event_summary, basepath, status, timestamp, run_name, project, project_stage, retry_on_status, max_retries, bail_on_init_failure, allow_phone_home, run_uuid, start_time, metadata, state, _pod_version, _pod_serialization_version) \"\n                      \"VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)\",\n        \"update_run\": \"UPDATE Runs SET event_summary=%s, status=%s, timestamp=%s, end_time=%s, duration=%s, state=%s WHERE oid=%s;\",\n        \"create_job\": \"INSERT INTO Jobs (oid, run_oid, status, retry, label, job_id, iterations, workload_name, metadata, _pod_version, _pod_serialization_version) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s);\",\n        \"create_target\": \"INSERT INTO Targets (oid, run_oid, target, modules, cpus, os, os_version, hostid, hostname, abi, is_rooted, kernel_version, kernel_release, kernel_sha1, kernel_config, sched_features, page_size_kb, system_id, screen_resolution, prop, android_id, _pod_version, _pod_serialization_version) \"\n                         \"VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)\",\n        \"create_event\": \"INSERT INTO Events (oid, run_oid, job_oid, timestamp, message, _pod_version, 
_pod_serialization_version) VALUES (%s, %s, %s, %s, %s, %s, %s)\",\n        \"create_artifact\": \"INSERT INTO Artifacts (oid, run_oid, job_oid, name, large_object_uuid, description, kind, is_dir, _pod_version, _pod_serialization_version) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s)\",\n        \"create_metric\": \"INSERT INTO Metrics (oid, run_oid, job_oid, name, value, units, lower_is_better, _pod_version, _pod_serialization_version) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s)\",\n        \"create_augmentation\": \"INSERT INTO Augmentations (oid, run_oid, name) VALUES (%s, %s, %s)\",\n        \"create_classifier\": \"INSERT INTO Classifiers (oid, artifact_oid, metric_oid, job_oid, run_oid, key, value) VALUES (%s, %s, %s, %s, %s, %s, %s)\",\n        \"create_parameter\": \"INSERT INTO Parameters (oid, run_oid, job_oid, augmentation_oid, resource_getter_oid, name, value, value_type, type) \"\n                            \"VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s)\",\n        \"create_resource_getter\": \"INSERT INTO Resource_Getters (oid, run_oid, name) VALUES (%s, %s, %s)\",\n        \"create_job_aug\": \"INSERT INTO Jobs_Augs (oid, job_oid, augmentation_oid) VALUES (%s, %s, %s)\",\n        \"create_large_object\": \"INSERT INTO LargeObjects (oid, lo_oid) VALUES (%s, %s)\"\n    }\n\n    # Lists to track which run-related items have already been added\n    metrics_already_added = []\n    # Dicts needed so that jobs can look up ids\n    artifacts_already_added = {}\n    augmentations_already_added = {}\n\n    # Status bits (flags)\n    first_job_run = True\n\n    def __init__(self, *args, **kwargs):\n        super(PostgresqlResultProcessor, self).__init__(*args, **kwargs)\n        self.conn = None\n        self.cursor = None\n        self.run_uuid = None\n        self.target_uuid = None\n\n    def initialize(self, context):\n\n        if not psycopg2:\n            raise ImportError(\n                'The psycopg2 module is required for the '\n                + 
'Postgresql Output Processor: {}'.format(import_error_msg))\n        # N.B. Typecasters are for postgres->python and adapters the opposite\n        self.connect_to_database()\n\n        # Register the adapters and typecasters for enum types\n        self.cursor.execute(\"SELECT NULL::status_enum\")\n        status_oid = self.cursor.description[0][1]\n        self.cursor.execute(\"SELECT NULL::param_enum\")\n        param_oid = self.cursor.description[0][1]\n        LEVEL = psycopg2.extensions.new_type(\n            (status_oid,), \"LEVEL\", cast_level)\n        psycopg2.extensions.register_type(LEVEL)\n        PARAM = psycopg2.extensions.new_type(\n            (param_oid,), \"PARAM\", cast_vanilla)\n        psycopg2.extensions.register_type(PARAM)\n        psycopg2.extensions.register_adapter(level, return_as_is(adapt_level))\n        psycopg2.extensions.register_adapter(\n            ListOfLevel, adapt_ListOfX(adapt_level))\n        psycopg2.extensions.register_adapter(KernelVersion, adapt_vanilla)\n        psycopg2.extensions.register_adapter(\n            CpuInfo, adapt_vanilla)\n        psycopg2.extensions.register_adapter(\n            collections.OrderedDict, extras.Json)\n        psycopg2.extensions.register_adapter(dict, extras.Json)\n        psycopg2.extensions.register_adapter(\n            KernelConfig, create_iterable_adapter(2, explicit_iterate=True))\n        # Register ready-made UUID type adapter\n        extras.register_uuid()\n\n        # Insert a run_uuid which will be globally accessible during the run\n        self.run_uuid = uuid.UUID(str(uuid.uuid4()))\n        run_output = context.run_output\n        retry_on_status = ListOfLevel(run_output.run_config.retry_on_status)\n        self.cursor.execute(\n            self.sql_command['create_run'],\n            (\n                self.run_uuid,\n                run_output.event_summary,\n                run_output.basepath,\n                run_output.status,\n                
run_output.state.timestamp,\n                run_output.info.run_name,\n                run_output.info.project,\n                run_output.info.project_stage,\n                retry_on_status,\n                run_output.run_config.max_retries,\n                run_output.run_config.bail_on_init_failure,\n                run_output.run_config.allow_phone_home,\n                run_output.info.uuid,\n                run_output.info.start_time,\n                run_output.metadata,\n                json.dumps(run_output.state.to_pod()),\n                run_output.result._pod_version,  # pylint: disable=protected-access\n                run_output.result._pod_serialization_version,  # pylint: disable=protected-access\n            )\n        )\n        self.target_uuid = uuid.uuid4()\n        target_info = context.target_info\n        target_pod = target_info.to_pod()\n        self.cursor.execute(\n            self.sql_command['create_target'],\n            (\n                self.target_uuid,\n                self.run_uuid,\n                target_pod['target'],\n                target_pod['modules'],\n                target_pod['cpus'],\n                target_pod['os'],\n                target_pod['os_version'],\n                target_pod['hostid'],\n                target_pod['hostname'],\n                target_pod['abi'],\n                target_pod['is_rooted'],\n                # Important caveat: kernel_version is the name of the column in the Targets table\n                # However, this refers to kernel_version.version, not to kernel_version as a whole\n                target_pod['kernel_version'],\n                target_pod['kernel_release'],\n                target_info.kernel_version.sha1,\n                target_info.kernel_config,\n                target_pod['sched_features'],\n                target_pod['page_size_kb'],\n                target_pod['system_id'],\n                # Android Specific\n                
list(target_pod.get('screen_resolution', [])),\n                target_pod.get('prop'),\n                target_pod.get('android_id'),\n                target_pod.get('_pod_version'),\n                target_pod.get('_pod_serialization_version'),\n            )\n        )\n\n        # Commit cursor commands\n        self.conn.commit()\n\n    def export_job_output(self, job_output, target_info, run_output):   # pylint: disable=too-many-branches, too-many-statements, too-many-locals, unused-argument\n        ''' Run once for each job to upload information that is\n            updated on a job by job basis.\n        '''\n        # Ensure we're still connected to the database.\n        self.connect_to_database()\n        job_uuid = uuid.uuid4()\n        # Create a new job\n        self.cursor.execute(\n            self.sql_command['create_job'],\n            (\n                job_uuid,\n                self.run_uuid,\n                job_output.status,\n                job_output.retry,\n                job_output.label,\n                job_output.id,\n                job_output.iteration,\n                job_output.spec.workload_name,\n                job_output.metadata,\n                job_output.spec._pod_version,  # pylint: disable=protected-access\n                job_output.spec._pod_serialization_version,  # pylint: disable=protected-access\n            )\n        )\n\n        for classifier in job_output.classifiers:\n            classifier_uuid = uuid.uuid4()\n            self.cursor.execute(\n                self.sql_command['create_classifier'],\n                (\n                    classifier_uuid,\n                    None,\n                    None,\n                    job_uuid,\n                    None,\n                    classifier,\n                    job_output.classifiers[classifier]\n                )\n            )\n        # Update the run table and run-level parameters\n        self.cursor.execute(\n            
self.sql_command['update_run'],\n            (\n                run_output.event_summary,\n                run_output.status,\n                run_output.state.timestamp,\n                run_output.info.end_time,\n                None,\n                json.dumps(run_output.state.to_pod()),\n                self.run_uuid))\n        for classifier in run_output.classifiers:\n            classifier_uuid = uuid.uuid4()\n            self.cursor.execute(\n                self.sql_command['create_classifier'],\n                (\n                    classifier_uuid,\n                    None,\n                    None,\n                    None,\n                    None,\n                    self.run_uuid,\n                    classifier,\n                    run_output.classifiers[classifier]\n                )\n            )\n        self.sql_upload_artifacts(run_output, record_in_added=True)\n        self.sql_upload_metrics(run_output, record_in_added=True)\n        self.sql_upload_augmentations(run_output)\n        self.sql_upload_resource_getters(run_output)\n        self.sql_upload_events(job_output, job_uuid=job_uuid)\n        self.sql_upload_artifacts(job_output, job_uuid=job_uuid)\n        self.sql_upload_metrics(job_output, job_uuid=job_uuid)\n        self.sql_upload_job_augmentations(job_output, job_uuid=job_uuid)\n        self.sql_upload_parameters(\n            \"workload\",\n            job_output.spec.workload_parameters,\n            job_uuid=job_uuid)\n        self.sql_upload_parameters(\n            \"runtime\",\n            job_output.spec.runtime_parameters,\n            job_uuid=job_uuid)\n        self.conn.commit()\n\n    def export_run_output(self, run_output, target_info):  # pylint: disable=unused-argument, too-many-locals\n        ''' A final export of the RunOutput that updates existing parameters\n            and uploads ones which are only generated after jobs have run.\n        '''\n        if self.cursor is None:  # Output processor did 
not initialise correctly.\n            return\n        # Ensure we're still connected to the database.\n        self.connect_to_database()\n\n        # Update the job statuses following completion of the run\n        for job in run_output.jobs:\n            job_id = job.id\n            job_status = job.status\n            self.cursor.execute(\n                \"UPDATE Jobs SET status=%s WHERE job_id=%s and run_oid=%s\",\n                (\n                    job_status,\n                    job_id,\n                    self.run_uuid\n                )\n            )\n\n        run_uuid = self.run_uuid\n        # Update the run entry after jobs have completed\n        run_info_pod = run_output.info.to_pod()\n        run_state_pod = run_output.state.to_pod()\n        sql_command_update_run = self.sql_command['update_run']\n        self.cursor.execute(\n            sql_command_update_run,\n            (\n                run_output.event_summary,\n                run_output.status,\n                run_info_pod['start_time'],\n                run_info_pod['end_time'],\n                run_info_pod['duration'],\n                json.dumps(run_state_pod),\n                run_uuid,\n            )\n        )\n        self.sql_upload_events(run_output)\n        self.sql_upload_artifacts(run_output, check_uniqueness=True)\n        self.sql_upload_metrics(run_output, check_uniqueness=True)\n        self.sql_upload_augmentations(run_output)\n        self.conn.commit()\n\n    # Upload functions for use with both jobs and runs\n\n    def sql_upload_resource_getters(self, output_object):\n        for resource_getter in output_object.run_config.resource_getters:\n            resource_getter_uuid = uuid.uuid4()\n            self.cursor.execute(\n                self.sql_command['create_resource_getter'],\n                (\n                    resource_getter_uuid,\n                    self.run_uuid,\n                    resource_getter,\n                )\n            )\n        
    self.sql_upload_parameters(\n                'resource_getter',\n                output_object.run_config.resource_getters[resource_getter],\n                owner_id=resource_getter_uuid,\n            )\n\n    def sql_upload_events(self, output_object, job_uuid=None):\n        for event in output_object.events:\n            event_uuid = uuid.uuid4()\n            self.cursor.execute(\n                self.sql_command['create_event'],\n                (\n                    event_uuid,\n                    self.run_uuid,\n                    job_uuid,\n                    event.timestamp,\n                    event.message,\n                    event._pod_version,  # pylint: disable=protected-access\n                    event._pod_serialization_version,  # pylint: disable=protected-access\n                )\n            )\n\n    def sql_upload_job_augmentations(self, output_object, job_uuid=None):\n        ''' This is a table which links the uuids of augmentations to jobs.\n        Note that the augmentations table is prepopulated, leading to the necessity\n        of an augmentaitions_already_added dictionary, which gives us the corresponding\n        uuids.\n        Augmentations which are prefixed by ~ are toggled off and not part of the job,\n        therefore not added.\n        '''\n        for augmentation in output_object.spec.augmentations:\n            if augmentation.startswith('~'):\n                continue\n            augmentation_uuid = self.augmentations_already_added[augmentation]\n            job_aug_uuid = uuid.uuid4()\n            self.cursor.execute(\n                self.sql_command['create_job_aug'],\n                (\n                    job_aug_uuid,\n                    job_uuid,\n                    augmentation_uuid,\n                )\n            )\n\n    def sql_upload_augmentations(self, output_object):\n        for augmentation in output_object.augmentations:\n            if augmentation.startswith('~') or augmentation in 
self.augmentations_already_added:\n                continue\n            augmentation_uuid = uuid.uuid4()\n            self.cursor.execute(\n                self.sql_command['create_augmentation'],\n                (\n                    augmentation_uuid,\n                    self.run_uuid,\n                    augmentation,\n                )\n            )\n            self.sql_upload_parameters(\n                'augmentation',\n                output_object.run_config.augmentations[augmentation],\n                owner_id=augmentation_uuid,\n            )\n            self.augmentations_already_added[augmentation] = augmentation_uuid\n\n    def sql_upload_metrics(self, output_object, record_in_added=False, check_uniqueness=False, job_uuid=None):\n        for metric in output_object.metrics:\n            if metric in self.metrics_already_added and check_uniqueness:\n                continue\n            metric_uuid = uuid.uuid4()\n            self.cursor.execute(\n                self.sql_command['create_metric'],\n                (\n                    metric_uuid,\n                    self.run_uuid,\n                    job_uuid,\n                    metric.name,\n                    metric.value,\n                    metric.units,\n                    metric.lower_is_better,\n                    metric._pod_version,  # pylint: disable=protected-access\n                    metric._pod_serialization_version,  # pylint: disable=protected-access\n                )\n            )\n            for classifier in metric.classifiers:\n                classifier_uuid = uuid.uuid4()\n                self.cursor.execute(\n                    self.sql_command['create_classifier'],\n                    (\n                        classifier_uuid,\n                        None,\n                        metric_uuid,\n                        None,\n                        None,\n                        classifier,\n                        metric.classifiers[classifier],\n     
               )\n                )\n            if record_in_added:\n                self.metrics_already_added.append(metric)\n\n    def sql_upload_artifacts(self, output_object, record_in_added=False, check_uniqueness=False, job_uuid=None):\n        ''' Uploads artifacts to the database.\n        record_in_added will record the artifacts added in artifacts_aleady_added\n        check_uniqueness will ensure artifacts in artifacts_already_added do not get added again\n        '''\n        for artifact in output_object.artifacts:\n            if artifact in self.artifacts_already_added and check_uniqueness:\n                self.logger.debug('Skipping uploading {} as already added'.format(artifact))\n                continue\n\n            if artifact in self.artifacts_already_added:\n                self._sql_update_artifact(artifact, output_object)\n            else:\n                self._sql_create_artifact(artifact, output_object, record_in_added, job_uuid)\n\n    def sql_upload_parameters(self, parameter_type, parameter_dict, owner_id=None, job_uuid=None):\n        # Note, currently no augmentation parameters are workload specific, but in the future\n        # this may change\n        augmentation_id = None\n        resource_getter_id = None\n\n        if parameter_type not in ['workload', 'resource_getter', 'augmentation', 'runtime']:\n            # boot parameters are not yet implemented\n            # device parameters are redundant due to the targets table\n            raise NotImplementedError(\"{} is not a valid parameter type.\".format(parameter_type))\n\n        if parameter_type == \"resource_getter\":\n            resource_getter_id = owner_id\n        elif parameter_type == \"augmentation\":\n            augmentation_id = owner_id\n\n        for parameter in parameter_dict:\n            parameter_uuid = uuid.uuid4()\n            self.cursor.execute(\n                self.sql_command['create_parameter'],\n                (\n                    
parameter_uuid,\n                    self.run_uuid,\n                    job_uuid,\n                    augmentation_id,\n                    resource_getter_id,\n                    parameter,\n                    json.dumps(parameter_dict[parameter]),\n                    str(type(parameter_dict[parameter])),\n                    parameter_type,\n                )\n            )\n\n    def connect_to_database(self):\n        dsn = \"dbname={} user={} password={} host={} port={}\".format(\n            self.dbname, self.username, self.password, self.host, self.port)\n        try:\n            self.conn = connect(dsn=dsn)\n        except Psycopg2Error as e:\n            raise OutputProcessorError(\n                \"Database error, if the database doesn't exist, \"\n                + \"please use 'wa create database' to create the database: {}\".format(e))\n        self.cursor = self.conn.cursor()\n        self.verify_schema_versions()\n\n    def execute_sql_line_by_line(self, sql):\n        cursor = self.conn.cursor()\n        for line in sql.replace('\\n', \"\").replace(\";\", \";\\n\").split(\"\\n\"):\n            if line and not line.startswith('--'):\n                cursor.execute(line)\n        cursor.close()\n        self.conn.commit()\n        self.conn.reset()\n\n    def verify_schema_versions(self):\n        local_schema_version, db_schema_version = get_schema_versions(self.conn)\n        if local_schema_version != db_schema_version:\n            self.cursor.close()\n            self.cursor = None\n            self.conn.commit()\n            self.conn.reset()\n            msg = 'The current database schema is v{} however the local ' \\\n                  'schema version is v{}. 
Please update your database ' \\\n                  'with the create command'\n            raise OutputProcessorError(msg.format(db_schema_version, local_schema_version))\n\n    def _sql_write_file_lobject(self, source, lobject):\n        with open(source) as lobj_file:\n            lobj_data = lobj_file.read()\n        if len(lobj_data) > 50000000:  # Notify if LO inserts larger than 50MB\n            self.logger.debug(\"Inserting large object of size {}\".format(len(lobj_data)))\n        lobject.write(lobj_data)\n        self.conn.commit()\n\n    def _sql_write_dir_lobject(self, source, lobject):\n        with tarfile.open(fileobj=lobject, mode='w|gz') as lobj_dir:\n            lobj_dir.add(source, arcname='.')\n        self.conn.commit()\n\n    def _sql_update_artifact(self, artifact, output_object):\n        self.logger.debug('Updating artifact: {}'.format(artifact))\n        lobj = self.conn.lobject(oid=self.artifacts_already_added[artifact], mode='w')\n        if artifact.is_dir:\n            self._sql_write_dir_lobject(os.path.join(output_object.basepath, artifact.path), lobj)\n        else:\n            self._sql_write_file_lobject(os.path.join(output_object.basepath, artifact.path), lobj)\n\n    def _sql_create_artifact(self, artifact, output_object, record_in_added=False, job_uuid=None):\n        self.logger.debug('Uploading artifact: {}'.format(artifact))\n        artifact_uuid = uuid.uuid4()\n        lobj = self.conn.lobject()\n        loid = lobj.oid\n        large_object_uuid = uuid.uuid4()\n        if artifact.is_dir:\n            self._sql_write_dir_lobject(os.path.join(output_object.basepath, artifact.path), lobj)\n        else:\n            self._sql_write_file_lobject(os.path.join(output_object.basepath, artifact.path), lobj)\n\n        self.cursor.execute(\n            self.sql_command['create_large_object'],\n            (\n                large_object_uuid,\n                loid,\n            )\n        )\n        self.cursor.execute(\n        
    self.sql_command['create_artifact'],\n            (\n                artifact_uuid,\n                self.run_uuid,\n                job_uuid,\n                artifact.name,\n                large_object_uuid,\n                artifact.description,\n                str(artifact.kind),\n                artifact.is_dir,\n                artifact._pod_version,  # pylint: disable=protected-access\n                artifact._pod_serialization_version,  # pylint: disable=protected-access\n            )\n        )\n        for classifier in artifact.classifiers:\n            classifier_uuid = uuid.uuid4()\n            self.cursor.execute(\n                self.sql_command['create_classifier'],\n                (\n                    classifier_uuid,\n                    artifact_uuid,\n                    None,\n                    None,\n                    None,\n                    classifier,\n                    artifact.classifiers[classifier],\n                )\n            )\n        if record_in_added:\n            self.artifacts_already_added[artifact] = loid\n"
  },
  {
    "path": "wa/output_processors/sqlite.py",
    "content": "#    Copyright 2013-2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\n# pylint: disable=attribute-defined-outside-init\n\nimport os\nimport sqlite3\nimport uuid\nfrom datetime import datetime, timedelta\nfrom contextlib import contextmanager\n\nfrom wa import OutputProcessor, Parameter, OutputProcessorError\nfrom wa.utils.serializer import json\nfrom wa.utils.types import boolean\n\n\n# IMPORTANT: when updating this schema, make sure to bump the version!\nSCHEMA_VERSION = '0.0.2'\nSCHEMA = [\n    '''CREATE TABLE  runs (\n        uuid text,\n        start_time datetime,\n        end_time datetime,\n        duration integer\n    )''',\n    '''CREATE TABLE  workload_specs (\n        id text,\n        run_oid text,\n        number_of_iterations integer,\n        label text,\n        workload_name text,\n        boot_parameters text,\n        runtime_parameters text,\n        workload_parameters text\n    )''',\n    '''CREATE TABLE  metrics (\n        spec_oid int,\n        iteration integer,\n        metric text,\n        value text,\n        units text,\n        lower_is_better integer\n    )''',\n    '''CREATE VIEW results AS\n       SELECT uuid as run_uuid, spec_id, label as workload, iteration, metric, value, units, lower_is_better\n       FROM metrics AS m INNER JOIN (\n            SELECT ws.OID as spec_oid, ws.id as spec_id, uuid, label\n            FROM workload_specs AS ws INNER JOIN runs AS r ON ws.run_oid = r.OID\n     
  ) AS wsr ON wsr.spec_oid = m.spec_oid\n    ''',\n    '''CREATE TABLE  __meta (\n        schema_version text\n    )''',\n    '''INSERT INTO __meta VALUES (\"{}\")'''.format(SCHEMA_VERSION),\n]\n\n\nsqlite3.register_adapter(datetime, lambda x: x.isoformat())\nsqlite3.register_adapter(timedelta, lambda x: x.total_seconds())\nsqlite3.register_adapter(uuid.UUID, str)\n\n\nclass SqliteResultProcessor(OutputProcessor):\n\n    name = 'sqlite'\n    description = \"\"\"\n    Stores results in an sqlite database.\n\n    This may be used to accumulate results of multiple runs in a single file.\n\n    \"\"\"\n    parameters = [\n        Parameter('database', default=None,\n                  global_alias='sqlite_database',\n                  description=\"\"\"\n                  Full path to the sqlite database to be used. If this is not\n                  specified then a new database file will be created in the\n                  output directory. This setting can be used to accumulate\n                  results from multiple runs in a single database. If the\n                  specified file does not exist, it will be created, however\n                  the directory of the file must exist.\n\n                  .. note:: The value must resolve to an absolute path,\n                            relative paths are not allowed; however the\n                            value may contain environment variables and/or\n                            the home reference \"~\".\n                  \"\"\"),\n        Parameter('overwrite', kind=boolean, default=False,\n                  global_alias='sqlite_overwrite',\n                  description=\"\"\"\n                  If ``True``, this will overwrite the database file\n                  if it already exists. 
If ``False`` (the default) data\n                  will be added to the existing file (provided schema\n                  versions match -- otherwise an error will be raised).\n                  \"\"\"),\n\n    ]\n\n    def __init__(self, *args, **kwargs):\n        super(SqliteResultProcessor, self).__init__(*args, **kwargs)\n        self._last_spec = None\n        self._run_oid = None\n        self._spec_oid = None\n        self._run_initialized = False\n\n    def export_job_output(self, job_output, target_info, run_output):  # pylint: disable=unused-argument\n        if not self._run_initialized:\n            self._init_run(run_output)\n\n        if self._last_spec != job_output.spec:\n            self._update_spec(job_output.spec)\n\n        metrics = [(self._spec_oid, job_output.iteration, m.name, str(m.value), m.units, int(m.lower_is_better))\n                   for m in job_output.metrics]\n        if metrics:\n            with self._open_connection() as conn:\n                conn.executemany('INSERT INTO metrics VALUES (?,?,?,?,?,?)', metrics)\n\n    def export_run_output(self, run_output, target_info):  # pylint: disable=unused-argument\n        if not self._run_initialized:\n            self._init_run(run_output)\n\n        metrics = [(self._spec_oid, run_output.iteration, m.name, str(m.value), m.units, int(m.lower_is_better))\n                   for m in run_output.metrics]\n        if metrics:\n            with self._open_connection() as conn:\n                conn.executemany('INSERT INTO metrics VALUES (?,?,?,?,?,?)', metrics)\n\n        info = run_output.info\n        with self._open_connection() as conn:\n            conn.execute('''UPDATE runs SET start_time=?, end_time=?, duration=?\n                            WHERE OID=?''', (info.start_time, info.end_time, info.duration, self._run_oid))\n\n    def _init_run(self, run_output):\n        if not self.database:  # pylint: disable=access-member-before-definition\n            self.database = 
os.path.join(run_output.basepath, 'results.sqlite')\n        self.database = os.path.expandvars(os.path.expanduser(self.database))\n\n        if not os.path.exists(self.database):\n            self._init_db()\n        elif self.overwrite:  # pylint: disable=no-member\n            os.remove(self.database)\n            self._init_db()\n        else:\n            self._validate_schema_version()\n        self._update_run(run_output.info.uuid)\n\n        # if the database file happens to be in the output directory, add it as an\n        # artifiact; if it isn't, then RunOutput doesn't need to keep track of it.\n        if not os.path.relpath(self.database, run_output.basepath).startswith('..'):\n            run_output.add_artifact('sqlitedb', self.database, kind='export')\n\n        self._run_initialized = True\n\n    def _init_db(self):\n        with self._open_connection() as conn:\n            for command in SCHEMA:\n                conn.execute(command)\n\n    def _validate_schema_version(self):\n        with self._open_connection() as conn:\n            try:\n                c = conn.execute('SELECT schema_version FROM __meta')\n                found_version = c.fetchone()[0]\n            except sqlite3.OperationalError:\n                message = '{} does not appear to be a valid WA results database.'.format(self.database)\n                raise OutputProcessorError(message)\n            if found_version != SCHEMA_VERSION:\n                message = 'Schema version in {} ({}) does not match current version ({}).'\n                raise OutputProcessorError(message.format(self.database, found_version, SCHEMA_VERSION))\n\n    def _update_run(self, run_uuid):\n        with self._open_connection() as conn:\n            conn.execute('INSERT INTO runs (uuid) VALUES (?)', (run_uuid,))\n            conn.commit()\n            c = conn.execute('SELECT OID FROM runs WHERE uuid=?', (run_uuid,))\n            self._run_oid = c.fetchone()[0]\n\n    def _update_spec(self, 
spec):\n        self._last_spec = spec\n        spec_tuple = (spec.id, self._run_oid, spec.iterations, spec.label, spec.workload_name,\n                      json.dumps(spec.boot_parameters.to_pod()),\n                      json.dumps(spec.runtime_parameters.to_pod()),\n                      json.dumps(spec.workload_parameters.to_pod()))\n        with self._open_connection() as conn:\n            conn.execute('INSERT INTO workload_specs VALUES (?,?,?,?,?,?,?,?)', spec_tuple)\n            conn.commit()\n            c = conn.execute('SELECT OID FROM workload_specs WHERE run_oid=? AND id=?', (self._run_oid, spec.id))\n            self._spec_oid = c.fetchone()[0]\n\n    @contextmanager\n    def _open_connection(self):\n        conn = sqlite3.connect(self.database)\n        try:\n            yield conn\n        finally:\n            conn.commit()\n"
  },
  {
    "path": "wa/output_processors/status.py",
    "content": "#    Copyright 2013-2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\n\n# pylint: disable=R0201\nimport time\nfrom collections import Counter\n\nfrom wa.framework.output import Status\nfrom wa.framework.output_processor import OutputProcessor\nfrom wa.utils.misc import write_table\n\n\nclass StatusTxtReporter(OutputProcessor):\n    name = 'status'\n    description = \"\"\"\n    Outputs a txt file containing general status information about which runs\n    failed and which were successful\n\n    \"\"\"\n\n    def process_run_output(self, output, target_info):  # pylint: disable=unused-argument\n        counter = Counter()\n        for jo in output.jobs:\n            counter[jo.status] += 1\n\n        outfile = output.get_path('status.txt')\n        self.logger.info('Status available in {}'.format(outfile))\n        with open(outfile, 'w') as wfh:\n            wfh.write('Run name: {}\\n'.format(output.info.run_name))\n            wfh.write('Run status: {}\\n'.format(output.status))\n            wfh.write('Date: {}\\n'.format(time.strftime(\"%c\")))\n            if output.events:\n                wfh.write('Events:\\n')\n                for event in output.events:\n                    wfh.write('\\t{}\\n'.format(event.summary))\n\n            txt = '{}/{} iterations completed without error\\n'\n            wfh.write(txt.format(counter[Status.OK], len(output.jobs)))\n            wfh.write('\\n')\n            status_lines = 
[list(map(str, [o.id, o.label, o.iteration, o.status,\n                            o.event_summary]))\n                            for o in output.jobs]\n            write_table(status_lines, wfh, align='<<>><')\n\n        output.add_artifact('run_status_summary', 'status.txt', 'export')\n"
  },
  {
    "path": "wa/output_processors/targz.py",
    "content": "#    Copyright 2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\nimport shutil\nimport tarfile\n\nfrom wa import OutputProcessor, Parameter\nfrom wa.framework import signal\n\n\nclass TargzProcessor(OutputProcessor):\n\n    name = 'targz'\n\n    description = '''\n    Create a tarball of the output directory.\n\n    This will create a gzip-compressed tarball of the output directory. By\n    default, it will be created at the same level and will have the same name\n    as the output directory but with a .tar.gz extension.\n    '''\n\n    parameters = [\n        Parameter('outfile',\n                  description='''\n                  The name of the output file to be used. If this is not an\n                  absolute path, the file will be created relative to the\n                  directory in which WA was invoked. If this contains\n                  subdirectories, they must already exist.\n\n                  The name may contain named format specifiers. Any of the\n                  ``RunInfo`` fields can be named, resulting in the value of\n                  that field (e.g. 
``'start_time'``) being formatted into the\n                  tarball name.\n\n                  By default, the output file will be created at the same\n                  level, share the name of the WA output directory (but with\n                  .tar.gz extension).\n                  '''),\n        Parameter('delete-output', kind=bool, default=False,\n                  description='''\n                  if set to ``True``, WA output directory will be deleted after\n                  the tarball is created.\n                  '''),\n    ]\n\n    def initialize(self, context):\n        if self.delete_output:\n            self.logger.debug('Registering RUN_FINALIZED handler.')\n            signal.connect(self.delete_output_directory, signal.RUN_FINALIZED, priority=-100)\n\n    def export_run_output(self, run_output, target_info):  # pylint: disable=unused-argument\n        if self.outfile:\n            outfile_path = self.outfile.format(**run_output.info.to_pod())\n        else:\n            outfile_path = run_output.basepath.rstrip('/') + '.tar.gz'\n\n        self.logger.debug('Creating {}'.format(outfile_path))\n        with tarfile.open(outfile_path, 'w:gz') as tar:\n            tar.add(run_output.basepath)\n\n    def delete_output_directory(self, context):\n        self.logger.debug('Deleting output directory')\n        shutil.rmtree(context.run_output.basepath)\n"
  },
  {
    "path": "wa/output_processors/uxperf.py",
    "content": "#    Copyright 2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\n\nfrom wa import OutputProcessor\nfrom wa.utils.android import LogcatParser\n\n\nclass UxperfProcessor(OutputProcessor):\n\n    name = 'uxperf'\n\n    description = '''\n    Parse logcat for UX_PERF markers to produce performance metrics for\n    workload actions using specified instrumentation.\n    An action represents a series of UI interactions to capture.\n    NOTE: The UX_PERF markers are turned off by default and must be enabled in\n    an agenda file by setting ``markers_enabled`` for the workload to ``True``.\n    '''\n\n    # pylint: disable=too-many-locals,unused-argument\n    def process_job_output(self, output, target_info, job_output):\n        logcat = output.get_artifact('logcat')\n        if not logcat:\n            return\n\n        parser = LogcatParser()\n        start_times = {}\n\n        filepath = output.get_path(logcat.path)\n        for entry in parser.parse(filepath):\n            if not entry.tag == 'UX_PERF':\n                continue\n\n            parts = entry.message.split()\n            if len(parts) != 3:\n                message = 'Unexpected UX_PERF message @ {}: {}'\n                self.logger.warning(message.format(entry.timestamp, entry.message))\n                continue\n\n            action, state, when = parts\n            when = int(when)\n            if state == 'start':\n                if action in start_times:\n  
                  self.logger.warning('start before end @ {}'.format(entry.timestamp))\n                start_times[action] = when\n            elif state == 'end':\n                start_time = start_times.pop(action, None)\n                if start_time is None:\n                    self.logger.warning('end without start @ {}'.format(entry.timestamp))\n                    continue\n\n                duration = (when - start_time) / 1000\n                metric_name = '{}_duration'.format(action)\n                output.add_metric(metric_name, duration, 'microseconds',\n                                  lower_is_better=True)\n\n            else:\n                self.logger.warning('Unexpected state \"{}\" @ {}'.format(state, entry.timestamp))\n"
  },
  {
    "path": "wa/tools/revent/Makefile",
    "content": "# CROSS_COMPILE=aarch64-linux-gnu- make\n#\nCC=gcc\n\nifdef DEBUG\n\tCFLAGS=-static -lc -g\nelse\n\tCFLAGS=-static -lc -O2\nendif\n\nrevent: revent.c\n\t$(CROSS_COMPILE)$(CC) $(CFLAGS) revent.c -o revent\n\nclean:\n\trm -rf revent\n\n.PHONY: clean\n"
  },
  {
    "path": "wa/tools/revent/revent.c",
    "content": "/*    Copyright 2012-2017 ARM Limited\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n#include <ctype.h>\n#include <errno.h>\n#include <fcntl.h>\n#include <limits.h>\n#include <signal.h>\n#include <stdint.h>\n#include <stdio.h>\n#include <stdlib.h>\n#include <string.h>\n#include <time.h>\n#include <unistd.h>\n#include <sys/ioctl.h>\n#include <sys/stat.h>\n#include <linux/input.h>\n#include <linux/uinput.h>\n\n#define die(args...) do { \\\n\tfprintf(stderr, \"ERROR: \"); \\\n\tfprintf(stderr, args);   \\\n\tfprintf(stderr, \"\\n\");  \\\n\texit(EXIT_FAILURE); \\\n} while(0)\n\n#define dprintf(args...) 
if (verbose) printf(args)\n\n#define INPDEV_MAX_DEVICES  16\n#define INPDEV_MAX_PATH     30\n#define MAX_NAME_LEN 255\n#define EV_BITS_SIZE (EV_MAX / 8 + 1)\n#define KEY_BITS_SIZE (KEY_MAX / 8 + 1)\n\n\n#define HEADER_PADDING_SIZE 6\n#define EVENT_PADDING_SIZE 4\n\nconst char MAGIC[] = \"REVENT\";\n\n// NOTE: This should be incremented if any changes are made to the file format.\n//       Should that be the case, also make sure to update the format description\n//       in doc/source/revent.rst and the Python parser in wa/utils/revent.py.\nuint16_t FORMAT_VERSION = 3;\n\ntypedef enum {\n\tFALSE=0,\n\tTRUE\n} bool_t;\n\ntypedef enum {\n\tGENERAL_MODE=0,\n\tGAMEPAD_MODE,\n\tINVALID_MODE  // should be last\n} recording_mode_t;\n\ntypedef enum {\n\tRECORD_COMMAND=0,\n\tREPLAY_COMMAND,\n\tDUMP_COMMAND,\n\tINFO_COMMAND,\n\tINVALID_COMMAND\n} revent_command_t;\n\ntypedef struct {\n\tstruct input_absinfo absinfo;\n\tint ev_code;\n} absinfo_t;\n\ntypedef struct {\n\tstruct input_id id;\n\tchar name[MAX_NAME_LEN];\n\tchar ev_bits[EV_BITS_SIZE];\n\tchar abs_bits[KEY_BITS_SIZE];\n\tchar rel_bits[KEY_BITS_SIZE];\n\tchar key_bits[KEY_BITS_SIZE];\n\tuint32_t num_absinfo;\n\tabsinfo_t absinfo[ABS_CNT];\n} device_info_t;\n\ntypedef struct {\n\trevent_command_t command;\n\trecording_mode_t mode;\n\tint32_t record_time;\n\tint32_t device_number;\n\tchar *file;\n} revent_args_t;\n\ntypedef struct {\n\tint32_t num;\n\tchar **paths;\n\tint *fds;\n\tint max_fd;\n} input_devices_t;\n\ntypedef struct {\n\tint16_t dev_idx;\n\tstruct input_event event;\n} replay_event_t;\n\ntypedef struct {\n\tuint16_t version;\n\trecording_mode_t mode;\n} revent_record_desc_t;\n\ntypedef struct {\n\trevent_record_desc_t desc;\n\tinput_devices_t devices;\n\tdevice_info_t *gamepad_info;\n\tuint64_t num_events;\n\tstruct timeval start_time;\n\tstruct timeval end_time;\n\treplay_event_t *events;\n} revent_recording_t;\n\nbool_t verbose = FALSE;\nbool_t wait_for_stdin = TRUE;\n\nbool_t is_numeric(char 
*string)\n{\n\tint len = strlen(string);\n\n\tint i = 0;\n\twhile(i < len)\n\t{\n\t\tif(!isdigit(string[i]))\n\t\t\treturn FALSE;\n\t\ti++;\n\t}\n\n\treturn TRUE;\n}\n\nint test_bit(const char *mask, int bit) {\n\treturn mask[bit / 8] & (1 << (bit % 8));\n}\n\nint count_bits(const char *mask) {\n\tint count = 0, i;\n\tstatic const uint8_t nybble_lookup[16] = {\n\t\t0, 1, 1, 2, 1, 2, 2, 3,\n\t\t1, 2, 2, 3, 2, 3, 3, 4\n\t};\n\n\tfor (i = 0; i < KEY_MAX/8 + 1; i++) {\n\t\tchar byte = mask[i];\n\t\tcount +=  nybble_lookup[byte & 0x0F] + nybble_lookup[byte >> 4];\n\t}\n\n\treturn count;\n}\n\n/*\n * An input device is considered to be a gamepad if it supports\n * ABS x and Y axes and the four gamepad buttons (variously known as\n * square/triangle/circle/X, A/B/X/Y, or north/south/east/west).\n */\nbool_t is_gamepad(device_info_t *dev)\n{\n\tif (!test_bit(dev->abs_bits, ABS_X))\n\t\treturn FALSE;\n\tif (!test_bit(dev->abs_bits, ABS_Y))\n\t\treturn FALSE;\n\tif (!test_bit(dev->key_bits, BTN_GAMEPAD))\n\t\treturn FALSE;\n\treturn TRUE;\n}\n\noff_t get_file_size(const char *filename) {\n\tstruct stat st;\n\n\tif (stat(filename, &st) == 0)\n\t\treturn st.st_size;\n\n\tdie(\"Cannot determine size of %s: %s\", filename, strerror(errno));\n}\n\nint get_device_info(int fd, device_info_t *info) {\n\tbzero(info, sizeof(device_info_t));\n\n\tif (ioctl(fd, EVIOCGID, &info->id) < 0)\n\t\treturn errno;\n\n\tif (ioctl(fd, EVIOCGNAME(MAX_NAME_LEN * sizeof(char)), &info->name) < 0)\n\t\treturn errno;\n\n\tif (ioctl(fd, EVIOCGBIT(0, sizeof(info->ev_bits)), &info->ev_bits) < 0)\n\t\treturn errno;\n\n\tint ev_type;\n\tfor (ev_type = 0 ; ev_type < EV_MAX; ev_type++) {\n\t\tif (test_bit(info->ev_bits, ev_type)) {\n\n\t\t\tif (ev_type == EV_ABS) {\n\t\t\t\tioctl(fd, EVIOCGBIT(ev_type, sizeof(info->abs_bits)), &info->abs_bits);\n\n\t\t\t\tint ev_code;\n\t\t\t\tfor (ev_code = 0; ev_code < KEY_MAX; ev_code++) {\n\t\t\t\t\tif (test_bit(info->abs_bits, ev_code)) {\n\t\t\t\t\t\tabsinfo_t *inf = 
&info->absinfo[info->num_absinfo++];\n\t\t\t\t\t\tinf->ev_code = ev_code;\n\t\t\t\t\t\tioctl(fd, EVIOCGABS(ev_code), &inf->absinfo);\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t} else if (ev_type == EV_REL) {\n\t\t\t\tioctl(fd, EVIOCGBIT(ev_type, sizeof(info->rel_bits)), &info->rel_bits);\n\t\t\t} else if (ev_type == EV_KEY) {\n\t\t\t\tioctl(fd, EVIOCGBIT(ev_type, sizeof(info->key_bits)), &info->key_bits);\n\t\t\t}\n\t\t}\n\t}\n\n\treturn 0;\n}\n\nvoid destroy_replay_device(int fd)\n{\n\tif(ioctl(fd, UI_DEV_DESTROY) < 0)\n\t\tdie(\"Could not destroy replay device\");\n}\n\ninline void set_evbit(int fd, int bit)\n{\n\tif(ioctl(fd, UI_SET_EVBIT, bit) < 0)\n\t\tdie(\"Could not set EVBIT %i\", bit);\n}\n\ninline void set_keybit(int fd, int bit)\n{\n\tif(ioctl(fd, UI_SET_KEYBIT, bit) < 0)\n\t\tdie(\"Could not set KEYBIT %i\", bit);\n}\n\ninline void set_absbit(int fd, int bit)\n{\n\tif(ioctl(fd, UI_SET_ABSBIT, bit) < 0)\n\t\tdie(\"Could not set ABSBIT %i\", bit);\n}\n\ninline void set_relbit(int fd, int bit)\n{\n\tif(ioctl(fd, UI_SET_RELBIT, bit) < 0)\n\t\tdie(\"Could not set RELBIT %i\", bit);\n}\n\ninline void block_sigterm(sigset_t *oldset)\n{\n\tsigset_t sigset;\n\tsigemptyset(&sigset);\n\tsigaddset(&sigset, SIGTERM);\n\tsigprocmask(SIG_BLOCK, &sigset, oldset);\n}\n\n// Events are recorded with their original timestamps, but for playback, we\n// want to treat timestamps as deltas from event zero.\nvoid adjust_timestamps(revent_recording_t *recording)\n{\n\tuint64_t i;\n\tstruct timeval time_zero, time_delta;\n\n\ttime_zero.tv_sec = recording->start_time.tv_sec;\n\ttime_zero.tv_usec = recording->start_time.tv_usec;\n\n\tfor(i = 0; i < recording->num_events; i++) {\n\t\ttimersub(&recording->events[i].event.time, &time_zero, &time_delta);\n\t\trecording->events[i].event.time.tv_sec = time_delta.tv_sec;\n\t\trecording->events[i].event.time.tv_usec = time_delta.tv_usec;\n\t}\n\ttimersub(&recording->end_time, &time_zero, &time_delta);\n\trecording->end_time.tv_sec = 
time_delta.tv_sec;\n\trecording->end_time.tv_usec = time_delta.tv_usec;\n}\n\nint write_record_header(int fd, const revent_record_desc_t *desc)\n{\n\tssize_t ret;\n\tchar padding[HEADER_PADDING_SIZE];\n\n\tret = write(fd, MAGIC, 6);\n\tif (ret < 6)\n\t\treturn errno;\n\n\tret = write(fd, &desc->version, sizeof(desc->version));\n\tif (ret < sizeof(desc->version))\n\t\treturn errno;\n\n\tret = write(fd, (uint16_t *)&desc->mode, sizeof(uint16_t));\n\tif (ret < sizeof(uint16_t))\n\t\treturn errno;\n\n\tbzero(padding, HEADER_PADDING_SIZE);\n\tret = write(fd, padding, HEADER_PADDING_SIZE);\n\tif (ret < HEADER_PADDING_SIZE)\n\t\treturn errno;\n\n\treturn 0;\n}\n\nint read_record_header(int fd, revent_record_desc_t *desc)\n{\n\tchar start[7], padding[HEADER_PADDING_SIZE];\n\tssize_t ret;\n\n\tret = read(fd, start, 6);\n\tif (ret < 6)\n\t\treturn errno;\n\n\tstart[6] = '\\0';\n\tif (strcmp(start, MAGIC))\n\t\treturn EINVAL;\n\n\tret = read(fd, &desc->version, sizeof(desc->version));\n\tif (ret < sizeof(desc->version))\n\t\treturn errno;\n\n\tif (desc->version >= 2) {\n\t\tret = read(fd, &desc->mode, sizeof(uint16_t));\n\t\tif (ret < sizeof(uint16_t))\n\t\t\treturn errno;\n\n\t\tret = read(fd, padding, HEADER_PADDING_SIZE);\n\t\tif (ret < HEADER_PADDING_SIZE)\n\t\t\treturn errno;\n\t} else {\n\t\t/* Version 1 supports only general recordings (mode 0) and\n\t\t * does not have padding\n\t\t */\n\t\tdesc->mode = GENERAL_MODE;\n\t}\n\n\treturn 0;\n}\n\nint write_general_input_devices(const input_devices_t *devices, FILE *fout)\n{\n\tsize_t ret;\n\tuint32_t path_len;\n\tint i;\n\n\tret = fwrite(&devices->num, sizeof(uint32_t), 1, fout);\n\tif (ret < 1) {\n\t\treturn errno;\n\t}\n\n\tfor (i = 0; i < devices->num; i++) {\n\t\tpath_len = (uint32_t)strlen(devices->paths[i]);\n\t\tret = fwrite(&path_len, sizeof(uint32_t), 1, fout);\n\t\tif (ret < 1) {\n\t\t\treturn errno;\n\t\t}\n\n\t\tret = fwrite(devices->paths[i], sizeof(char), path_len, fout);\n\t\tif (ret < path_len) 
{\n\t\t\treturn errno;\n\t\t}\n\t}\n\n\treturn 0;\n}\n\nint read_general_input_devices(input_devices_t *devices, FILE *fin)\n{\n\tsize_t ret;\n\tuint32_t path_len;\n\tint i;\n\n\tret = fread(&devices->num, sizeof(uint32_t), 1, fin);\n\tif (ret < 1) {\n\t\treturn EIO;\n\t}\n\n\tdevices->paths = malloc(sizeof(char *) * devices->num);\n\tif (devices->paths == NULL) {\n\t\treturn ENOMEM;\n\t}\n\n\tfor (i = 0; i < devices->num; i++) {\n\t\tret = fread(&path_len, sizeof(uint32_t), 1, fin);\n\t\tif (ret < 1) {\n\t\t\treturn EIO;\n\t\t}\n\n\t\tdevices->paths[i] = malloc(sizeof(char) * path_len + 1);\n\t\tif (devices->paths[i] == NULL) {\n\t\t\treturn ENOMEM;\n\t\t}\n\n\t\tret = fread(devices->paths[i], sizeof(char), path_len, fin);\n\t\tif (ret < path_len) {\n\t\t\treturn EIO;\n\t\t}\n\t\tdevices->paths[i][path_len] = '\\0';\n\t}\n\n\treturn 0;\n}\n\nint write_input_id(FILE *fout, const struct input_id *id)\n{\n\tint ret = 0;\n\tret += fwrite(&id->bustype, sizeof(uint16_t), 1, fout);\n\tret += fwrite(&id->vendor, sizeof(uint16_t), 1, fout);\n\tret += fwrite(&id->product, sizeof(uint16_t), 1, fout);\n\tret += fwrite(&id->version, sizeof(uint16_t), 1, fout);\n\tif (ret < 4)\n\t\treturn errno;\n\treturn 0;\n}\n\nint read_input_id(FILE *fin, struct input_id *id)\n{\n\tint ret = 0;\n\tret += fread(&id->bustype, sizeof(uint16_t), 1, fin);\n\tret += fread(&id->vendor, sizeof(uint16_t), 1, fin);\n\tret += fread(&id->product, sizeof(uint16_t), 1, fin);\n\tret += fread(&id->version, sizeof(uint16_t), 1, fin);\n\tif (ret < 4)\n\t\treturn errno;\n\treturn 0;\n}\n\nint write_absinfo(FILE *fout, const absinfo_t *info)\n{\n\tint ret = 0;\n\tret += fwrite(&info->ev_code, sizeof(int32_t), 1, fout);\n\tret += fwrite(&info->absinfo.value, sizeof(int32_t), 1, fout);\n\tret += fwrite(&info->absinfo.minimum, sizeof(int32_t), 1, fout);\n\tret += fwrite(&info->absinfo.maximum, sizeof(int32_t), 1, fout);\n\tret += fwrite(&info->absinfo.fuzz, sizeof(int32_t), 1, fout);\n\tret += 
fwrite(&info->absinfo.flat, sizeof(int32_t), 1, fout);\n\tret += fwrite(&info->absinfo.resolution, sizeof(int32_t), 1, fout);\n\tif (ret < 7)\n\t\treturn errno;\n\treturn 0;\n}\n\nint read_absinfo(FILE *fin, absinfo_t *info)\n{\n\tint ret = 0;\n\tret += fread(&info->ev_code, sizeof(int32_t), 1, fin);\n\tret += fread(&info->absinfo.value, sizeof(int32_t), 1, fin);\n\tret += fread(&info->absinfo.minimum, sizeof(int32_t), 1, fin);\n\tret += fread(&info->absinfo.maximum, sizeof(int32_t), 1, fin);\n\tret += fread(&info->absinfo.fuzz, sizeof(int32_t), 1, fin);\n\tret += fread(&info->absinfo.flat, sizeof(int32_t), 1, fin);\n\tret += fread(&info->absinfo.resolution, sizeof(int32_t), 1, fin);\n\tif (ret < 7)\n\t\treturn errno;\n\treturn 0;\n}\n\nint write_device_info(FILE *fout, const device_info_t *info)\n{\n\tint ret = write_input_id(fout, &info->id);\n\tif (ret)\n\t\treturn ret;\n\n\tuint32_t name_len = (uint32_t)strlen(info->name);\n\tret = fwrite(&name_len, sizeof(uint32_t), 1, fout);\n\tret += fwrite(info->name, sizeof(char), name_len, fout);\n\tif (ret < (name_len + 1))\n\t\treturn EIO;\n\n\tret = fwrite(info->ev_bits, sizeof(char), EV_BITS_SIZE, fout);\n\tret += fwrite(info->abs_bits, sizeof(char), KEY_BITS_SIZE, fout);\n\tret += fwrite(info->rel_bits, sizeof(char), KEY_BITS_SIZE, fout);\n\tret += fwrite(info->key_bits, sizeof(char), KEY_BITS_SIZE, fout);\n\tif (ret < (EV_BITS_SIZE + KEY_BITS_SIZE * 3))\n\t\treturn EIO;\n        printf(\"EV_BITS_SIZE: %d\\n\", EV_BITS_SIZE);\n        printf(\"KEY_BITS_SIZE: %d\\n\", KEY_BITS_SIZE);\n\n\tret = fwrite(&info->num_absinfo, sizeof(uint32_t), 1, fout);\n\tif (ret < 1)\n\t\treturn errno;\n\n\tint i;\n\tfor (i = 0; i < info->num_absinfo; i++) {\n\t\tret = write_absinfo(fout, &info->absinfo[i]);\n\t\tif (ret)\n\t\t\treturn ret;\n\t}\n\n\treturn 0;\n}\n\nint read_device_info(FILE *fin, device_info_t *info)\n{\n\tint ret = read_input_id(fin, &info->id);\n\tif (ret)\n\t\treturn ret;\n\n\tuint32_t name_len = 
0;\n\tfread(&name_len, sizeof(uint32_t), 1, fin);\n\tif (!name_len)\n\t\treturn EIO;\n\n\tret += fread(info->name, sizeof(char), name_len, fin);\n\tif (ret < name_len)\n\t\treturn EIO;\n\tinfo->name[name_len] = '\\0';\n\n\tret = fread(info->ev_bits, sizeof(char), EV_BITS_SIZE, fin);\n\tret += fread(info->abs_bits, sizeof(char), KEY_BITS_SIZE, fin);\n\tret += fread(info->rel_bits, sizeof(char), KEY_BITS_SIZE, fin);\n\tret += fread(info->key_bits, sizeof(char), KEY_BITS_SIZE, fin);\n\tif (ret < (EV_BITS_SIZE + KEY_BITS_SIZE * 3))\n\t\treturn EIO;\n\n\tret = fread(&info->num_absinfo, sizeof(uint32_t), 1, fin);\n\tif (ret < 1)\n\t\treturn errno;\n\n\tint i;\n\tfor (i = 0; i < info->num_absinfo; i++) {\n\t\tret = read_absinfo(fin, &info->absinfo[i]);\n\t\tif (ret)\n\t\t\treturn ret;\n\t}\n\n\treturn 0;\n}\n\nvoid print_device_info(device_info_t *info)\n{\n\tprintf(\"device name: %s\\n\", info->name);\n\tprintf(\"bustype: 0x%x vendor: 0x%x product: 0x%x version: 0x%x\\n\",\n                info->id.bustype, info->id.vendor, info->id.product, info->id.version);\n\tprintf(\"abs_bits: %d\\n\", count_bits(info->abs_bits));\n\tprintf(\"rel_bits: %d\\n\", count_bits(info->rel_bits));\n\tprintf(\"key_bits: %d\\n\", count_bits(info->key_bits));\n\tprintf(\"num_absinfo: %ld\\n\", info->num_absinfo);\n\n\tint i;\n\tprintf(\"KEY: \");\n\tfor (i = 0; i < KEY_MAX; i++) {\n\t\tif (test_bit(info->key_bits, i)) {\n\t\t\tprintf(\"%04x \", i);\n\t\t}\n\t}\n\tprintf(\"\\n\");\n\n\tstruct input_absinfo *inf;\n\tint ev_code;\n\tprintf(\"ABS:\\n\");\n\tfor (i = 0; i < info->num_absinfo; i++) {\n\t\tev_code = info->absinfo[i].ev_code;\n\t\tinf = &info->absinfo[i].absinfo;\n\t\tprintf(\"%04x  : min %i, max %i, fuzz %0i, flat %i, res %i\\n\", ev_code,\n\t\t\t\tinf->minimum, inf->maximum, inf->fuzz, inf->flat,\n\t\t\t\tinf->resolution);\n\t}\n}\n\nint read_record_timestamps(FILE *fin, revent_recording_t *recording)\n{\n\tint ret;\n\tret = fread(&recording->start_time.tv_sec, sizeof(uint64_t), 1, 
fin);\n\tif (ret < 1)\n\t\treturn errno;\n\n\tret = fread(&recording->start_time.tv_usec, sizeof(uint64_t), 1, fin);\n\tif (ret < 1)\n\t\treturn errno;\n\n\tret = fread(&recording->end_time.tv_sec, sizeof(uint64_t), 1, fin);\n\tif (ret < 1)\n\t\treturn errno;\n\n\tret = fread(&recording->end_time.tv_usec, sizeof(uint64_t), 1, fin);\n\tif (ret < 1)\n\t\treturn errno;\n\n\treturn 0;\n}\n\nint write_replay_event(FILE *fout, const replay_event_t *ev)\n{\n\tsize_t ret;\n\tuint64_t time;\n\n\tret = fwrite(&ev->dev_idx, sizeof(uint16_t), 1, fout);\n\tif (ret < 1)\n\t\treturn errno;\n\t\n\ttime = (uint64_t)ev->event.time.tv_sec;\n\tret = fwrite(&time, sizeof(uint64_t), 1, fout);\n\tif (ret < 1)\n\t\treturn errno;\n\n\ttime = (uint64_t)ev->event.time.tv_usec;\n\tret = fwrite(&time, sizeof(uint64_t), 1, fout);\n\tif (ret < 1)\n\t\treturn errno;\n\n\tret = fwrite(&ev->event.type, sizeof(uint16_t), 1, fout);\n\tif (ret < 1)\n\t\treturn errno;\n\n\tret = fwrite(&ev->event.code, sizeof(uint16_t), 1, fout);\n\tif (ret < 1)\n\t\treturn errno;\n\n\tret = fwrite(&ev->event.value, sizeof(uint32_t), 1, fout);\n\tif (ret < 1)\n\t\treturn errno;\n\n\treturn 0;\n}\n\nint read_replay_event(FILE *fin, replay_event_t *ev)\n{\n\tsize_t ret;\n\n\tret = fread(&ev->dev_idx, sizeof(uint16_t), 1, fin);\n\tif (ret < 1)\n\t\treturn errno;\n\n\tret = fread(&ev->event.time.tv_sec, sizeof(uint64_t), 1, fin);\n\tif (ret < 1)\n\t\treturn errno;\n\n\tret = fread(&ev->event.time.tv_usec, sizeof(uint64_t), 1, fin);\n\tif (ret < 1)\n\t\treturn errno;\n\n\tret = fread(&ev->event.type, sizeof(uint16_t), 1, fin);\n\tif (ret < 1)\n\t\treturn errno;\n\n\tret = fread(&ev->event.code, sizeof(uint16_t), 1, fin);\n\tif (ret < 1)\n\t\treturn errno;\n\n\tret = fread(&ev->event.value, sizeof(uint32_t), 1, fin);\n\tif (ret < 1)\n\t\treturn errno;\n\n\treturn 0;\n}\n\nint read_legacy_replay_event(int fdin, replay_event_t* ev)\n{\n\tsize_t rb;\n\tchar padding[EVENT_PADDING_SIZE];\n\n\trb = read(fdin, &(ev->dev_idx), 
sizeof(int32_t));\n\tif (rb < (int)sizeof(int32_t)){\n\t\t//Allow for abrupt ending of legacy recordings.\n\t\tif (!errno)\n\t\t\treturn EOF;\n\t\treturn errno;\n\t}\n\trb = read(fdin, &padding, EVENT_PADDING_SIZE);\n\tif (rb < (int)sizeof(int32_t))\n\t\treturn errno;\n\n\tstruct timeval time;\n\tuint64_t temp_time;\n\trb = read(fdin, &temp_time, sizeof(uint64_t));\n\tif (rb < (int)sizeof(uint64_t))\n\t\treturn errno;\n\ttime.tv_sec = (time_t)temp_time;\n\n\trb = read(fdin, &temp_time, sizeof(uint64_t));\n\tif (rb < (int)sizeof(uint64_t))\n\t\treturn errno;\n\ttime.tv_usec = (suseconds_t)temp_time;\n\n\tev->event.time = time;\n\n\trb = read(fdin, &(ev->event.type), sizeof(uint16_t));\n\tif (rb < (int)sizeof(uint16_t))\n\t\treturn errno;\n\n\trb = read(fdin, &(ev->event.code), sizeof(uint16_t));\n\tif (rb < (int)sizeof(uint16_t))\n\t\treturn errno;\n\n\trb = read(fdin, &(ev->event.value), sizeof(int32_t));\n\tif (rb < (int)sizeof(int32_t))\n\t\treturn errno;\n\n\treturn 0;\n}\n\nint open_revent_recording(const char *filepath, revent_record_desc_t *desc, FILE **fin)\n{\n\t*fin = fopen(filepath, \"r\");\n\tif (*fin == NULL)\n\t\treturn errno;\n\n\tint ret = read_record_header(fileno(*fin), desc);\n\tif (ret)\n\t\treturn ret;\n\n\tif (desc->version < 0 || desc->version > FORMAT_VERSION)\n\t\treturn EPROTO;\n\n\treturn 0;\n}\n\nFILE *init_recording(const char *pathname, recording_mode_t mode)\n{\n\trevent_record_desc_t desc = { .mode = mode, .version = FORMAT_VERSION };\n\n\tFILE *fh = fopen(pathname, \"w\");\n\tif (fh == NULL)\n\t\treturn fh;\n\n\twrite_record_header(fileno(fh), &desc);\n\n\treturn fh;\n}\n\nvoid init_input_devices(input_devices_t *devices)\n{\n\tdevices->num = 0;\n\tdevices->max_fd = -1;\n\tdevices->paths = NULL;\n\tdevices->fds = NULL;\n}\n\nint init_general_input_devices(input_devices_t *devices)\n{\n\tuint32_t num, i, path_len;\n\tchar paths[INPDEV_MAX_DEVICES][INPDEV_MAX_PATH];\n\tint fds[INPDEV_MAX_DEVICES];\n\tint max_fd = 0;\n\tint ret;\n\tint 
clk_id = CLOCK_MONOTONIC;\n\n\tnum = 0;\n\tfor(i = 0; i < INPDEV_MAX_DEVICES; ++i) {\n\t\tsprintf(paths[num], \"/dev/input/event%d\", i);\n\t\tfds[num] = open(paths[num], O_RDONLY);\n\t\tif(fds[num] > 0) {\n\t\t\tif (fds[num] > max_fd)\n\t\t\t\tmax_fd = fds[num];\n\t\t\tif (ret = ioctl(fds[num], EVIOCSCLOCKID, &clk_id)) {\n\t\t\t\tdprintf(\"Failed to set monotonic clock for %s.\\n\", paths[num]);\n\t\t\t\treturn -ret;\n\t\t\t}\n\t\t\tdprintf(\"opened %s\\n\", paths[num]);\n\t\t\tnum++;\n\t\t}\n\t\telse {\n\t\t\tdprintf(\"could not open %s\\n\", paths[num]);\n\t\t}\n\t}\n\n\tif (num == 0)\n\t\treturn EACCES;\n\n\tdevices->num = num;\n\tdevices->max_fd = max_fd;\n\n\tdevices->paths = malloc(sizeof(char *) * num);\n\tif (devices->paths == NULL) {\n\t\treturn ENOMEM;\n\t}\n\tfor (i = 0; i < num; i ++) {\n\t\tpath_len = strlen(paths[i]);\n\t\tdevices->paths[i] = malloc(sizeof(char) * (path_len + 1));\n\t\tif (devices->paths[i] == NULL)\n\t\t\treturn ENOMEM;\n\t\tstrncpy(devices->paths[i], paths[i],  path_len + 1);\n\t}\n\n\tdevices->fds = malloc(sizeof(int) * num);\n\tif (devices->fds == NULL) {\n\t\treturn ENOMEM;\n\t}\n\tfor (i = 0; i < num; i ++)\n\t\tdevices->fds[i] = fds[i];\n\n\treturn 0;\n}\n\nvoid fini_general_input_devices(input_devices_t *devices)\n{\n\tint i;\n\tfor (i = 0; i < devices->num; i++) {\n\t\tif (devices->fds != NULL)\n\t\t\tclose(devices->fds[i]);\n\t\tif (devices->paths != NULL)\n\t\t\tfree(devices->paths[i]);\n\t}\n\tfree(devices->fds);\n\tdevices->num = 0;\n}\n\n\nint init_gamepad_input_devices(input_devices_t *devices, device_info_t *gamepad_info)\n{\n\tint i;\n\tchar *gamepad_path = NULL;\n\tinput_devices_t all_devices;\n\tdevice_info_t info;\n\n\tint ret = init_general_input_devices(&all_devices);\n\tif (ret) {\n\t\treturn ret;\n\t}\n\n\tfor (i = 0; i < all_devices.num; i++) {\n\t\tret = get_device_info(all_devices.fds[i], &info);\n\t\tif (ret) {\n\t\t\tdprintf(\"Could not get info for %s: %s\\n\", all_devices.paths[i], 
strerror(errno));\n\t\tcontinue;\n\t\t}\n\n\t\tif (!is_gamepad(&info)) {\n\t\t\tdprintf(\"not a gamepad: %s\\n\", all_devices.paths[i]);\n\t\t\tcontinue;\n\t\t}\n\n\t\tif (gamepad_path != NULL) {\n\t\t\tdie(\"More than one device identified as a gamepad (run \\\"revent info\\\" to see which)\");\n\t\t}\n\n\t\tgamepad_path = malloc(sizeof(char) * INPDEV_MAX_PATH);\n\t\tif (gamepad_path == NULL)\n\t\t\tdie(\"Could not create replay device: %s\", strerror(ENOMEM));\n\t\tstrncpy(gamepad_path, all_devices.paths[i], INPDEV_MAX_PATH);\n\t\tmemcpy(gamepad_info, &info, sizeof(device_info_t));\n\t}\n\n\tfini_general_input_devices(&all_devices);\n\n\tif (gamepad_path == NULL) {\n\t\treturn ENOMEDIUM;\n\t}\n\n\tdprintf(\"Found gamepad: %s\\n\", gamepad_path);\n\tdevices->num = 1;\n\n\tdevices->paths = malloc(sizeof(char *));\n\tdevices->paths[0] = gamepad_path;\n\n\tdevices->fds = malloc(sizeof(int *));\n\tif (devices->fds == NULL)\n\t\treturn ENOMEM;\n\tdevices->fds[0] = open(gamepad_path, O_RDONLY);\n\tif (devices->fds[0] < 0) {\n\t\treturn errno;\n\t}\n\n\tint clk_id = CLOCK_MONOTONIC;\n\tif (ret = ioctl(devices->fds[0], EVIOCSCLOCKID, &clk_id)) {\n\t\tdprintf(\"Could not set monotonic clock for the gamepad.\\n\");\n\t\treturn -ret;\n\t}\n\n\tdevices->max_fd = devices->fds[0];\n\n\treturn 0;\n}\n\nvoid fini_gamepad_input_devices(input_devices_t *devices)\n{\n\tfini_general_input_devices(devices);\n}\n\nvoid init_revent_recording(revent_recording_t *recording)\n{\n\trecording->num_events = 0;\n\trecording->desc.version = 0;\n\trecording->desc.mode = INVALID_MODE;\n\trecording->events = NULL;\n\trecording->gamepad_info = NULL;\n\tinit_input_devices(&recording->devices);\n}\n\nvoid fini_revent_recording(revent_recording_t *recording)\n{\n\tif (recording->desc.mode == GENERAL_MODE) {\n\t\tfini_general_input_devices(&recording->devices);\n\t} else if (recording->desc.mode == GAMEPAD_MODE) 
{\n\t\tfini_gamepad_input_devices(&recording->devices);\n\t\tfree(recording->gamepad_info);\n\t} else {\n\t\t// We're finalizing the recording so at this point,\n\t\t// we don't care.\n\t}\n\tif (recording->num_events) {\n\t\tfree(recording->events);\n\t}\n\trecording->num_events = 0;\n\trecording->desc.version = 0;\n\trecording->desc.mode = INVALID_MODE;\n}\n\nvoid open_general_input_devices_for_playback_or_die(input_devices_t *devices)\n{\n\tint i, ret;\n\tdevices->fds = malloc(sizeof(int) * devices->num);\n\tif (devices->fds == NULL)\n\t\tdie(\"Could not allocate file descriptor array: %s\", strerror(ENOMEM));\n\n\tfor (i = 0; i < devices->num; i++)\n\t{\n\t\tret = open(devices->paths[i], O_WRONLY | O_NDELAY);\n\t\tif (ret < 0) {\n\t\t\tdie(\"Could not open \\\"%s\\\" for writing: %s\",\n\t\t\t\t\tdevices->paths[i], strerror(errno));\n\t\t}\n\t\tdevices->fds[i] = ret;\n\t\tif (devices->fds[i] > devices->max_fd)\n\t\t\tdevices->max_fd =  devices->fds[i];\n\t\tdprintf(\"Opened %s\\n\", devices->paths[i]);\n\t}\n}\n\nint create_replay_device_or_die(const device_info_t *info)\n{\n\tint i;\n\n\tint fd = open(\"/dev/uinput\", O_WRONLY | O_NONBLOCK);\n\tif (fd < 0) {\n\t\tif (errno == ENOENT) {\n\t\t\tdie(\"uinput not supported by the kernel (is the module installed?)\");\n\t\t} else if (errno == EACCES) {\n\t\t\tdie(\"Cannot access \\\"/dev/uinput\\\" (try re-running as root)\");\n\t\t} else {\n\t\t\tdie(\"Could not open \\\"/dev/uinput\\\" for writing: %s\", strerror(errno));\n\t\t}\n\t}\n\n\tstruct uinput_user_dev uidev;\n\tmemset(&uidev, 0, sizeof(uidev));\n\tsnprintf(uidev.name, UINPUT_MAX_NAME_SIZE, \"revent-replay %s\", info->name);\n\tuidev.id.bustype = BUS_USB;\n\tuidev.id.vendor  = info->id.vendor;\n\tuidev.id.product = info->id.product;\n\tuidev.id.version = info->id.version;\n\n\tset_evbit(fd, EV_SYN);\n\n\tset_evbit(fd, EV_KEY);\n\tfor (i = 0; i < KEY_MAX; i++) {\n\t\tif (test_bit(info->key_bits, i))\n\t\t\tset_keybit(fd, i);\n\t}\n\n\tset_evbit(fd, 
EV_REL);\n\tfor (i = 0; i < REL_MAX; i++) {\n\t\tif (test_bit(info->rel_bits, i))\n\t\t\tset_relbit(fd, i);\n\t}\n\n\tset_evbit(fd, EV_ABS);\n\tfor (i = 0; i < info->num_absinfo; i++) {\n\t\tint ev_code = info->absinfo[i].ev_code;\n\t\tset_absbit(fd, ev_code);\n\t\tuidev.absmin[ev_code] = info->absinfo[i].absinfo.minimum;\n\t\tuidev.absmax[ev_code] = info->absinfo[i].absinfo.maximum;\n\t\tuidev.absfuzz[ev_code] = info->absinfo[i].absinfo.fuzz;\n\t\tuidev.absflat[ev_code] = info->absinfo[i].absinfo.flat;\n\t}\n\tif (write(fd, &uidev, sizeof(uidev)) < sizeof(uidev)) {\n\t\tdie(\"Could not write absinfo:\", strerror(errno));\n\t}\n\n\tif(ioctl(fd, UI_DEV_CREATE) < 0)\n\t\tdie(\"Could not create replay device:\", strerror(errno));\n\n        // wait for the new device to be recognised by the system\n        sleep(3);\n\n\treturn fd;\n}\n\ninline void read_revent_recording_or_die(const char *filepath, revent_recording_t *recording)\n{\n\tint ret;\n\tFILE *fin;\n\tuint64_t i;\n\toff_t fsize;\n\n\tret = open_revent_recording(filepath, &recording->desc, &fin);\n\tif (ret) {\n\t\tif (ret == EINVAL) {\n\t\t\tdie(\"%s does not appear to be an revent recording\", filepath);\n\t\t} else if (ret == EPROTO) {\n\t\t\tdie(\"%s contains recording for unsupported version \\\"%u\\\"; max supported version is \\\"%u\\\"\",\n\t\t\t\t\tfilepath, recording->desc.version, FORMAT_VERSION);\n\t\t} else  {\n\t\t\tdie(\"%s revent recording appears to be corrupted\", filepath);\n\t\t}\n\t}\n\n\tif (recording->desc.mode == GENERAL_MODE) {\n\t\tret = read_general_input_devices(&recording->devices, fin);\n\t\tif (ret) {\n\t\t\tdie(\"Could not read devices: %s\", strerror(ret));\n\t\t}\n\t\trecording->gamepad_info = NULL;\n\t} else if (recording->desc.mode == GAMEPAD_MODE) {\n\t\trecording->gamepad_info = malloc(sizeof(device_info_t));\n\t\tif (recording->gamepad_info == NULL)\n\t\t\tdie(\"Could not allocate gamepad info buffer: %s\", strerror(ENOMEM));\n\t\tret = read_device_info(fin, 
recording->gamepad_info);\n\t\tif (ret)\n\t\t\tdie(\"Could not read gamepad info: %s\", strerror(ret));\n\t} else {\n\t\tdie(\"Unexpected recording mode: %d\", recording->desc.mode);\n\t}\n\n\tif (recording->desc.version > 1) {\n\t\tret = fread(&recording->num_events, sizeof(uint64_t), 1, fin);\n\t\tif (ret < 1)\n\t\t\tdie(\"Could not read the number of recorded events\");\n\n\t\tif (recording->desc.version > 2) {\n\t\t\tret = read_record_timestamps(fin, recording);\n\t\t\tif (ret)\n\t\t\t\tdie(\"Could not read recording timestamps.\");\n\t\t}\n\n\t\trecording->events = malloc(sizeof(replay_event_t) * recording->num_events);\n\t\tif (recording->events == NULL)\n\t\t\tdie(\"Not enough memory to allocate replay buffer\");\n\n\t\t// start/end times tracking for recording as a whole was added in version 3\n\t\t// of recording format; for earlier recordings, use timestamps of the first and\n\t\t// last events.\n\t\tread_replay_event(fin, &recording->events[0]);\n\t\tif (recording->desc.version <= 2) {\n\t\t\trecording->start_time.tv_sec  = recording->events[0].event.time.tv_sec;\n\t\t\trecording->start_time.tv_usec  = recording->events[0].event.time.tv_usec;\n\t\t}\n\n\t\tfor(i=1; i < recording->num_events; i++) {\n\t\t\tread_replay_event(fin, &recording->events[i]);\n\t\t}\n\n\t\tif (recording->desc.version <= 2) {\n\t\t\trecording->end_time.tv_sec  = recording->events[i].event.time.tv_sec;\n\t\t\trecording->end_time.tv_usec  = recording->events[i].event.time.tv_usec;\n\t\t}\n\t} else {   // backwards compatibility\n\t\t/* Prior to version 2, the total number of recorded events was not being\n\t\t * written as part of the recording. We will use the size of the file on\n\t\t * disk to estimate the recording buffer size and keep reading the events\n\t\t * until EOF, keeping track of how many we read so that the total can\n\t\t * then be updated. 
The format of the events is also different -- it\n\t\t * featured larger device ID an unnecessary padding.\n\t\t */\n\t\t fsize  = get_file_size(filepath);\n\t\t recording->events = malloc((size_t)fsize);\n\t\t i = 0;\n\n\t\t// Safely get file descriptor for fin, by flushing first.\n\t\tfflush(fin);\n\n\t\t while (1) {\n\t\t\tret = read_legacy_replay_event(fileno(fin), &recording->events[i]);\n\t\t\tif (ret == EOF) {\n\t\t\t\tbreak;\n\t\t\t} else if (ret) {\n\t\t\t\tdie(\"error reading events: %s\", strerror(ret));\n\t\t\t}\n\t\t\ti++;\n\t\t }\n\t\t recording->num_events = i;\n\t}\n\n\tfclose(fin);\n}\n\nvoid open_gamepad_input_devices_for_playback_or_die(input_devices_t *devices, const device_info_t *info)\n{\n\tint fd = create_replay_device_or_die(info);\n\tdevices->num = 1;\n\tdevices->fds = malloc(sizeof(int));\n\tif (devices->fds == NULL)\n\t\tdie(\"Could not create replay devices: %s\", strerror(ENOMEM));\n\tdevices->fds[0] = fd;\n\tdevices->max_fd = fd;\n}\n\n//Used to exit program properly on termination\nstatic volatile int EXIT = 0;\nvoid exitHandler(int z) {\n    EXIT = 1;\n}\n\nvoid record(const char *filepath, int delay, recording_mode_t mode)\n{\n\tint ret;\n\tstruct timespec start_time, end_time;\n\tFILE *fout = init_recording(filepath, mode);\n\tif (fout == NULL)\n\t\tdie(\"Could not create recording \\\"%s\\\": %s\", filepath, strerror(errno));\n\n\tinput_devices_t devices;\n\tinit_input_devices(&devices);\n\n\tif (mode == GENERAL_MODE) {\n\t\tret = init_general_input_devices(&devices);\n\t\tif (ret)\n\t\t\tdie(\"Could not initialize input devices: %s\", strerror(ret));\n\t\tret = write_general_input_devices(&devices, fout);\n\t\tif (ret)\n\t\t\tdie(\"Could not record input devices: %s\", strerror(ret));\n\t} else if (mode == GAMEPAD_MODE) {\n\t\tdevice_info_t info;\n\t\tret = init_gamepad_input_devices(&devices, &info);\n\t\tif (ret == ENOMEDIUM) {\n\t\t\tdie(\"There does not appear to be a gamepad connected\");\n\t\t} else if (ret) 
{\n\t\t\tdie(\"Problem initializing gamepad device: %s\", strerror(ret));\n\t\t}\n\t\tret = write_device_info(fout, &info);\n\t\tif (ret)\n\t\t\tdie(\"Problem writing gamepad info: %s\", strerror(ret));\n\t} else {\n\t\tfclose(fout);\n\t\tdie(\"Invalid recording mode specified\");\n\t}\n\n\tsigset_t old_sigset;\n\tsigemptyset(&old_sigset);\n\tblock_sigterm(&old_sigset);\n\n\t// Write the zero size as a place holder and remember the position in the\n\t// file stream, so that it may be updated at the end with the actual event\n\t// count. Reserving space for five uint64_t's -- the number of events and\n\t// end time stamps.\n\tuint64_t event_count = 0;\n\tlong size_pos = ftell(fout);\n\tret = fwrite(&event_count, sizeof(uint64_t), 5, fout);\n\tif (ret < 1)\n\t\tdie(\"Could not initialise event count: %s\", strerror(errno));\n\n\tchar padding[EVENT_PADDING_SIZE];\n\tbzero(padding, EVENT_PADDING_SIZE);\n\n\tfd_set readfds;\n\tstruct timespec tout;\n\treplay_event_t rev;\n\tint32_t maxfd = 0;\n\tint32_t keydev = 0;\n\tint i;\n\tprintf(\"recording...\\n\");\n\n\terrno = 0;\n\tsignal(SIGINT, exitHandler);\n\t\n\tclock_gettime(CLOCK_MONOTONIC, &start_time);\n\twhile(1)\n\t{\n\t\tFD_ZERO(&readfds);\n\t\tFD_SET(STDIN_FILENO, &readfds);\n\t\tfor (i=0; i < devices.num; i++)\n\t\t\tFD_SET(devices.fds[i], &readfds);\n\n\t\t/* wait for input */\n\t\ttout.tv_sec = delay;\n\t\ttout.tv_nsec = 0;\n\n\t\tret = pselect(devices.max_fd + 1, &readfds, NULL, NULL, &tout, &old_sigset);\n\n\t\tif (EXIT){\n\t\t\tbreak;\n\t\t}\n\t\tif (errno == EINTR){\n\t\t\tbreak;\n\t\t}\n\t\tif (!ret){\n\t\t\tbreak;\n\t\t}\n\n\t\tif (wait_for_stdin && FD_ISSET(STDIN_FILENO, &readfds)) {\n\t\t\t// in this case the key down for the return key will be recorded\n\t\t\t// so we need to up the key up\n\t\t\tmemset(&rev, 0, sizeof(rev));\n\t\t\trev.dev_idx = keydev;\n\t\t\trev.event.type = EV_KEY;\n\t\t\trev.event.code = KEY_ENTER;\n\t\t\trev.event.value = 0;\n\t\t\tgettimeofday(&rev.event.time, 
NULL);\n\t\t\twrite_replay_event(fout, &rev);\n\n\t\t\t// syn\n\t\t\tmemset(&rev, 0, sizeof(rev));\n\t\t\trev.dev_idx = keydev;\n\t\t\trev.event.type = EV_SYN;\n\t\t\trev.event.code = 0;\n\t\t\trev.event.value = 0;\n\t\t\tgettimeofday(&rev.event.time, NULL);\n\t\t\twrite_replay_event(fout, &rev);\n\n\t\t\tdprintf(\"added fake return exiting...\\n\");\n\t\t\tbreak;\n\t\t}\n\n\t\tfor (i = 0; i < devices.num; i++)\n\t\t{\n\t\t\tif (FD_ISSET(devices.fds[i], &readfds))\n\t\t\t{\n\t\t\t\tdprintf(\"got event from %s\\n\", devices.paths[i]);\n\t\t\t\tmemset(&rev, 0, sizeof(rev));\n\t\t\t\trev.dev_idx = i;\n\t\t\t\tret = read(devices.fds[i], (void *)&rev.event, sizeof(rev.event));\n\t\t\t\tdprintf(\"%d event: type %d code %d value %d\\n\",\n\t\t\t\t\t\t(unsigned int)ret, rev.event.type, rev.event.code, rev.event.value);\n\t\t\t\tif (rev.event.type == EV_KEY && rev.event.code == KEY_ENTER && rev.event.value == 1)\n\t\t\t\t\tkeydev = i;\n\t\t\t\twrite_replay_event(fout, &rev);\n\t\t\t\tevent_count++;\n\t\t\t}\n\t\t}\n\t}\n\tclock_gettime(CLOCK_MONOTONIC, &end_time);\n\n\tdprintf(\"Writing event count...\\n\");\n\tif ((ret = fseek(fout, size_pos, SEEK_SET)) == -1)\n\t\tdie(\"Could not write event count: %s\", strerror(errno));\n\tret = fwrite(&event_count, sizeof(uint64_t), 1, fout);\n\tif (ret < 1)\n\t\tdie(\"Could not write event count: %s\", strerror(errno));\n\tdprintf(\"Writing recording timestamps...\\n\");\n\tuint64_t secs, usecs;\n\tsecs = start_time.tv_sec;\n\tfwrite(&secs, sizeof(uint64_t), 1, fout);\n\tusecs = start_time.tv_nsec / 1000;\n\tfwrite(&usecs, sizeof(uint64_t), 1, fout);\n\tsecs = end_time.tv_sec;\n\tfwrite(&secs, sizeof(uint64_t), 1, fout);\n\tusecs = end_time.tv_nsec / 1000;\n\tret = fwrite(&usecs, sizeof(uint64_t), 1, fout);\n\tif (ret < 1)\n\t\tdie(\"Could not write recording timestamps: %s\\n\", strerror(errno));\n\n\tfclose(fout);\n\tdprintf(\"Recording complete.\\n\");\n\n\tif (mode == GENERAL_MODE) {\n\t\tfini_general_input_devices(&devices);\n\t} 
else if (mode == GAMEPAD_MODE) {\n\t\tfini_gamepad_input_devices(&devices);\n\t} else {\n\t\t// Should never get here, as would have failed at the beginning\n\t\tdie(\"Unexpected mode on finish\");\n\t}\n}\n\nvoid dump(const char *filepath)\n{\n\tint i, ret = 0;\n\trevent_recording_t recording;\n\tinit_revent_recording(&recording);\n\n\tread_revent_recording_or_die(filepath, &recording);\n\tprintf(\"recording version: %u\\n\", recording.desc.version);\n\tprintf(\"recording type: %i\\n\", recording.desc.mode);\n\tprintf(\"number of recorded events: %lu\\n\", recording.num_events);\n\tprintf(\"start time: %ld.%06ld \\n\", recording.start_time.tv_sec, recording.start_time.tv_usec);\n\tprintf(\"end time:   %ld.%06ld \\n\", recording.end_time.tv_sec, recording.end_time.tv_usec);\n\n\tprintf(\"\\n\");\n\tif (recording.desc.mode == GENERAL_MODE) {\n\t\tprintf(\"devices:\\n\");\n\t\tfor (i = 0; i < recording.devices.num; i++) {\n\t\t\tprintf(\"%2i: %s\\n\", i, recording.devices.paths[i]);\n\t\t}\n\t} else if (recording.desc.mode == GAMEPAD_MODE) {\n\t\tprint_device_info(recording.gamepad_info);\n\t} else {\n\t\tdie(\"Unexpected recording type: %d\", recording.desc.mode);\n\t}\n\n\tprintf(\"\\nevents:\\n\");\n\tfor (i =0; i < recording.num_events; i++) {\n\t\tprintf(\"%ld.%06ld dev: %d type: %d code: %d value %d\\n\",\n\t\t\t\trecording.events[i].event.time.tv_sec,\n\t\t\t\trecording.events[i].event.time.tv_usec,\n\t\t\t\trecording.events[i].dev_idx,\n\t\t\t\trecording.events[i].event.type,\n\t\t\t\trecording.events[i].event.code,\n\t\t\t\trecording.events[i].event.value\n\t\t      );\n\t}\n\n\tfini_revent_recording(&recording);\n}\n\nvoid replay(const char *filepath)\n{\n\trevent_recording_t recording;\n\tinit_revent_recording(&recording);\n\n\tread_revent_recording_or_die(filepath, &recording);\n\tswitch (recording.desc.mode) {\n\tcase GENERAL_MODE:\n\t\tdprintf(\"Opening input devices for 
playback\\n\");\n\t\topen_general_input_devices_for_playback_or_die(&recording.devices);\n\t\tbreak;\n\tcase GAMEPAD_MODE:\n\t\tdprintf(\"Creating gamepad playback device\\n\");\n\t\topen_gamepad_input_devices_for_playback_or_die(&recording.devices, recording.gamepad_info);\n\t\tbreak;\n\tdefault:\n\t\tdie(\"Unexpected recording mod: %d\", recording.desc.mode);\n\t}\n\tdprintf(\"Adjusting timestamps\\n\");\n\tadjust_timestamps(&recording);\n\n\tstruct timeval start_time, now, desired_time, last_event_delta, delta;\n\tbzero(&last_event_delta, sizeof(struct timeval));\n\tgettimeofday(&start_time, NULL);\n\n\tint ret;\n\tuint64_t i = 0;\n\tdprintf(\"Starting payback\\n\");\n\twhile (i < recording.num_events) {\n\t\tgettimeofday(&now, NULL);\n\t\ttimeradd(&start_time, &last_event_delta, &desired_time);\n\n\t\tif (timercmp(&desired_time, &now, >)) {\n\t\t\ttimersub(&desired_time, &now, &delta);\n\t\t\tuseconds_t d = (useconds_t)delta.tv_sec * 1000000 + delta.tv_usec;\n\t\t\tdprintf(\"now %u.%u desiredtime %u.%u sleeping %u uS\\n\",\n\t\t\t\t\t(unsigned int)now.tv_sec,\n\t\t\t\t\t(unsigned int)now.tv_usec,\n\t\t\t\t\t(unsigned int)desired_time.tv_sec,\n\t\t\t\t\t(unsigned int)desired_time.tv_usec,\n\t\t\t\t\td);\n\t\t\tusleep(d);\n\t\t}\n\n\t\tint32_t idx = (recording.events[i]).dev_idx;\n\t\tstruct input_event ev = (recording.events[i]).event;\n\t\twhile(!timercmp(&ev.time, &last_event_delta, !=)) {\n\t\t\tret = write(recording.devices.fds[idx], &ev, sizeof(ev));\n\t\t\tif (ret != sizeof(ev))\n\t\t\t\tdie(\"Could not replay event\");\n\t\t\tdprintf(\"replayed event: type %d code %d value %d\\n\", ev.type, ev.code, ev.value);\n\n\t\t\ti++;\n\t\t\tif (i >= recording.num_events) {\n\t\t\t\tbreak;\n\t\t\t}\n\t\t\tidx = recording.events[i].dev_idx;\n\t\t\tev = recording.events[i].event;\n\t\t}\n\t\tlast_event_delta = ev.time;\n\t}\n\n\ttimeradd(&start_time, &recording.end_time, &desired_time);\n\tgettimeofday(&now, NULL);\n\tif (timercmp(&desired_time, &now, >)) 
{\n\t\ttimersub(&desired_time, &now, &delta);\n\t\tuseconds_t d = (useconds_t)delta.tv_sec * 1000000 + delta.tv_usec;\n\t\tdprintf(\"now %u.%u recording end time %u.%u; sleeping %u uS\\n\",\n\t\t\t\t(unsigned int)now.tv_sec,\n\t\t\t\t(unsigned int)now.tv_usec,\n\t\t\t\t(unsigned int)desired_time.tv_sec,\n\t\t\t\t(unsigned int)desired_time.tv_usec,\n\t\t\t\td);\n\t\tusleep(d);\n\t}\n\telse {\n\t\tdprintf(\"now %u.%u recording end time %u.%u; no need to sleep\\n\",\n\t\t\t\t(unsigned int)now.tv_sec,\n\t\t\t\t(unsigned int)now.tv_usec,\n\t\t\t\t(unsigned int)desired_time.tv_sec,\n\t\t\t\t(unsigned int)desired_time.tv_usec);\n\t}\n\tdprintf(\"Playback complete\\n\");\n\n        if (recording.desc.mode == GAMEPAD_MODE)\n\t\tdestroy_replay_device(recording.devices.fds[0]);\n\tfini_revent_recording(&recording);\n}\n\nvoid info(void)\n{\n\tinput_devices_t devices;\n\tinit_input_devices(&devices);\n\n\tint ret = init_general_input_devices(&devices);\n\tif (ret) {\n\t\tdie(\"Could not read input devices: %s\", strerror(errno));\n\t}\n\n\tint i;\n\tdevice_info_t info;\n\tfor (i = 0; i < devices.num; i++) {\n\t\tret = get_device_info(devices.fds[i], &info);\n\t\tif (ret) {\n\t\t\tprintf(\"Could not get info for %s: %s\\n\", devices.paths[i], strerror(errno));\n\t\t\tcontinue;\n\t\t}\n\n\t\tprintf(\"DEVICE %d\\n\", i);\n\t\tprintf(\"device path: %s\\n\", devices.paths[i]);\n\t\tprintf(\"is gamepad: %s\\n\", is_gamepad(&info) ? \"yes\" : \"no\");\n\t\tprint_device_info(&info);\n\t\tprintf(\"\\n\");\n\t}\n\n\tfini_general_input_devices(&devices);\n}\n\nvoid usage()\n{\n\tprintf(\"usage:\\n    revent [-h] [-v] COMMAND [OPTIONS] \\n\"\n\t\t\t\"\\n\"\n\t\t\t\"    Options:\\n\"\n\t\t\t\"        -h  print this help message and quit.\\n\"\n\t\t\t\"        -v  enable verbose output.\\n\"\n\t\t\t\"\\n\"\n\t\t\t\"    Commands:\\n\"\n\t\t\t\"        record [-t SECONDS] [-d DEVICE] FILE\\n\"\n\t\t\t\"            Record input event. 
stops after return on STDIN (or, optionally, \\n\"\n\t\t\t\"            a fixed delay)\\n\"\n\t\t\t\"\\n\"\n\t\t\t\"                FILE       file into which events will be recorded.\\n\"\n\t\t\t\"                -t SECONDS time, in seconds, for which to record events.\\n\"\n\t\t\t\"                           if not specified, recording will continue until\\n\"\n\t\t\t\"                           return key is pressed.\\n\"\n\t\t\t\"                -d DEVICE  the number of the input device form which\\n\"\n\t\t\t\"                           events will be recorded. If not specified, \\n\"\n\t\t\t\"                           all available inputs will be used.\\n\"\n\t\t\t\"                -s         Recording will not be stopped if there is \\n\"\n\t\t\t\"                           input on STDIN.\\n\"\n\t\t\t\"                -g         Record in \\\"gamepad\\\" mode. A gamepad must be \\n\"\n\t\t\t\"                           connected to the device. The recording will only\\n\"\n\t\t\t\"                           be done for the gamepad and other input devices\\n\"\n\t\t\t\"                           will not be recorded. In addition to the input\\n\"\n\t\t\t\"                           events, the information about the gamepad will\\n\"\n\t\t\t\"                           also be stored in the recording. When this\\n\"\n\t\t\t\"                           recording is played back, revent will first\\n\"\n\t\t\t\"                           create a virtual gamepad device based on the\\n\"\n\t\t\t\"                           stored info and the event will be played back\\n\"\n\t\t\t\"                           into it. 
This type of recording should be more\\n\"\n\t\t\t\"                           portable across different devices.\\n\"\n\t\t\t\"\\n\"\n\t\t\t\"        replay FILE\\n\"\n\t\t\t\"            replays previously recorded events from the specified file.\\n\"\n\t\t\t\"\\n\"\n\t\t\t\"                FILE       file into which events will be recorded.\\n\"\n\t\t\t\"\\n\"\n\t\t\t\"        dump FILE\\n\"\n\t\t\t\"            dumps the contents of the specified event log to STDOUT in\\n\"\n\t\t\t\"            human-readable form.\\n\"\n\t\t\t\"\\n\"\n\t\t\t\"                FILE       event log which will be dumped.\\n\"\n\t\t\t\"\\n\"\n\t\t\t\"        info\\n\"\n\t\t\t\"             shows info about each event char device\\n\"\n\t\t\t\"\\n\"\n\t\t\t);\n}\n\nvoid revent_args_init(revent_args_t **rargs, int argc, char** argv)\n{\n\t*rargs = malloc(sizeof(revent_args_t));\n\trevent_args_t *revent_args = *rargs;\n\trevent_args->command = INVALID_COMMAND;\n\trevent_args->mode = GENERAL_MODE;\n\trevent_args->record_time = INT_MAX;\n\trevent_args->device_number = -1;\n\trevent_args->file = NULL;\n\n\tint opt;\n\twhile ((opt = getopt(argc, argv, \"hgt:d:vs\")) != -1)\n\t{\n\t\tswitch (opt) {\n\t\t\tcase 'h':\n\t\t\t\tusage();\n\t\t\t\texit(0);\n\t\t\t\tbreak;\n\t\t\tcase 'g':\n\t\t\t\trevent_args->mode = GAMEPAD_MODE;\n\t\t\t\tbreak;\n\t\t\tcase 't':\n\t\t\t\tif (is_numeric(optarg)) {\n\t\t\t\t\trevent_args->record_time = atoi(optarg);\n\t\t\t\t\tdprintf(\"timeout: %d\\n\", revent_args->record_time);\n\t\t\t\t} else {\n\t\t\t\t\tdie(\"-t parameter must be numeric; got %s.\", optarg);\n\t\t\t\t}\n\t\t\t\tbreak;\n\t\t\tcase 'd':\n\t\t\t\tif (is_numeric(optarg)) {\n\t\t\t\t\trevent_args->device_number = atoi(optarg);\n\t\t\t\t\tdprintf(\"device: %d\\n\", revent_args->device_number);\n\t\t\t\t} else {\n\t\t\t\t\tdie(\"-d parameter must be numeric; got %s.\", optarg);\n\t\t\t\t}\n\t\t\t\tbreak;\n\t\t\tcase 'v':\n\t\t\t\tverbose = TRUE;\n\t\t\t\tbreak;\n\t\t\tcase 
's':\n\t\t\t\twait_for_stdin = FALSE;\n\t\t\t\tbreak;\n\n\t\t\tdefault:\n\t\t\t\tdie(\"Unexpected option: %c\", opt);\n\t\t}\n\t}\n\n\tint next_arg = optind;\n\tif (next_arg == argc) {\n\t\tusage();\n\t\tdie(\"Must specify a command.\");\n\t}\n\tif (!strcmp(argv[next_arg], \"record\"))\n\t\trevent_args->command = RECORD_COMMAND;\n\telse if (!strcmp(argv[next_arg], \"replay\"))\n\t\trevent_args->command = REPLAY_COMMAND;\n\telse if (!strcmp(argv[next_arg], \"dump\"))\n\t\trevent_args->command = DUMP_COMMAND;\n\telse if (!strcmp(argv[next_arg], \"info\"))\n\t\trevent_args->command = INFO_COMMAND;\n\telse {\n\t\tusage();\n\t\tdie(\"Unknown command -- %s\", argv[next_arg]);\n\t}\n\tnext_arg++;\n\n\tif (next_arg != argc) {\n\t\trevent_args->file = argv[next_arg];\n\t\tdprintf(\"file: %s\\n\", revent_args->file);\n\t\tnext_arg++;\n\t\tif (next_arg != argc) {\n\t\t\tdie(\"Trailling arguments (use -h for help).\");\n\t\t}\n\t}\n\n\tif ((revent_args->command != RECORD_COMMAND) && (revent_args->record_time != INT_MAX)) {\n\t\tdie(\"-t parameter is only valid for \\\"record\\\" command.\");\n\t}\n\tif ((revent_args->command != RECORD_COMMAND) && (revent_args->device_number != -1)) {\n\t\tdie(\"-d parameter is only valid for \\\"record\\\" command.\");\n\t}\n\tif ((revent_args->command == INFO_COMMAND) && (revent_args->file != NULL)) {\n\t\tdie(\"File path cannot be specified for \\\"info\\\" command.\");\n\t}\n\tif (((revent_args->command == RECORD_COMMAND) || (revent_args->command == REPLAY_COMMAND))\n\t\t\t&& (revent_args->file == NULL)) {\n\t\tdie(\"Must specify a file for recording/replaying (use -h for help).\");\n\t}\n}\n\nint revent_args_close(revent_args_t *rargs)\n{\n\tfree(rargs);\n\treturn 0;\n}\n\nint main(int argc, char** argv)\n{\n\tint i;\n\tchar *logfile = NULL;\n\trevent_args_t *rargs = NULL;\n\n\trevent_args_init(&rargs, argc, argv);\n\n\tswitch(rargs->command) {\n\t\tcase RECORD_COMMAND:\n\t\t\trecord(rargs->file, rargs->record_time, 
rargs->mode);\n\t\t\tbreak;\n\t\tcase REPLAY_COMMAND:\n\t\t\treplay(rargs->file);\n\t\t\tbreak;\n\t\tcase DUMP_COMMAND:\n\t\t\tdump(rargs->file);\n\t\t\tbreak;\n\t\tcase INFO_COMMAND:\n\t\t\tinfo();\n\t\t\tbreak;\n\t\tdefault:\n\t\t\tdie(\"Unexpected revent command: %d\", rargs->command);\n\t};\n\n\trevent_args_close(rargs);\n\treturn 0;\n}\n"
  },
  {
    "path": "wa/utils/__init__.py",
    "content": ""
  },
  {
    "path": "wa/utils/android.py",
    "content": "#    Copyright 2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\nimport logging\nimport os\nfrom datetime import datetime\nfrom shlex import quote\n\nfrom devlib.utils.android import ApkInfo as _ApkInfo\n\nfrom wa.framework.configuration import settings\nfrom wa.utils.serializer import read_pod, write_pod, Podable\nfrom wa.utils.types import enum\nfrom wa.utils.misc import atomic_write_path\n\n\nLogcatLogLevel = enum(['verbose', 'debug', 'info', 'warn', 'error', 'assert'], start=2)\n\nlog_level_map = ''.join(n[0].upper() for n in LogcatLogLevel.names)\n\nlogcat_logger = logging.getLogger('logcat')\napk_info_cache_logger = logging.getLogger('apk_info_cache')\n\napk_info_cache = None\n\n\nclass LogcatEvent(object):\n\n    __slots__ = ['timestamp', 'pid', 'tid', 'level', 'tag', 'message']\n\n    def __init__(self, timestamp, pid, tid, level, tag, message):\n        self.timestamp = timestamp\n        self.pid = pid\n        self.tid = tid\n        self.level = level\n        self.tag = tag\n        self.message = message\n\n    def __repr__(self):\n        return '{} {} {} {} {}: {}'.format(\n            self.timestamp, self.pid, self.tid,\n            self.level.name.upper(), self.tag,\n            self.message,\n        )\n\n    __str__ = __repr__\n\n\nclass LogcatParser(object):\n\n    def parse(self, filepath):\n        with open(filepath, errors='replace') as fh:\n            for line in fh:\n                event = 
self.parse_line(line)\n                if event:\n                    yield event\n\n    def parse_line(self, line):  # pylint: disable=no-self-use\n        line = line.strip()\n        if not line or line.startswith('-') or ': ' not in line:\n            return None\n\n        metadata, message = line.split(': ', 1)\n\n        parts = metadata.split(None, 5)\n        try:\n            ts = ' '.join([parts.pop(0), parts.pop(0)])\n            timestamp = datetime.strptime(ts, '%m-%d %H:%M:%S.%f').replace(year=datetime.now().year)\n            pid = int(parts.pop(0))\n            tid = int(parts.pop(0))\n            level = LogcatLogLevel.levels[log_level_map.index(parts.pop(0))]\n            tag = (parts.pop(0) if parts else '').strip()\n        except Exception as e:  # pylint: disable=broad-except\n            message = 'Invalid metadata for line:\\n\\t{}\\n\\tgot: \"{}\"'\n            logcat_logger.warning(message.format(line, e))\n            return None\n\n        return LogcatEvent(timestamp, pid, tid, level, tag, message)\n\n\n# pylint: disable=protected-access,attribute-defined-outside-init\nclass ApkInfo(_ApkInfo, Podable):\n    '''Implement ApkInfo as a Podable class.'''\n\n    _pod_serialization_version = 1\n\n    @staticmethod\n    def from_pod(pod):\n        instance = ApkInfo()\n        instance.path = pod['path']\n        instance.package = pod['package']\n        instance.activity = pod['activity']\n        instance.label = pod['label']\n        instance.version_name = pod['version_name']\n        instance.version_code = pod['version_code']\n        instance.native_code = pod['native_code']\n        instance.permissions = pod['permissions']\n        instance._apk_path = pod['_apk_path']\n        instance._activities = pod['_activities']\n        instance._methods = pod['_methods']\n        return instance\n\n    def __init__(self, path=None):\n        super().__init__(path)\n        self._pod_version = self._pod_serialization_version\n\n    def 
to_pod(self):\n        pod = super().to_pod()\n        pod['path'] = self.path\n        pod['package'] = self.package\n        pod['activity'] = self.activity\n        pod['label'] = self.label\n        pod['version_name'] = self.version_name\n        pod['version_code'] = self.version_code\n        pod['native_code'] = self.native_code\n        pod['permissions'] = self.permissions\n        pod['_apk_path'] = self._apk_path\n        pod['_activities'] = self.activities  # Force extraction\n        pod['_methods'] = self.methods  # Force extraction\n        return pod\n\n    @staticmethod\n    def _pod_upgrade_v1(pod):\n        pod['_pod_version'] = pod.get('_pod_version', 1)\n        return pod\n\n\nclass ApkInfoCache:\n\n    @staticmethod\n    def _check_env():\n        if not os.path.exists(settings.cache_directory):\n            os.makedirs(settings.cache_directory)\n\n    def __init__(self, path=settings.apk_info_cache_file):\n        self._check_env()\n        self.path = path\n        self.last_modified = None\n        self.cache = {}\n        self._update_cache()\n\n    def store(self, apk_info, apk_id, overwrite=True):\n        self._update_cache()\n        if apk_id in self.cache and not overwrite:\n            raise ValueError('ApkInfo for {} is already in cache.'.format(apk_info.path))\n        self.cache[apk_id] = apk_info.to_pod()\n        with atomic_write_path(self.path) as at_path:\n            write_pod(self.cache, at_path)\n        self.last_modified = os.stat(self.path)\n\n    def get_info(self, key):\n        self._update_cache()\n        pod = self.cache.get(key)\n\n        info = ApkInfo.from_pod(pod) if pod else None\n        return info\n\n    def _update_cache(self):\n        if not os.path.exists(self.path):\n            return\n        if self.last_modified != os.stat(self.path):\n            apk_info_cache_logger.debug('Updating cache {}'.format(self.path))\n            self.cache = read_pod(self.path)\n            self.last_modified = 
os.stat(self.path)\n\n\ndef get_cacheable_apk_info(path):\n    # pylint: disable=global-statement\n    global apk_info_cache\n    if not path:\n        return\n    stat = os.stat(path)\n    modified = stat.st_mtime\n    apk_id = '{}-{}'.format(path, modified)\n    info = apk_info_cache.get_info(apk_id)\n\n    if info:\n        msg = 'Using ApkInfo ({}) from cache'.format(info.package)\n    else:\n        info = ApkInfo(path)\n        apk_info_cache.store(info, apk_id, overwrite=True)\n        msg = 'Storing ApkInfo ({}) in cache'.format(info.package)\n    apk_info_cache_logger.debug(msg)\n    return info\n\n\napk_info_cache = ApkInfoCache()\n\n\ndef build_apk_launch_command(package, activity=None, apk_args=None):\n    args_string = ''\n    if apk_args:\n        for k, v in apk_args.items():\n            if isinstance(v, str):\n                arg = '--es'\n                v = quote(v)\n            elif isinstance(v, float):\n                arg = '--ef'\n            elif isinstance(v, bool):\n                arg = '--ez'\n            elif isinstance(v, int):\n                arg = '--ei'\n            else:\n                raise ValueError('Unable to encode {} {}'.format(v, type(v)))\n\n            args_string = '{} {} {} {}'.format(args_string, arg, k, v)\n\n    if not activity:\n        cmd = 'am start -W {} {}'.format(package, args_string)\n    else:\n        cmd = 'am start -W -n {}/{} {}'.format(package, activity, args_string)\n\n    return cmd\n"
  },
  {
    "path": "wa/utils/cpustates.py",
    "content": "#    Copyright 2015-2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\n\nimport os\nimport re\nimport logging\nfrom ctypes import c_int32\nfrom collections import defaultdict\n\nfrom devlib.utils.csvutil import create_writer, csvwriter\n\nfrom wa.utils.trace_cmd import TraceCmdParser, trace_has_marker, TRACE_MARKER_START, TRACE_MARKER_STOP\n\n\nlogger = logging.getLogger('cpustates')\n\nINIT_CPU_FREQ_REGEX = re.compile(r'CPU (?P<cpu>\\d+) FREQUENCY: (?P<freq>\\d+) kHZ')\nDEVLIB_CPU_FREQ_REGEX = re.compile(r'cpu_frequency(?:_devlib):\\s+state=(?P<freq>\\d+)\\s+cpu_id=(?P<cpu>\\d+)')\n\n\nclass CorePowerTransitionEvent(object):\n\n    kind = 'transition'\n    __slots__ = ['timestamp', 'cpu_id', 'frequency', 'idle_state']\n\n    def __init__(self, timestamp, cpu_id, frequency=None, idle_state=None):\n        if (frequency is None) == (idle_state is None):\n            raise ValueError('Power transition must specify a frequency or an idle_state, but not both.')\n        self.timestamp = timestamp\n        self.cpu_id = cpu_id\n        self.frequency = frequency\n        self.idle_state = idle_state\n\n    def __str__(self):\n        return 'cpu {} @ {} -> freq: {} idle: {}'.format(self.cpu_id, self.timestamp,\n                                                         self.frequency, self.idle_state)\n\n    def __repr__(self):\n        return 'CPTE(c:{} t:{} f:{} i:{})'.format(self.cpu_id, self.timestamp,\n                          
                        self.frequency, self.idle_state)\n\n\nclass CorePowerDroppedEvents(object):\n\n    kind = 'dropped_events'\n    __slots__ = ['cpu_id']\n\n    def __init__(self, cpu_id):\n        self.cpu_id = cpu_id\n\n    def __str__(self):\n        return 'DROPPED EVENTS on CPU{}'.format(self.cpu_id)\n\n    __repr__ = __str__\n\n\nclass TraceMarkerEvent(object):\n\n    kind = 'marker'\n    __slots__ = ['name']\n\n    def __init__(self, name):\n        self.name = name\n\n    def __str__(self):\n        return 'MARKER: {}'.format(self.name)\n\n\nclass CpuPowerState(object):\n\n    __slots__ = ['frequency', 'idle_state']\n\n    @property\n    def is_idling(self):\n        return self.idle_state is not None and self.idle_state >= 0\n\n    @property\n    def is_active(self):\n        return self.idle_state == -1\n\n    def __init__(self, frequency=None, idle_state=None):\n        self.frequency = frequency\n        self.idle_state = idle_state\n\n    def __str__(self):\n        return 'CP(f:{} i:{})'.format(self.frequency, self.idle_state)\n\n    __repr__ = __str__\n\n\nclass SystemPowerState(object):\n\n    __slots__ = ['timestamp', 'cpus']\n\n    @property\n    def num_cores(self):\n        return len(self.cpus)\n\n    def __init__(self, num_cores, no_idle=False):\n        self.timestamp = None\n        self.cpus = []\n        idle_state = -1 if no_idle else None\n        for _ in range(num_cores):\n            self.cpus.append(CpuPowerState(idle_state=idle_state))\n\n    def copy(self):\n        new = SystemPowerState(self.num_cores)\n        new.timestamp = self.timestamp\n        for i, c in enumerate(self.cpus):\n            new.cpus[i].frequency = c.frequency\n            new.cpus[i].idle_state = c.idle_state\n        return new\n\n    def __str__(self):\n        return 'SP(t:{} Cs:{})'.format(self.timestamp, self.cpus)\n\n    __repr__ = __str__\n\n\nclass PowerStateProcessor(object):\n    \"\"\"\n    This takes a stream of power transition events and 
yields a timeline stream\n    of system power states.\n\n    \"\"\"\n\n    @property\n    def cpu_states(self):\n        return self.power_state.cpus\n\n    @property\n    def current_time(self):\n        return self.power_state.timestamp\n\n    @current_time.setter\n    def current_time(self, value):\n        self.power_state.timestamp = value\n\n    def __init__(self, cpus, wait_for_marker=True, no_idle=None):\n        if no_idle is None:\n            no_idle = not (cpus[0].cpuidle and cpus[0].cpuidle.states)\n        self.power_state = SystemPowerState(len(cpus), no_idle=no_idle)\n        self.requested_states = {}  # cpu_id -> requeseted state\n        self.wait_for_marker = wait_for_marker\n        self._saw_start_marker = False\n        self._saw_stop_marker = False\n        self.exceptions = []\n\n        self.idle_related_cpus = build_idle_state_map(cpus)\n\n    def process(self, event_stream):\n        for event in event_stream:\n            try:\n                next_state = self.update_power_state(event)\n                if self._saw_start_marker or not self.wait_for_marker:\n                    yield next_state\n                if self._saw_stop_marker:\n                    break\n            except Exception as e:  # pylint: disable=broad-except\n                self.exceptions.append(e)\n        else:\n            if self.wait_for_marker:\n                logger.warning(\"Did not see a STOP marker in the trace\")\n\n    def update_power_state(self, event):\n        \"\"\"\n        Update the tracked power state based on the specified event and\n        return updated power state.\n\n        \"\"\"\n        if event.kind == 'transition':\n            self._process_transition(event)\n        elif event.kind == 'dropped_events':\n            self._process_dropped_events(event)\n        elif event.kind == 'marker':\n            if event.name == 'START':\n                self._saw_start_marker = True\n            elif event.name == 'STOP':\n                
self._saw_stop_marker = True\n        else:\n            raise ValueError('Unexpected event type: {}'.format(event.kind))\n        return self.power_state.copy()\n\n    def _process_transition(self, event):\n        self.current_time = event.timestamp\n        if event.idle_state is None:\n            self.cpu_states[event.cpu_id].frequency = event.frequency\n        else:\n            if event.idle_state == -1:\n                self._process_idle_exit(event)\n            else:\n                self._process_idle_entry(event)\n\n    def _process_dropped_events(self, event):\n        self.cpu_states[event.cpu_id].frequency = None\n        old_idle_state = self.cpu_states[event.cpu_id].idle_state\n        self.cpu_states[event.cpu_id].idle_state = None\n\n        related_ids = self.idle_related_cpus[(event.cpu_id, old_idle_state)]\n        for rid in related_ids:\n            self.cpu_states[rid].idle_state = None\n\n    def _process_idle_entry(self, event):\n        if self.cpu_states[event.cpu_id].is_idling:\n            raise ValueError('Got idle state entry event for an idling core: {}'.format(event))\n        self.requested_states[event.cpu_id] = event.idle_state\n        self._try_transition_to_idle_state(event.cpu_id, event.idle_state)\n\n    def _process_idle_exit(self, event):\n        if self.cpu_states[event.cpu_id].is_active:\n            raise ValueError('Got idle state exit event for an active core: {}'.format(event))\n        self.requested_states.pop(event.cpu_id, None)  # remove outstanding request if there is one\n        old_state = self.cpu_states[event.cpu_id].idle_state\n        self.cpu_states[event.cpu_id].idle_state = -1\n\n        related_ids = self.idle_related_cpus[(event.cpu_id, old_state)]\n        if old_state is not None:\n            new_state = old_state - 1\n            for rid in related_ids:\n                if self.cpu_states[rid].idle_state > new_state:\n                    self._try_transition_to_idle_state(rid, new_state)\n\n  
  def _try_transition_to_idle_state(self, cpu_id, idle_state):\n        related_ids = self.idle_related_cpus[(cpu_id, idle_state)]\n\n        # Tristate: True - can transition, False - can't transition,\n        #           None - unknown idle state on at least one related cpu\n        transition_check = self._can_enter_state(related_ids, idle_state)\n\n        if transition_check is None:\n            # Unknown state on a related cpu means we're not sure whether we're\n            # entering requested state or a shallower one\n            self.cpu_states[cpu_id].idle_state = None\n            return\n\n        # Keep trying shallower states until all related\n        while not self._can_enter_state(related_ids, idle_state):\n            idle_state -= 1\n            related_ids = self.idle_related_cpus[(cpu_id, idle_state)]\n\n        self.cpu_states[cpu_id].idle_state = idle_state\n        for rid in related_ids:\n            self.cpu_states[rid].idle_state = idle_state\n\n    def _can_enter_state(self, related_ids, state):\n        \"\"\"\n        This is a tri-state check. 
Returns ``True`` if related cpu states allow transition\n        into this state, ``False`` if related cpu states don't allow transition into this\n        state, and ``None`` if at least one of the related cpus is in an unknown state\n        (so the decision of whether a transition is possible cannot be made).\n\n        \"\"\"\n        for rid in related_ids:\n            rid_requested_state = self.requested_states.get(rid, None)\n            rid_current_state = self.cpu_states[rid].idle_state\n            if rid_current_state is None:\n                return None\n            if rid_current_state < state:\n                if rid_requested_state is None or rid_requested_state < state:\n                    return False\n        return True\n\n\ndef stream_cpu_power_transitions(events):\n    for event in events:\n        if event.name == 'cpu_idle':\n            state = c_int32(event.state).value\n            yield CorePowerTransitionEvent(event.timestamp, event.cpu_id, idle_state=state)\n        elif event.name == 'cpu_frequency':\n            yield CorePowerTransitionEvent(event.timestamp, event.cpu_id, frequency=event.state)\n        elif event.name == 'DROPPED EVENTS DETECTED':\n            yield CorePowerDroppedEvents(event.cpu_id)\n        elif event.name == 'print':\n            if TRACE_MARKER_START in event.text:\n                yield TraceMarkerEvent('START')\n            elif TRACE_MARKER_STOP in event.text:\n                yield TraceMarkerEvent('STOP')\n            else:\n                if 'cpu_frequency' in event.text:\n                    match = DEVLIB_CPU_FREQ_REGEX.search(event.text)\n                else:\n                    match = INIT_CPU_FREQ_REGEX.search(event.text)\n                if match:\n                    yield CorePowerTransitionEvent(event.timestamp,\n                                                   int(match.group('cpu')),\n                                                   frequency=int(match.group('freq')))\n\n\ndef 
gather_core_states(system_state_stream, freq_dependent_idle_states=None):  # NOQA\n    if freq_dependent_idle_states is None:\n        freq_dependent_idle_states = []\n    for system_state in system_state_stream:\n        core_states = []\n        for cpu in system_state.cpus:\n            if cpu.idle_state == -1:\n                core_states.append((-1, cpu.frequency))\n            elif cpu.idle_state in freq_dependent_idle_states:\n                if cpu.frequency is not None:\n                    core_states.append((cpu.idle_state, cpu.frequency))\n                else:\n                    core_states.append((None, None))\n            else:\n                core_states.append((cpu.idle_state, None))\n        yield (system_state.timestamp, core_states)\n\n\ndef record_state_transitions(reporter, stream):\n    for event in stream:\n        if event.kind == 'transition':\n            reporter.record_transition(event)\n        yield event\n\n\nclass PowerStateTransitions(object):\n\n    name = 'transitions-timeline'\n\n    def __init__(self, output_directory):\n        self.filepath = os.path.join(output_directory, 'state-transitions-timeline.csv')\n        self.writer, self._wfh = create_writer(self.filepath)\n        headers = ['timestamp', 'cpu_id', 'frequency', 'idle_state']\n        self.writer.writerow(headers)\n\n    def update(self, timestamp, core_states):  # NOQA\n        # Just recording transitions, not doing anything\n        # with states.\n        pass\n\n    def record_transition(self, transition):\n        row = [transition.timestamp, transition.cpu_id,\n               transition.frequency, transition.idle_state]\n        self.writer.writerow(row)\n\n    def report(self):\n        return self\n\n    def write(self):\n        self._wfh.close()\n\n\nclass PowerStateTimeline(object):\n\n    name = 'state-timeline'\n\n    def __init__(self, output_directory, cpus):\n        self.filepath = os.path.join(output_directory, 'power-state-timeline.csv')\n    
    self.idle_state_names = {cpu.id: [s.name for s in cpu.cpuidle.states] for cpu in cpus}\n        self.writer, self._wfh = create_writer(self.filepath)\n        headers = ['ts'] + ['{} CPU{}'.format(cpu.name, cpu.id) for cpu in cpus]\n        self.writer.writerow(headers)\n\n    def update(self, timestamp, core_states):  # NOQA\n        row = [timestamp]\n        for cpu_idx, (idle_state, frequency) in enumerate(core_states):\n            if frequency is None:\n                if idle_state == -1:\n                    row.append('Running (unknown kHz)')\n                elif idle_state is None:\n                    row.append('unknown')\n                elif not self.idle_state_names[cpu_idx]:\n                    row.append('idle[{}]'.format(idle_state))\n                else:\n                    row.append(self.idle_state_names[cpu_idx][idle_state])\n            else:  # frequency is not None\n                if idle_state == -1:\n                    row.append(frequency)\n                elif idle_state is None:\n                    row.append('unknown')\n                else:\n                    row.append('{} ({})'.format(self.idle_state_names[cpu_idx][idle_state],\n                                                frequency))\n        self.writer.writerow(row)\n\n    def report(self):\n        return self\n\n    def write(self):\n        self._wfh.close()\n\n\nclass ParallelStats(object):\n\n    def __init__(self, output_directory, cpus, use_ratios=False):\n        self.filepath = os.path.join(output_directory, 'parallel-stats.csv')\n        self.clusters = defaultdict(set)\n        self.use_ratios = use_ratios\n\n        clusters = []\n        for cpu in cpus:\n            if cpu.cpufreq.related_cpus not in clusters:\n                clusters.append(cpu.cpufreq.related_cpus)\n\n        for i, clust in enumerate(clusters):\n            self.clusters[str(i)] = set(clust)\n        self.clusters['all'] = {cpu.id for cpu in cpus}\n\n        self.first_timestamp 
= None\n        self.last_timestamp = None\n        self.previous_states = None\n        self.parallel_times = defaultdict(lambda: defaultdict(int))\n        self.running_times = defaultdict(int)\n\n    def update(self, timestamp, core_states):\n        if self.last_timestamp is not None:\n            delta = timestamp - self.last_timestamp\n            active_cores = [i for i, c in enumerate(self.previous_states)\n                            if c and c[0] == -1]\n            for cluster, cluster_cores in self.clusters.items():\n                clust_active_cores = len(cluster_cores.intersection(active_cores))\n                self.parallel_times[cluster][clust_active_cores] += delta\n                if clust_active_cores:\n                    self.running_times[cluster] += delta\n        else:  # initial update\n            self.first_timestamp = timestamp\n\n        self.last_timestamp = timestamp\n        self.previous_states = core_states\n\n    def report(self):  # NOQA\n        if self.last_timestamp is None:\n            return None\n\n        report = ParallelReport(self.filepath)\n        total_time = self.last_timestamp - self.first_timestamp\n        for cluster in sorted(self.parallel_times):\n            running_time = self.running_times[cluster]\n            for n in range(len(self.clusters[cluster]) + 1):\n                time = self.parallel_times[cluster][n]\n                time_pc = time / total_time\n                if not self.use_ratios:\n                    time_pc *= 100\n                if n:\n                    if running_time:\n                        running_time_pc = time / running_time\n                    else:\n                        running_time_pc = 0\n                    if not self.use_ratios:\n                        running_time_pc *= 100\n                else:\n                    running_time_pc = 0\n                precision = 3 if self.use_ratios else 1\n                fmt = '{{:.{}f}}'.format(precision)\n                
report.add([cluster, n,\n                            fmt.format(time),\n                            fmt.format(time_pc),\n                            fmt.format(running_time_pc),\n                            ])\n        return report\n\n\nclass ParallelReport(object):\n\n    name = 'parallel-stats'\n\n    def __init__(self, filepath):\n        self.filepath = filepath\n        self.values = []\n\n    def add(self, value):\n        self.values.append(value)\n\n    def write(self):\n        with csvwriter(self.filepath) as writer:\n            writer.writerow(['cluster', 'number_of_cores', 'total_time', '%time', '%running_time'])\n            writer.writerows(self.values)\n\n\nclass PowerStateStats(object):\n\n    def __init__(self, output_directory, cpus, use_ratios=False):\n        self.filepath = os.path.join(output_directory, 'power-state-stats.csv')\n        self.core_names = [cpu.name for cpu in cpus]\n        self.idle_state_names = {cpu.id: [s.name for s in cpu.cpuidle.states] for cpu in cpus}\n        self.use_ratios = use_ratios\n        self.first_timestamp = None\n        self.last_timestamp = None\n        self.previous_states = None\n        self.cpu_states = defaultdict(lambda: defaultdict(int))\n\n    def update(self, timestamp, core_states):  # NOQA\n        if self.last_timestamp is not None:\n            delta = timestamp - self.last_timestamp\n            for cpu, (idle, freq) in enumerate(self.previous_states):\n                if idle == -1:\n                    if freq is not None:\n                        state = '{:07}KHz'.format(freq)\n                    else:\n                        state = 'Running (unknown KHz)'\n                elif freq:\n                    state = '{}-{:07}KHz'.format(self.idle_state_names[cpu][idle], freq)\n                elif idle is not None and self.idle_state_names[cpu]:\n                    state = self.idle_state_names[cpu][idle]\n                else:\n                    state = 'unknown'\n                
self.cpu_states[cpu][state] += delta\n        else:  # initial update\n            self.first_timestamp = timestamp\n\n        self.last_timestamp = timestamp\n        self.previous_states = core_states\n\n    def report(self):\n        if self.last_timestamp is None:\n            return None\n        total_time = self.last_timestamp - self.first_timestamp\n        state_stats = defaultdict(lambda: [None] * len(self.core_names))\n\n        for cpu, states in self.cpu_states.items():\n            for state in states:\n                time = states[state]\n                time_pc = time / total_time\n                if not self.use_ratios:\n                    time_pc *= 100\n                state_stats[state][cpu] = time_pc\n\n        precision = 3 if self.use_ratios else 1\n        return PowerStateStatsReport(self.filepath, state_stats, self.core_names, precision)\n\n\nclass PowerStateStatsReport(object):\n\n    name = 'power-state-stats'\n\n    def __init__(self, filepath, state_stats, core_names, precision=2):\n        self.filepath = filepath\n        self.state_stats = state_stats\n        self.core_names = core_names\n        self.precision = precision\n\n    def write(self):\n        with csvwriter(self.filepath) as writer:\n            headers = ['state'] + ['{} CPU{}'.format(c, i)\n                                   for i, c in enumerate(self.core_names)]\n            writer.writerow(headers)\n            for state in sorted(self.state_stats):\n                stats = self.state_stats[state]\n                fmt = '{{:.{}f}}'.format(self.precision)\n                writer.writerow([state] + [fmt.format(s if s is not None else 0)\n                                           for s in stats])\n\n\nclass CpuUtilizationTimeline(object):\n\n    name = 'utilization-timeline'\n\n    def __init__(self, output_directory, cpus):\n        self.filepath = os.path.join(output_directory, 'utilization-timeline.csv')\n        self.writer, self._wfh = 
create_writer(self.filepath)\n\n        headers = ['ts'] + ['{} CPU{}'.format(cpu.name, cpu.id) for cpu in cpus]\n        self.writer.writerow(headers)\n        self._max_freq_list = [cpu.cpufreq.available_frequencies[-1] for cpu in cpus if cpu.cpufreq.available_frequencies]\n\n    def update(self, timestamp, core_states):  # NOQA\n        row = [timestamp]\n        for core, [_, frequency] in enumerate(core_states):\n            if frequency is not None and core in self._max_freq_list:\n                frequency /= float(self._max_freq_list[core])\n                row.append(frequency)\n            else:\n                row.append(None)\n        self.writer.writerow(row)\n\n    def report(self):\n        return self\n\n    def write(self):\n        self._wfh.close()\n\n\ndef build_idle_state_map(cpus):\n    idle_state_map = defaultdict(list)\n    for cpu_idx, cpu in enumerate(cpus):\n        related_cpus = set(cpu.cpufreq.related_cpus) - set([cpu_idx])\n        first_cluster_state = cpu.cpuidle.num_states - 1\n        for state_idx, _ in enumerate(cpu.cpuidle.states):\n            if state_idx < first_cluster_state:\n                idle_state_map[(cpu_idx, state_idx)] = []\n            else:\n                idle_state_map[(cpu_idx, state_idx)] = list(related_cpus)\n    return idle_state_map\n\n\ndef report_power_stats(trace_file, cpus, output_basedir, use_ratios=False, no_idle=None,  # pylint: disable=too-many-locals\n                       split_wfi_states=False):\n    \"\"\"\n    Process trace-cmd output to generate timelines and statistics of CPU power\n    state (a.k.a P- and C-state) transitions in the trace.\n\n    The results will be written into a subdirectory called \"power-stats\" under\n    the specified ``output_basedir``.\n\n    :param trace_file: trace-cmd's text trace to process.\n    :param cpus: A list of ``CpuInfo`` objects describing a target's CPUs.\n                 These are typically reported as part of ``TargetInfo`` in\n                 
WA output.\n    :param output_basedir: Base location for the output. This directory must\n                        exist and must not contain a directory or file\n                        named ``\"power-states\"``.\n    :param use_ratios: By default, stats will be reported as percentages. Set\n                        this to ``True`` to report stats as decimals in the\n                        ``0 <= value <= 1`` instead.\n    :param no_idle: ``False`` if cpuidle and at least one idle state per CPU are\n                    enabled, should be ``True`` otherwise. This influences the\n                    assumptions about CPU's initial states. If not explicitly\n                    set, the value for this will be guessed based on whether\n                    cpuidle states are present in the first ``CpuInfo``.\n\n\n    The output directory will contain the following files:\n\n    power-state-stats.csv\n        Power state residency statistics for each CPU. Shows the percentage of\n        time a CPU has spent in each of its available power states.\n\n    parallel-stats.csv\n        Parallel execution stats for each CPU cluster, and combined stats for\n        the whole system.\n\n    power-state-timeline.csv\n        Timeline of CPU power states. Shows which power state each CPU is in at\n        a point in time.\n\n    state-transitions-timeline.csv\n        Timeline of CPU power state transitions. Each entry shows a CPU's\n        transition from one power state to another.\n\n    utilization-timeline.csv\n        Timeline of CPU utilizations.\n\n    .. note:: Timeline entries aren't at regular intervals, but at times of\n              power transition events.\n\n    Stats are generated by assembling a pipeline consisting of the following\n    stages:\n\n        1. Parse trace into trace events\n        2. Filter trace events into power state transition events\n        3. Record power state transitions\n        4. Convert transitions into power states.\n        5. 
Collapse the power states into timestamped ``(C state, P state)``\n           tuples for each cpu.\n        6. Update reporters/stats generators with cpu states.\n\n    \"\"\"\n    output_directory = os.path.join(output_basedir, 'power-states')\n    if not os.path.isdir(output_directory):\n        os.mkdir(output_directory)\n\n    freq_dependent_idle_states = []\n    if split_wfi_states:\n        freq_dependent_idle_states = [0]\n\n    # init trace, processor, and reporters\n    # note: filter_markers is False here, even though we *will* filter by them. The\n    #       reason for this is that we want to observe events before the start\n    #       marker in order to establish the intial power states.\n    parser = TraceCmdParser(filter_markers=False,\n                            events=['cpu_idle', 'cpu_frequency', 'print'])\n    ps_processor = PowerStateProcessor(cpus, wait_for_marker=trace_has_marker(trace_file),\n                                       no_idle=no_idle)\n    transitions_reporter = PowerStateTransitions(output_directory)\n    reporters = [\n        ParallelStats(output_directory, cpus, use_ratios),\n        PowerStateStats(output_directory, cpus, use_ratios),\n        PowerStateTimeline(output_directory, cpus),\n        CpuUtilizationTimeline(output_directory, cpus),\n        transitions_reporter,\n    ]\n\n    # assemble the pipeline\n    event_stream = parser.parse(trace_file)\n    transition_stream = stream_cpu_power_transitions(event_stream)\n    recorded_trans_stream = record_state_transitions(transitions_reporter, transition_stream)\n    power_state_stream = ps_processor.process(recorded_trans_stream)\n    core_state_stream = gather_core_states(power_state_stream, freq_dependent_idle_states)\n\n    # execute the pipeline\n    for timestamp, states in core_state_stream:\n        for reporter in reporters:\n            reporter.update(timestamp, states)\n\n    # report any issues encountered while executing the pipeline\n    if 
ps_processor.exceptions:\n        logger.warning('There were errors while processing trace:')\n        for e in ps_processor.exceptions:\n            logger.warning(str(e))\n\n    # generate reports\n    reports = {}\n    for reporter in reporters:\n        report = reporter.report()\n        report.write()\n        reports[report.name] = report\n    return reports\n"
  },
  {
    "path": "wa/utils/diff.py",
    "content": "#    Copyright 2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\nimport os\nimport re\nimport logging\n\n\nfrom builtins import zip  # pylint: disable=redefined-builtin\nfrom future.moves.itertools import zip_longest\n\nfrom wa.utils.misc import diff_tokens, write_table\nfrom wa.utils.misc import ensure_file_directory_exists as _f\n\nlogger = logging.getLogger('diff')\n\n\ndef diff_interrupt_files(before, after, result):  # pylint: disable=R0914\n    output_lines = []\n    with open(before) as bfh:\n        with open(after) as ofh:\n            for bline, aline in zip(bfh, ofh):\n                bchunks = bline.strip().split()\n                while True:\n                    achunks = aline.strip().split()\n                    if achunks[0] == bchunks[0]:\n                        diffchunks = ['']\n                        diffchunks.append(achunks[0])\n                        diffchunks.extend([diff_tokens(b, a) for b, a\n                                           in zip(bchunks[1:], achunks[1:])])\n                        output_lines.append(diffchunks)\n                        break\n                    else:  # new category appeared in the after file\n                        diffchunks = ['>'] + achunks\n                        output_lines.append(diffchunks)\n                        try:\n                            aline = next(ofh)\n                        except StopIteration:\n                            break\n\n   
 # Offset heading columns by one to allow for row labels on subsequent\n    # lines.\n    output_lines[0].insert(0, '')\n\n    # Any \"columns\" that do not have headings in the first row are not actually\n    # columns -- they are a single column where space-separated words got\n    # split. Merge them back together to prevent them from being\n    # column-aligned by write_table.\n    table_rows = [output_lines[0]]\n    num_cols = len(output_lines[0])\n    for row in output_lines[1:]:\n        table_row = row[:num_cols]\n        table_row.append(' '.join(row[num_cols:]))\n        table_rows.append(table_row)\n\n    with open(result, 'w') as wfh:\n        write_table(table_rows, wfh)\n\n\ndef diff_sysfs_dirs(before, after, result):  # pylint: disable=R0914\n    before_files = []\n    for root, _, files in os.walk(before):\n        before_files.extend([os.path.join(root, f) for f in files])\n    before_files = list(filter(os.path.isfile, before_files))\n    files = [os.path.relpath(f, before) for f in before_files]\n    after_files = [os.path.join(after, f) for f in files]\n    diff_files = [os.path.join(result, f) for f in files]\n\n    for bfile, afile, dfile in zip(before_files, after_files, diff_files):\n        if not os.path.isfile(afile):\n            logger.debug('sysfs_diff: {} does not exist or is not a file'.format(afile))\n            continue\n\n        with open(bfile) as bfh, open(afile) as afh:  # pylint: disable=C0321\n            with open(_f(dfile), 'w') as dfh:\n                for i, (bline, aline) in enumerate(zip_longest(bfh, afh), 1):\n                    if aline is None:\n                        logger.debug('Lines missing from {}'.format(afile))\n                        break\n                    bchunks = re.split(r'(\\W+)', bline)\n                    achunks = re.split(r'(\\W+)', aline)\n                    if len(bchunks) != len(achunks):\n                        logger.debug('Token length mismatch in {} on line {}'.format(bfile, i))\n 
                       dfh.write('xxx ' + bline)\n                        continue\n                    if ((len([c for c in bchunks if c.strip()]) == len([c for c in achunks if c.strip()]) == 2)\n                            and (bchunks[0] == achunks[0])):\n                        # if there are only two columns and the first column is the\n                        # same, assume it's a \"header\" column and do not diff it.\n                        dchunks = [bchunks[0]] + [diff_tokens(b, a) for b, a in zip(bchunks[1:], achunks[1:])]\n                    else:\n                        dchunks = [diff_tokens(b, a) for b, a in zip(bchunks, achunks)]\n                    dfh.write(''.join(dchunks))\n"
  },
  {
    "path": "wa/utils/doc.py",
    "content": "#    Copyright 2014-2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\n\n\"\"\"\nUtilities for working with and formatting documentation.\n\n\"\"\"\nimport os\nimport re\nimport inspect\nfrom itertools import cycle\n\nUSER_HOME = os.path.expanduser('~')\n\nBULLET_CHARS = '-*'\n\n\ndef get_summary(aclass):\n    \"\"\"\n    Returns the summary description for an extension class. The summary is the\n    first paragraph (separated by blank line) of the description taken either from\n    the ``descripton`` attribute of the class, or if that is not present, from the\n    class' docstring.\n\n    \"\"\"\n    return get_description(aclass).split('\\n\\n')[0]\n\n\ndef get_description(aclass):\n    \"\"\"\n    Return the description of the specified extension class. The description is taken\n    either from ``description`` attribute of the class or its docstring.\n\n    \"\"\"\n    if hasattr(aclass, 'description') and aclass.description:\n        return inspect.cleandoc(aclass.description)\n    if aclass.__doc__:\n        return inspect.getdoc(aclass)\n    else:\n        return 'no documentation found for {}'.format(aclass.__name__)\n\n\ndef get_type_name(obj):\n    \"\"\"Returns the name of the type object or function specified. 
In case of a lambda,\n    the definiition is returned with the parameter replaced by \"value\".\"\"\"\n    match = re.search(r\"<(type|class|function) '?(.*?)'?>\", str(obj))\n    if isinstance(obj, tuple):\n        name = obj[1]\n    elif match.group(1) == 'function':\n        text = str(obj)\n        name = text.split()[1]\n        if name.endswith('<lambda>'):\n            source = inspect.getsource(obj).strip().replace('\\n', ' ')\n            match = re.search(r'lambda\\s+(\\w+)\\s*:\\s*(.*?)\\s*[\\n,]', source)\n            if not match:\n                raise ValueError('could not get name for {}'.format(obj))\n            name = match.group(2).replace(match.group(1), 'value')\n    else:\n        name = match.group(2)\n        if '.' in name:\n            name = name.split('.')[-1]\n    return name\n\n\ndef count_leading_spaces(text):\n    \"\"\"\n    Counts the number of leading space characters in a string.\n\n    TODO: may need to update this to handle whitespace, but shouldn't\n          be necessary as there should be no tabs in Python source.\n\n    \"\"\"\n    nspaces = 0\n    for c in text:\n        if c == ' ':\n            nspaces += 1\n        else:\n            break\n    return nspaces\n\n\ndef format_column(text, width):\n    \"\"\"\n    Formats text into a column of specified width. If a line is too long,\n    it will be broken on a word boundary. 
The new lines will have the same\n    number of leading spaces as the original line.\n\n    Note: this will not attempt to join up lines that are too short.\n\n    \"\"\"\n    formatted = []\n    for line in text.split('\\n'):\n        line_len = len(line)\n        if line_len <= width:\n            formatted.append(line)\n        else:\n            words = line.split(' ')\n            new_line = words.pop(0)\n            while words:\n                next_word = words.pop(0)\n                if (len(new_line) + len(next_word) + 1) < width:\n                    new_line += ' ' + next_word\n                else:\n                    formatted.append(new_line)\n                    new_line = ' ' * count_leading_spaces(new_line) + next_word\n            formatted.append(new_line)\n    return '\\n'.join(formatted)\n\n\ndef format_bullets(text, width, char='-', shift=3, outchar=None):\n    \"\"\"\n    Formats text into bulleted list. Assumes each line of input that starts with\n    ``char`` (possibly preceeded with whitespace) is a new bullet point. Note: leading\n    whitespace in the input will *not* be preserved. Instead, it will be determined by\n    ``shift`` parameter.\n\n    :text: the text to be formated\n    :width: format width (note: must be at least ``shift`` + 4).\n    :char: character that indicates a new bullet point in the input text.\n    :shift: How far bulleted entries will be indented. This indicates the indentation\n            level of the bullet point. Text indentation level will be ``shift`` + 3.\n    :outchar: character that will be used to mark bullet points in the output. 
If\n              left as ``None``, ``char`` will be used.\n\n    \"\"\"\n    bullet_lines = []\n    output = ''\n\n    def __process_bullet(bullet_lines):\n        if bullet_lines:\n            bullet = format_paragraph(indent(' '.join(bullet_lines), shift + 2), width)\n            bullet = bullet[:3] + outchar + bullet[4:]\n            del bullet_lines[:]\n            return bullet + '\\n'\n        else:\n            return ''\n\n    if outchar is None:\n        outchar = char\n    for line in text.split('\\n'):\n        line = line.strip()\n        if line.startswith(char):  # new bullet\n            output += __process_bullet(bullet_lines)\n            line = line[1:].strip()\n        bullet_lines.append(line)\n    output += __process_bullet(bullet_lines)\n    return output\n\n\ndef format_simple_table(rows, headers=None, align='>', show_borders=True, borderchar='='):  # pylint: disable=R0914\n    \"\"\"Formats a simple table.\"\"\"\n    if not rows:\n        return ''\n    rows = [list(map(str, r)) for r in rows]\n    num_cols = len(rows[0])\n\n    # cycle specified alignments until we have num_cols of them. 
This is\n    # consitent with how such cases are handled in R, pandas, etc.\n    it = cycle(align)\n    align = [next(it) for _ in range(num_cols)]\n\n    cols = list(zip(*rows))\n    col_widths = [max(list(map(len, c))) for c in cols]\n    if headers:\n        col_widths = [max(len(h), cw) for h, cw in zip(headers, col_widths)]\n    row_format = ' '.join(['{:%s%s}' % (align[i], w) for i, w in enumerate(col_widths)])\n    row_format += '\\n'\n\n    border = row_format.format(*[borderchar * cw for cw in col_widths])\n\n    result = border if show_borders else ''\n    if headers:\n        result += row_format.format(*headers)\n        result += border\n    for row in rows:\n        result += row_format.format(*row)\n    if show_borders:\n        result += border\n    return result\n\n\ndef format_paragraph(text, width):\n    \"\"\"\n    Format the specified text into a column of specified with. The text is\n    assumed to be a single paragraph and existing line breaks will not be preserved.\n    Leading spaces (of the initial line), on the other hand, will be preserved.\n\n    \"\"\"\n    text = re.sub('\\n\\n*\\\\s*', ' ', text.strip('\\n'))\n    return format_column(text, width)\n\n\ndef format_body(text, width):\n    \"\"\"\n    Format the specified text into a column  of specified width. The text is\n    assumed to be a \"body\" of one or more paragraphs separated by one or more\n    blank lines. 
The initial indentation of the first line of each paragraph\n    will be presevered, but any other formatting may be clobbered.\n\n    \"\"\"\n    text = re.sub('\\n\\\\s*\\n', '\\n\\n', text.strip('\\n'))  # get rid of all-whitespace lines\n    paragraphs = re.split('\\n\\n+', text)\n    formatted_paragraphs = []\n    for p in paragraphs:\n        if p.strip() and p.strip()[0] in BULLET_CHARS:\n            formatted_paragraphs.append(format_bullets(p, width))\n        else:\n            formatted_paragraphs.append(format_paragraph(p, width))\n    return '\\n\\n'.join(formatted_paragraphs)\n\n\ndef strip_inlined_text(text):\n    \"\"\"\n    This function processes multiline inlined text (e.g. form docstrings)\n    to strip away leading spaces and leading and trailing new lines.\n\n    \"\"\"\n    text = text.strip('\\n')\n    lines = [ln.rstrip() for ln in text.split('\\n')]\n\n    # first line is special as it may not have the indet that follows the\n    # others, e.g. if it starts on the same as the multiline quote (\"\"\").\n    nspaces = count_leading_spaces(lines[0])\n\n    if len([ln for ln in lines if ln]) > 1:\n        to_strip = min(count_leading_spaces(ln) for ln in lines[1:] if ln)\n        if nspaces >= to_strip:\n            stripped = [lines[0][to_strip:]]\n        else:\n            stripped = [lines[0][nspaces:]]\n        stripped += [ln[to_strip:] for ln in lines[1:]]\n    else:\n        stripped = [lines[0][nspaces:]]\n    return '\\n'.join(stripped).strip('\\n')\n\n\ndef indent(text, spaces=4):\n    \"\"\"Indent the lines i the specified text by ``spaces`` spaces.\"\"\"\n    indented = []\n    for line in text.split('\\n'):\n        if line:\n            indented.append(' ' * spaces + line)\n        else:  # do not indent emtpy lines\n            indented.append(line)\n    return '\\n'.join(indented)\n\n\ndef format_literal(lit):\n    if isinstance(lit, str):\n        return '``\\'{}\\'``'.format(lit)\n    elif hasattr(lit, 'pattern'):  # regex\n 
       return '``r\\'{}\\'``'.format(lit.pattern)\n    elif isinstance(lit, dict):\n        content = indent(',\\n'.join(\"{}: {}\".format(key, val) for (key, val) in lit.items()))\n        return '::\\n\\n{}'.format(indent('{{\\n{}\\n}}'.format(content)))\n    else:\n        return '``{}``'.format(lit)\n\n\ndef get_params_rst(parameters):\n    text = ''\n    for param in parameters:\n        text += '{}: {}\\n'.format(param.name, param.mandatory and '(mandatory)' or ' ')\n        text += indent(\"type: ``'{}'``\\n\\n\".format(get_type_name(param.kind)))\n        desc = strip_inlined_text(param.description or '')\n        text += indent('{}\\n'.format(desc))\n        if param.aliases:\n            text += indent('\\naliases: {}\\n'.format(', '.join(map(format_literal, param.aliases))))\n        if param.global_alias:\n            text += indent('\\nglobal alias: {}\\n'.format(format_literal(param.global_alias)))\n        if param.allowed_values:\n            text += indent('\\nallowed values: {}\\n'.format(', '.join(map(format_literal, param.allowed_values))))\n        elif param.constraint:\n            text += indent('\\nconstraint: ``{}``\\n'.format(get_type_name(param.constraint)))\n        if param.default is not None:\n            value = param.default\n            if isinstance(value, str) and value.startswith(USER_HOME):\n                value = value.replace(USER_HOME, '~')\n            text += indent('\\ndefault: {}\\n'.format(format_literal(value)))\n        text += '\\n'\n    return text\n\n\ndef get_aliases_rst(aliases):\n    text = ''\n    for alias in aliases:\n        param_str = ', '.join(['{}={}'.format(n, format_literal(v))\n                               for n, v in alias.params.items()])\n        text += '{}\\n{}\\n\\n'.format(alias.name, indent(param_str))\n    return text\n\n\ndef underline(text, symbol='='):\n    return '{}\\n{}\\n\\n'.format(text, symbol * len(text))\n\n\ndef line_break(length=10, symbol='-'):\n    \"\"\"Insert a line 
break\"\"\"\n    return '\\n{}\\n\\n'.format(symbol * length)\n\n\ndef get_rst_from_plugin(plugin):\n    text = underline(plugin.name, '-')\n    if hasattr(plugin, 'description'):\n        desc = strip_inlined_text(plugin.description or '')\n    elif plugin.__doc__:\n        desc = strip_inlined_text(plugin.__doc__)\n    else:\n        desc = ''\n    text += desc + '\\n\\n'\n\n    aliases_rst = get_aliases_rst(plugin.aliases)\n    if aliases_rst:\n        text += underline('aliases', '~') + aliases_rst\n\n    params_rst = get_params_rst(plugin.parameters)\n    if params_rst:\n        text += underline('parameters', '~') + params_rst\n\n    return text + '\\n'\n"
  },
  {
    "path": "wa/utils/exec_control.py",
    "content": "#    Copyright 2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\n# \"environment\" management:\n__environments = {}\n__active_environment = None\n\n\ndef activate_environment(name):\n    \"\"\"\n    Sets the current tracking environment to ``name``. If an\n    environment with that name does not already exist, it will be\n    created.\n    \"\"\"\n    # pylint: disable=W0603\n    global __active_environment\n\n    if name not in list(__environments.keys()):\n        init_environment(name)\n    __active_environment = name\n\n\ndef init_environment(name):\n    \"\"\"\n    Create a new environment called ``name``, but do not set it as the\n    current environment.\n\n    :raises: ``ValueError`` if an environment with name ``name``\n             already exists.\n    \"\"\"\n    if name in list(__environments.keys()):\n        msg = \"Environment {} already exists\".format(name)\n        raise ValueError(msg)\n    __environments[name] = []\n\n\ndef reset_environment(name=None):\n    \"\"\"\n    Reset method call tracking for environment ``name``. 
If ``name`` is\n    not specified or is ``None``, reset the current active environment.\n\n    :raises: ``ValueError`` if an environment with name ``name``\n          does not exist.\n    \"\"\"\n\n    if name is not None:\n        if name not in list(__environments.keys()):\n            msg = \"Environment {} does not exist\".format(name)\n            raise ValueError(msg)\n        __environments[name] = []\n    else:\n        if __active_environment is None:\n            activate_environment('default')\n        __environments[__active_environment] = []\n\n\n# The decorators:\ndef once_per_instance(method):\n    \"\"\"\n    The specified method will be invoked only once for every bound\n    instance within the environment.\n    \"\"\"\n    def wrapper(*args, **kwargs):\n        if __active_environment is None:\n            activate_environment('default')\n        func_id = repr(method.__hash__()) + repr(args[0].__hash__())\n        if func_id in __environments[__active_environment]:\n            return\n        else:\n            __environments[__active_environment].append(func_id)\n        return method(*args, **kwargs)\n\n    return wrapper\n\n\ndef once_per_class(method):\n    \"\"\"\n    The specified method will be invoked only once for all instances\n    of a class within the environment.\n    \"\"\"\n    def wrapper(*args, **kwargs):\n        if __active_environment is None:\n            activate_environment('default')\n\n        func_id = repr(method.__name__) + repr(args[0].__class__)\n\n        if func_id in __environments[__active_environment]:\n            return\n        else:\n            __environments[__active_environment].append(func_id)\n        return method(*args, **kwargs)\n\n    return wrapper\n\n\ndef once_per_attribute_value(attr_name):\n    \"\"\"\n    The specified method will be invoked once for all instances that share the\n    same value for the specified attribute (sameness is established by comparing\n    repr() of the values).\n    
\"\"\"\n    def wrapped_once_per_attribute_value(method):\n        def wrapper(*args, **kwargs):\n            if __active_environment is None:\n                activate_environment('default')\n\n            attr_value = getattr(args[0], attr_name)\n            func_id = repr(method.__name__) + repr(args[0].__class__) + repr(attr_value)\n\n            if func_id in __environments[__active_environment]:\n                return\n            else:\n                __environments[__active_environment].append(func_id)\n            return method(*args, **kwargs)\n\n        return wrapper\n    return wrapped_once_per_attribute_value\n\n\ndef once(method):\n    \"\"\"\n    The specified method will be invoked only once within the\n    environment.\n    \"\"\"\n    def wrapper(*args, **kwargs):\n        if __active_environment is None:\n            activate_environment('default')\n\n        func_id = repr(method.__code__)\n\n        if func_id in __environments[__active_environment]:\n            return\n        else:\n            __environments[__active_environment].append(func_id)\n        return method(*args, **kwargs)\n\n    return wrapper\n"
  },
  {
    "path": "wa/utils/formatter.py",
    "content": "#    Copyright 2013-2017 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\n\nfrom wa.utils.terminalsize import get_terminal_size\n\n\nINDENTATION_FROM_TITLE = 4\n\n\nclass TextFormatter(object):\n\n    \"\"\"\n    This is a base class for text formatting. It mainly ask to implement two\n    methods which are add_item and format_data. The formar will add new text to\n    the formatter, whereas the latter will return a formatted text. The name\n    attribute represents the name of the foramtter.\n    \"\"\"\n\n    name = None\n    data = None\n\n    def __init__(self):\n        pass\n\n    def add_item(self, new_data, item_title):\n        \"\"\"\n        Add new item to the text formatter.\n\n        :param new_data: The data to be added\n        :param item_title: A title for the added data\n        \"\"\"\n        raise NotImplementedError()\n\n    def format_data(self):\n        \"\"\"\n        It returns a formatted text\n        \"\"\"\n        raise NotImplementedError()\n\n\nclass DescriptionListFormatter(TextFormatter):\n\n    name = 'description_list_formatter'\n    data = None\n\n    def get_text_width(self):\n        if not self._text_width:\n            self._text_width, _ = get_terminal_size()  # pylint: disable=unpacking-non-sequence\n        return self._text_width\n\n    def set_text_width(self, value):\n        self._text_width = value\n\n    text_width = property(get_text_width, set_text_width)\n\n    def 
__init__(self, title=None, width=None):\n        super(DescriptionListFormatter, self).__init__()\n        self.data_title = title\n        self._text_width = width\n        self.longest_word_length = 0\n        self.data = []\n\n    def add_item(self, new_data, item_title):\n        if len(item_title) > self.longest_word_length:\n            self.longest_word_length = len(item_title)\n        self.data[len(self.data):] = [(item_title, self._remove_newlines(new_data))]\n\n    def format_data(self):\n        parag_indentation = self.longest_word_length + INDENTATION_FROM_TITLE\n        string_formatter = '{}:<{}{} {}'.format('{', parag_indentation, '}', '{}')\n\n        formatted_data = ''\n        if self.data_title:\n            formatted_data += self.data_title\n\n        line_width = self.text_width - parag_indentation\n        for title, paragraph in self.data:\n            formatted_data += '\\n'\n            title_len = self.longest_word_length - len(title)\n            title += ':'\n            if title_len > 0:\n                title = (' ' * title_len) + title\n\n            parag_lines = self._break_lines(paragraph, line_width).splitlines()\n            if parag_lines:\n                formatted_data += string_formatter.format(title, parag_lines[0])\n                for line in parag_lines[1:]:\n                    formatted_data += '\\n' + string_formatter.format('', line)\n            else:\n                formatted_data += title[:-1]\n\n        self.text_width = None\n        return formatted_data\n\n    # Return text's paragraphs sperated in a list, such that each index in the\n    # list is a single text paragraph with no new lines\n    def _remove_newlines(self, new_data):  # pylint: disable=R0201\n        parag_list = ['']\n        parag_num = 0\n        prv_parag = None\n        # For each paragraph sperated by a new line\n        for paragraph in new_data.splitlines():\n            if paragraph:\n                parag_list[parag_num] += ' ' + 
paragraph\n            # if the previous line is NOT empty, then add new empty index for\n            # the next paragraph\n            elif prv_parag:\n                parag_num = 1\n                parag_list.append('')\n            prv_parag = paragraph\n\n        # sometimes, we end up with an empty string as the last item so we reomve it\n        if not parag_list[-1]:\n            return parag_list[:-1]\n        return parag_list\n\n    def _break_lines(self, parag_list, line_width):  # pylint: disable=R0201\n        formatted_paragraphs = []\n        for para in parag_list:\n            words = para.split()\n            if words:\n                formatted_text = words.pop(0)\n                current_width = len(formatted_text)\n                # for each word in the paragraph, line width is an accumlation of\n                # word length + 1 (1 is for the space after each word).\n                for word in words:\n                    word = word.strip()\n                    if current_width + len(word) + 1 >= line_width:\n                        formatted_text += '\\n' + word\n                        current_width = len(word)\n                    else:\n                        formatted_text += ' ' + word\n                        current_width += len(word) + 1\n                formatted_paragraphs.append(formatted_text)\n        return '\\n\\n'.join(formatted_paragraphs)\n"
  },
  {
    "path": "wa/utils/log.py",
    "content": "#    Copyright 2013-2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\n\n# pylint: disable=E1101\nimport logging\nimport logging.handlers\nimport os\nimport string\nimport subprocess\nimport threading\nfrom contextlib import contextmanager\n\nimport colorama\n\nfrom devlib import DevlibError\n\nfrom wa.framework import signal\nfrom wa.framework.exception import WAError\nfrom wa.utils.misc import get_traceback\n\n\nCOLOR_MAP = {\n    logging.DEBUG: colorama.Fore.BLUE,\n    logging.INFO: colorama.Fore.GREEN,\n    logging.WARNING: colorama.Fore.YELLOW,\n    logging.ERROR: colorama.Fore.RED,\n    logging.CRITICAL: colorama.Style.BRIGHT + colorama.Fore.RED,\n}\n\nRESET_COLOR = colorama.Style.RESET_ALL\n\nDEFAULT_INIT_BUFFER_CAPACITY = 1000\n\n_indent_level = 0\n_indent_width = 4\n_console_handler = None\n_init_handler = None\n\n\n# pylint: disable=global-statement\ndef init(verbosity=logging.INFO, color=True, indent_with=4,\n         regular_fmt='%(levelname)-8s %(message)s',\n         verbose_fmt='%(asctime)s %(levelname)-8s %(name)10.10s: %(message)s',\n         debug=False):\n    global _indent_width, _console_handler, _init_handler\n    _indent_width = indent_with\n    signal.log_error_func = lambda m: log_error(m, signal.logger)\n\n    root_logger = logging.getLogger()\n    root_logger.setLevel(logging.DEBUG)\n\n    error_handler = ErrorSignalHandler(logging.DEBUG)\n    root_logger.addHandler(error_handler)\n\n    
_console_handler = logging.StreamHandler()\n    if color:\n        formatter = ColorFormatter\n    else:\n        formatter = LineFormatter\n    if verbosity:\n        _console_handler.setLevel(logging.DEBUG)\n        _console_handler.setFormatter(formatter(verbose_fmt))\n    else:\n        _console_handler.setLevel(logging.INFO)\n        _console_handler.setFormatter(formatter(regular_fmt))\n    root_logger.addHandler(_console_handler)\n\n    buffer_capacity = int(os.getenv('WA_LOG_BUFFER_CAPACITY',\n                                    str(DEFAULT_INIT_BUFFER_CAPACITY)))\n    _init_handler = InitHandler(buffer_capacity)\n    _init_handler.setLevel(logging.DEBUG)\n    root_logger.addHandler(_init_handler)\n\n    logging.basicConfig(level=logging.DEBUG)\n    if not debug:\n        logging.raiseExceptions = False\n\n    logger = logging.getLogger('CGroups')\n    logger.info = logger.debug\n\n\ndef set_level(level):\n    _console_handler.setLevel(level)\n\n\n# pylint: disable=global-statement\ndef add_file(filepath, level=logging.DEBUG,\n             fmt='%(asctime)s %(levelname)-8s %(name)10.10s: %(message)s'):\n    global _init_handler\n    root_logger = logging.getLogger()\n    file_handler = logging.FileHandler(filepath)\n    file_handler.setLevel(level)\n    file_handler.setFormatter(LineFormatter(fmt))\n\n    if _init_handler:\n        _init_handler.flush_to_target(file_handler)\n        root_logger.removeHandler(_init_handler)\n        _init_handler = None\n\n    root_logger.addHandler(file_handler)\n\n\ndef enable(logs):\n    if isinstance(logs, list):\n        for log in logs:\n            __enable_logger(log)\n    else:\n        __enable_logger(logs)\n\n\ndef disable(logs):\n    if isinstance(logs, list):\n        for log in logs:\n            __disable_logger(log)\n    else:\n        __disable_logger(logs)\n\n\ndef __enable_logger(logger):\n    if isinstance(logger, str):\n        logger = logging.getLogger(logger)\n    logger.propagate = True\n\n\ndef 
__disable_logger(logger):\n    if isinstance(logger, str):\n        logger = logging.getLogger(logger)\n    logger.propagate = False\n\n\n# pylint: disable=global-statement\ndef indent():\n    global _indent_level\n    _indent_level += 1\n\n\n# pylint: disable=global-statement\ndef dedent():\n    global _indent_level\n    _indent_level -= 1\n\n\n@contextmanager\ndef indentcontext():\n    indent()\n    try:\n        yield\n    finally:\n        dedent()\n\n\n# pylint: disable=global-statement\ndef set_indent_level(level):\n    global _indent_level\n    old_level = _indent_level\n    _indent_level = level\n    return old_level\n\n\ndef log_error(e, logger, critical=False):\n    \"\"\"\n    Log the specified Exception as an error. The Error message will be formatted\n    differently depending on the nature of the exception.\n\n    :e: the error to log. should be an instance of ``Exception``\n    :logger: logger to be used.\n    :critical: if ``True``,  this error will be logged at ``logging.CRITICAL``\n               level, otherwise it will be logged as ``logging.ERROR``.\n\n    \"\"\"\n    if getattr(e, 'logged', None):\n        return\n\n    if critical:\n        log_func = logger.critical\n    else:\n        log_func = logger.error\n\n    if isinstance(e, KeyboardInterrupt):\n        old_level = set_indent_level(0)\n        logger.info('Got CTRL-C. 
Aborting.')\n        set_indent_level(old_level)\n    elif isinstance(e, (WAError, DevlibError)):\n        log_func(str(e))\n    elif isinstance(e, subprocess.CalledProcessError):\n        tb = get_traceback()\n        log_func(tb)\n        command = e.cmd\n        if e.args:\n            command = '{} {}'.format(command, ' '.join(map(str, e.args)))\n        message = 'Command \\'{}\\' returned non-zero exit status {}\\nOUTPUT:\\n{}\\n'\n        log_func(message.format(command, e.returncode, e.output))\n    elif isinstance(e, SyntaxError):\n        tb = get_traceback()\n        log_func(tb)\n        message = 'Syntax Error in {}, line {}, offset {}:'\n        log_func(message.format(e.filename, e.lineno, e.offset))\n        log_func('\\t{}'.format(e.msg))\n    else:\n        tb = get_traceback()\n        log_func(tb)\n        log_func('{}({})'.format(e.__class__.__name__, e))\n\n    e.logged = True\n\n\nclass ErrorSignalHandler(logging.Handler):\n    \"\"\"\n    Emits signals for ERROR and WARNING level traces.\n\n    \"\"\"\n\n    def emit(self, record):\n        if record.levelno == logging.ERROR:\n            signal.send(signal.ERROR_LOGGED, self, record)\n        elif record.levelno == logging.WARNING:\n            signal.send(signal.WARNING_LOGGED, self, record)\n\n\nclass InitHandler(logging.handlers.BufferingHandler):\n    \"\"\"\n    Used to buffer early logging records before a log file is created.\n\n    \"\"\"\n\n    def __init__(self, capacity):\n        super(InitHandler, self).__init__(capacity)\n        self.targets = []\n\n    def emit(self, record):\n        record.indent_level = _indent_level\n        super(InitHandler, self).emit(record)\n\n    def flush(self):\n        for target in self.targets:\n            self.flush_to_target(target)\n        self.buffer = []\n\n    def add_target(self, target):\n        if target not in self.targets:\n            self.targets.append(target)\n\n    def flush_to_target(self, target):\n        for record in 
self.buffer:\n            target.emit(record)\n\n\nclass LineFormatter(logging.Formatter):\n    \"\"\"\n    Logs each line of the message separately.\n\n    \"\"\"\n\n    def format(self, record):\n        record.message = record.getMessage()\n        if self.usesTime():\n            record.asctime = self.formatTime(record, self.datefmt)\n\n        indent_level = getattr(record, 'indent_level', _indent_level)\n        cur_indent = _indent_width * indent_level\n        d = record.__dict__\n        parts = []\n        for line in record.message.split('\\n'):\n            line = ' ' * cur_indent + line\n            d.update({'message': line.strip('\\r')})\n            parts.append(self._fmt % d)\n\n        return '\\n'.join(parts)\n\n\nclass ColorFormatter(LineFormatter):\n    \"\"\"\n    Formats logging records with color and prepends record info\n    to each line of the message.\n\n        BLUE for DEBUG logging level\n        GREEN for INFO logging level\n        YELLOW for WARNING logging level\n        RED for ERROR logging level\n        BOLD RED for CRITICAL logging level\n\n    \"\"\"\n\n    def __init__(self, fmt=None, datefmt=None):\n        super(ColorFormatter, self).__init__(fmt, datefmt)\n        template_text = self._fmt.replace('%(message)s', RESET_COLOR + '%(message)s${color}')\n        template_text = '${color}' + template_text + RESET_COLOR\n        self.fmt_template = string.Template(template_text)\n\n    def format(self, record):\n        self._set_color(COLOR_MAP[record.levelno])\n        return super(ColorFormatter, self).format(record)\n\n    def _set_color(self, color):\n        self._fmt = self.fmt_template.substitute(color=color)\n\n\nclass BaseLogWriter(object):\n\n    def __init__(self, name, level=logging.DEBUG):\n        \"\"\"\n        File-like object class designed to be used for logging from streams\n        Each complete line (terminated by new line character) gets logged\n        at DEBUG level. 
In complete lines are buffered until the next new line.\n\n        :param name: The name of the logger that will be used.\n\n        \"\"\"\n        self.logger = logging.getLogger(name)\n        self.buffer = ''\n        if level == logging.DEBUG:\n            self.do_write = self.logger.debug\n        elif level == logging.INFO:\n            self.do_write = self.logger.info\n        elif level == logging.WARNING:\n            self.do_write = self.logger.warning\n        elif level == logging.ERROR:\n            self.do_write = self.logger.error\n        else:\n            raise Exception('Unknown logging level: {}'.format(level))\n\n    def flush(self):\n        # Defined to match the interface expected by pexpect.\n        return self\n\n    def close(self):\n        if self.buffer:\n            self.logger.debug(self.buffer)\n            self.buffer = ''\n        return self\n\n    def __del__(self):\n        # Ensure we don't lose bufferd output\n        self.close()\n\n\nclass LogWriter(BaseLogWriter):\n\n    def write(self, data):\n        data = data.replace('\\r\\n', '\\n').replace('\\r', '\\n')\n        if '\\n' in data:\n            parts = data.split('\\n')\n            parts[0] = self.buffer + parts[0]\n            for part in parts[:-1]:\n                self.do_write(part)\n            self.buffer = parts[-1]\n        else:\n            self.buffer += data\n        return self\n\n\nclass LineLogWriter(BaseLogWriter):\n\n    def write(self, data):\n        self.do_write(data)\n\n\nclass StreamLogger(threading.Thread):\n    \"\"\"\n    Logs output from a stream in a thread.\n\n    \"\"\"\n\n    def __init__(self, name, stream, level=logging.DEBUG, klass=LogWriter):\n        super(StreamLogger, self).__init__()\n        self.writer = klass(name, level)\n        self.stream = stream\n        self.daemon = True\n\n    def run(self):\n        line = self.stream.readline()\n        while line:\n            self.writer.write(line.rstrip('\\n'))\n            
line = self.stream.readline()\n        self.writer.close()\n"
  },
  {
    "path": "wa/utils/misc.py",
    "content": "#    Copyright 2013-2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\n\n\"\"\"\nMiscellaneous functions that don't fit anywhere else.\n\n\"\"\"\n\nimport errno\nimport hashlib\nimport importlib\nimport inspect\nimport logging\nimport math\nimport os\nimport pathlib\nimport random\nimport re\nimport shutil\nimport string\nimport subprocess\nimport sys\nimport traceback\nimport uuid\nfrom contextlib import contextmanager\nfrom datetime import datetime, timedelta\nfrom functools import reduce  # pylint: disable=redefined-builtin\nfrom operator import mul\nfrom tempfile import gettempdir, NamedTemporaryFile\nfrom time import sleep\nfrom io import StringIO\n# pylint: disable=wrong-import-position,unused-import\nfrom itertools import chain, cycle\n\ntry:\n    from shutil import which as find_executable\nexcept ImportError:\n    from distutils.spawn import find_executable  # pylint: disable=no-name-in-module, import-error\n\nfrom dateutil import tz\n\n# pylint: disable=wrong-import-order\nfrom devlib.exception import TargetError\nfrom devlib.utils.misc import (ABI_MAP, check_output, walk_modules,\n                               ensure_directory_exists, ensure_file_directory_exists,\n                               normalize, convert_new_lines, get_cpu_mask, unique,\n                               isiterable, getch, as_relative, ranges_to_list, memoized,\n                               list_to_ranges, list_to_mask, mask_to_list, 
which,\n                               to_identifier, safe_extract, LoadSyntaxError)\n\ncheck_output_logger = logging.getLogger('check_output')\n\nfile_lock_logger = logging.getLogger('file_lock')\nat_write_logger = logging.getLogger('at_write')\n\n\n# Defined here rather than in wa.exceptions due to module load dependencies\ndef diff_tokens(before_token, after_token):\n    \"\"\"\n    Creates a diff of two tokens.\n\n    If the two tokens are the same it just returns returns the token\n    (whitespace tokens are considered the same irrespective of type/number\n    of whitespace characters in the token).\n\n    If the tokens are numeric, the difference between the two values\n    is returned.\n\n    Otherwise, a string in the form [before -> after] is returned.\n\n    \"\"\"\n    if before_token.isspace() and after_token.isspace():\n        return after_token\n    elif before_token.isdigit() and after_token.isdigit():\n        try:\n            diff = int(after_token) - int(before_token)\n            return str(diff)\n        except ValueError:\n            return \"[%s -> %s]\" % (before_token, after_token)\n    elif before_token == after_token:\n        return after_token\n    else:\n        return \"[%s -> %s]\" % (before_token, after_token)\n\n\ndef prepare_table_rows(rows):\n    \"\"\"Given a list of lists, make sure they are prepared to be formatted into a table\n    by making sure each row has the same number of columns and stringifying all values.\"\"\"\n    rows = [list(map(str, r)) for r in rows]\n    max_cols = max(list(map(len, rows)))\n    for row in rows:\n        pad = max_cols - len(row)\n        for _ in range(pad):\n            row.append('')\n    return rows\n\n\ndef write_table(rows, wfh, align='>', headers=None):  # pylint: disable=R0914\n    \"\"\"Write a column-aligned table to the specified file object.\"\"\"\n    if not rows:\n        return\n    rows = prepare_table_rows(rows)\n    num_cols = len(rows[0])\n\n    # cycle specified 
alignments until we have max_cols of them. This is\n    # consitent with how such cases are handled in R, pandas, etc.\n    it = cycle(align)\n    align = [next(it) for _ in range(num_cols)]\n\n    cols = list(zip(*rows))\n    col_widths = [max(list(map(len, c))) for c in cols]\n    if headers:\n        col_widths = [max([c, len(h)]) for c, h in zip(col_widths, headers)]\n    row_format = ' '.join(['{:%s%s}' % (align[i], w) for i, w in enumerate(col_widths)])\n    row_format += '\\n'\n\n    if headers:\n        wfh.write(row_format.format(*headers))\n        underlines = ['-' * len(h) for h in headers]\n        wfh.write(row_format.format(*underlines))\n\n    for row in rows:\n        wfh.write(row_format.format(*row))\n\n\ndef get_null():\n    \"\"\"Returns the correct null sink based on the OS.\"\"\"\n    return 'NUL' if os.name == 'nt' else '/dev/null'\n\n\ndef get_traceback(exc=None):\n    \"\"\"\n    Returns the string with the traceback for the specifiec exc\n    object, or for the current exception exc is not specified.\n\n    \"\"\"\n    if exc is None:\n        exc = sys.exc_info()\n    if not exc:\n        return None\n    tb = exc[2]\n    sio = StringIO()\n    traceback.print_tb(tb, file=sio)\n    del tb  # needs to be done explicitly see: http://docs.python.org/2/library/sys.html#sys.exc_info\n    return sio.getvalue()\n\n\ndef _check_remove_item(the_list, item):\n    \"\"\"Helper function for merge_lists that implements checking wether an items\n    should be removed from the list and doing so if needed. 
Returns ``True`` if\n    the item has been removed and ``False`` otherwise.\"\"\"\n    if not isinstance(item, str):\n        return False\n    if not item.startswith('~'):\n        return False\n    actual_item = item[1:]\n    if actual_item in the_list:\n        del the_list[the_list.index(actual_item)]\n    return True\n\n\nVALUE_REGEX = re.compile(r'(\\d+(?:\\.\\d+)?)\\s*(\\w*)')\n\nUNITS_MAP = {\n    's': 'seconds',\n    'ms': 'milliseconds',\n    'us': 'microseconds',\n    'ns': 'nanoseconds',\n    'V': 'volts',\n    'A': 'amps',\n    'mA': 'milliamps',\n    'J': 'joules',\n}\n\n\ndef parse_value(value_string):\n    \"\"\"parses a string representing a numerical value and returns\n    a tuple (value, units), where value will be either int or float,\n    and units will be a string representing the units or None.\"\"\"\n    match = VALUE_REGEX.search(value_string)\n    if match:\n        vs = match.group(1)\n        value = float(vs) if '.' in vs else int(vs)\n        us = match.group(2)\n        units = UNITS_MAP.get(us, us)\n        return (value, units)\n    else:\n        return (value_string, None)\n\n\ndef get_meansd(values):\n    \"\"\"Returns mean and standard deviation of the specified values.\"\"\"\n    if not values:\n        return float('nan'), float('nan')\n    mean = sum(values) / len(values)\n    sd = math.sqrt(sum([(v - mean) ** 2 for v in values]) / len(values))\n    return mean, sd\n\n\ndef geomean(values):\n    \"\"\"Returns the geometric mean of the values.\"\"\"\n    return reduce(mul, values) ** (1.0 / len(values))\n\n\ndef capitalize(text):\n    \"\"\"Capitalises the specified text: first letter upper case,\n    all subsequent letters lower case.\"\"\"\n    if not text:\n        return ''\n    return text[0].upper() + text[1:].lower()\n\n\ndef utc_to_local(dt):\n    \"\"\"Convert naive datetime to local time zone, assuming UTC.\"\"\"\n    return dt.replace(tzinfo=tz.tzutc()).astimezone(tz.tzlocal())\n\n\ndef local_to_utc(dt):\n    
\"\"\"Convert naive datetime to UTC, assuming local time zone.\"\"\"\n    return dt.replace(tzinfo=tz.tzlocal()).astimezone(tz.tzutc())\n\n\ndef load_class(classpath):\n    \"\"\"Loads the specified Python class. ``classpath`` must be a fully-qualified\n    class name (i.e. namspaced under module/package).\"\"\"\n    modname, clsname = classpath.rsplit('.', 1)\n    mod = importlib.import_module(modname)\n    cls = getattr(mod, clsname)\n    if isinstance(cls, type):\n        return cls\n    else:\n        raise ValueError(f'The classpath \"{classpath}\" does not point at a class: {cls}')\n\n\ndef get_pager():\n    \"\"\"Returns the name of the system pager program.\"\"\"\n    pager = os.getenv('PAGER')\n    if pager is None:\n        pager = find_executable('less')\n    if pager is None:\n        pager = find_executable('more')\n    return pager\n\n\n_bash_color_regex = re.compile('\\x1b\\[[0-9;]+m')\n\n\ndef strip_bash_colors(text):\n    return _bash_color_regex.sub('', text)\n\n\ndef format_duration(seconds, sep=' ', order=['day', 'hour', 'minute', 'second']):  # pylint: disable=dangerous-default-value\n    \"\"\"\n    Formats the specified number of seconds into human-readable duration.\n\n    \"\"\"\n    if isinstance(seconds, timedelta):\n        td = seconds\n    else:\n        td = timedelta(seconds=seconds or 0)\n    dt = datetime(1, 1, 1) + td\n    result = []\n    for item in order:\n        value = getattr(dt, item, None)\n        if item == 'day':\n            value -= 1\n        if not value:\n            continue\n        suffix = '' if value == 1 else 's'\n        result.append('{} {}{}'.format(value, item, suffix))\n    return sep.join(result) if result else 'N/A'\n\n\ndef get_article(word):\n    \"\"\"\n    Returns the appropriate indefinite article for the word (ish).\n\n    .. note:: Indefinite article assignment in English is based on\n              sound rather than spelling, so this will not work correctly\n              in all case; e.g. 
this will return ``\"a hour\"``.\n\n    \"\"\"\n    return 'an' if word[0] in 'aoeiu' else 'a'\n\n\ndef get_random_string(length):\n    \"\"\"Returns a random ASCII string of the specified length).\"\"\"\n    return ''.join(random.choice(string.ascii_letters + string.digits) for _ in range(length))\n\n\ndef import_path(filepath, module_name=None):\n    \"\"\"\n    Programmatically import the given Python source file under the name\n    ``module_name``. If ``module_name`` is not provided, a stable name based on\n    ``filepath`` will be created. Note that this module name cannot be relied\n    on, so don't make write import statements assuming this will be stable in\n    the future.\n    \"\"\"\n    if not module_name:\n        path = pathlib.Path(filepath).resolve()\n        id_ = to_identifier(str(path))\n        module_name = f'wa._user_import.{id_}'\n\n    try:\n        return sys.modules[module_name]\n    except KeyError:\n        spec = importlib.util.spec_from_file_location(module_name, filepath)\n        module = importlib.util.module_from_spec(spec)\n        try:\n            sys.modules[module_name] = module\n            spec.loader.exec_module(module)\n        except BaseException:\n            sys.modules.pop(module_name, None)\n            raise\n        else:\n            # We could return the \"module\" object, but that would not take into\n            # account any manipulation the module did on sys.modules when\n            # executing. To be consistent with the import statement, re-lookup\n            # the module name.\n            return sys.modules[module_name]\n\n\ndef load_struct_from_python(filepath):\n    \"\"\"Parses a config structure from a .py file. 
The structure should be composed\n    of basic Python types (strings, ints, lists, dicts, etc.).\"\"\"\n\n    try:\n        mod = import_path(filepath)\n    except SyntaxError as e:\n        raise LoadSyntaxError(e.message, filepath, e.lineno)\n    else:\n        return {\n            k: v\n            for k, v in inspect.getmembers(mod)\n            if not k.startswith('_')\n        }\n\n\ndef open_file(filepath):\n    \"\"\"\n    Open the specified file path with the associated launcher in an OS-agnostic way.\n\n    \"\"\"\n    if os.name == 'nt':  # Windows\n        return os.startfile(filepath)  # pylint: disable=no-member\n    elif sys.platform == 'darwin':  # Mac OSX\n        return subprocess.call(['open', filepath])\n    else:  # assume Linux or similar running a freedesktop-compliant GUI\n        return subprocess.call(['xdg-open', filepath])\n\n\ndef sha256(path, chunk=2048):\n    \"\"\"Calculates SHA256 hexdigest of the file at the specified path.\"\"\"\n    h = hashlib.sha256()\n    with open(path, 'rb') as fh:\n        buf = fh.read(chunk)\n        while buf:\n            h.update(buf)\n            buf = fh.read(chunk)\n    return h.hexdigest()\n\n\ndef urljoin(*parts):\n    return '/'.join(p.rstrip('/') for p in parts)\n\n\n# From: http://eli.thegreenplace.net/2011/10/19/perls-guess-if-file-is-text-or-binary-implemented-in-python/\ndef istextfile(fileobj, blocksize=512):\n    \"\"\" Uses heuristics to guess whether the given file is text or binary,\n        by reading a single block of bytes from the file.\n        If more than 30% of the chars in the block are non-text, or there\n        are NUL ('\\x00') bytes in the block, assume this is a binary file.\n    \"\"\"\n    _text_characters = (b''.join(chr(i) for i in range(32, 127))\n                        + b'\\n\\r\\t\\f\\b')\n\n    block = fileobj.read(blocksize)\n    if b'\\x00' in block:\n        # Files with null bytes are binary\n        return False\n    elif not block:\n        # An empty 
file is considered a valid text file\n        return True\n\n    # Use translate's 'deletechars' argument to efficiently remove all\n    # occurrences of _text_characters from the block\n    nontext = block.translate(None, _text_characters)\n    return float(len(nontext)) / len(block) <= 0.30\n\n\ndef categorize(v):\n    if hasattr(v, 'merge_with') and hasattr(v, 'merge_into'):\n        return 'o'\n    elif hasattr(v, 'items'):\n        return 'm'\n    elif isiterable(v):\n        return 's'\n    elif v is None:\n        return 'n'\n    else:\n        return 'c'\n\n\n# pylint: disable=too-many-return-statements,too-many-branches\ndef merge_config_values(base, other):\n    \"\"\"\n    This is used to merge two objects, typically when setting the value of a\n    ``ConfigurationPoint``. First, both objects are categorized into\n\n        c: A scalar value. Basically, most objects. These values\n           are treated as atomic, and not mergeable.\n        s: A sequence. Anything iterable that is not a dict or\n           a string (strings are considered scalars).\n        m: A key-value mapping. ``dict`` and its derivatives.\n        n: ``None``.\n        o: A mergeable object; this is an object that implements both\n          ``merge_with`` and ``merge_into`` methods.\n\n    The merge rules based on the two categories are then as follows:\n\n        (c1, c2) --> c2\n        (s1, s2) --> s1 . s2\n        (m1, m2) --> m1 . m2\n        (c, s) --> [c] . s\n        (s, c) --> s . [c]\n        (s, m) --> s . [m]\n        (m, s) --> [m] . s\n        (m, c) --> ERROR\n        (c, m) --> ERROR\n        (o, X) --> o.merge_with(X)\n        (X, o) --> o.merge_into(X)\n        (X, n) --> X\n        (n, X) --> X\n\n    where:\n\n        '.'  means concatenation (for maps, contcationation of (k, v) streams\n             then converted back into a map). 
If the types of the two objects\n             differ, the type of ``other`` is used for the result.\n        'X'  means \"any category\"\n        '[]' used to indicate a literal sequence (not necessarily a ``list``).\n             when this is concatenated with an actual sequence, that sequencies\n             type is used.\n\n    notes:\n\n        - When a mapping is combined with a sequence, that mapping is\n          treated as a scalar value.\n        - When combining two mergeable objects, they're combined using\n          ``o1.merge_with(o2)`` (_not_ using o2.merge_into(o1)).\n        - Combining anything with ``None`` yields that value, irrespective\n          of the order. So a ``None`` value is eqivalent to the corresponding\n          item being omitted.\n        - When both values are scalars, merging is equivalent to overwriting.\n        - There is no recursion (e.g. if map values are lists, they will not\n          be merged; ``other`` will overwrite ``base`` values). If complicated\n          merging semantics (such as recursion) are required, they should be\n          implemented within custom mergeable types (i.e. 
those that implement\n          ``merge_with`` and ``merge_into``).\n\n    While this can be used as a generic \"combine any two arbitry objects\"\n    function, the semantics have been selected specifically for merging\n    configuration point values.\n\n    \"\"\"\n    cat_base = categorize(base)\n    cat_other = categorize(other)\n\n    if cat_base == 'n':\n        return other\n    elif cat_other == 'n':\n        return base\n\n    if cat_base == 'o':\n        return base.merge_with(other)\n    elif cat_other == 'o':\n        return other.merge_into(base)\n\n    if cat_base == 'm':\n        if cat_other == 's':\n            return merge_sequencies([base], other)\n        elif cat_other == 'm':\n            return merge_maps(base, other)\n        else:\n            message = 'merge error ({}, {}): \"{}\" and \"{}\"'\n            raise ValueError(message.format(cat_base, cat_other, base, other))\n    elif cat_base == 's':\n        if cat_other == 's':\n            return merge_sequencies(base, other)\n        else:\n            return merge_sequencies(base, [other])\n    else:  # cat_base == 'c'\n        if cat_other == 's':\n            return merge_sequencies([base], other)\n        elif cat_other == 'm':\n            message = 'merge error ({}, {}): \"{}\" and \"{}\"'\n            raise ValueError(message.format(cat_base, cat_other, base, other))\n        else:\n            return other\n\n\ndef merge_sequencies(s1, s2):\n    return type(s2)(unique(chain(s1, s2)))\n\n\ndef merge_maps(m1, m2):\n    return type(m2)(chain(iter(m1.items()), iter(m2.items())))\n\n\ndef merge_dicts_simple(base, other):\n    result = base.copy()\n    for key, value in (other or {}).items():\n        result[key] = merge_config_values(result.get(key), value)\n    return result\n\n\ndef touch(path):\n    with open(path, 'w'):\n        pass\n\n\ndef get_object_name(obj):\n    if hasattr(obj, 'name'):\n        return obj.name\n    elif hasattr(obj, '__func__') and hasattr(obj, 
'__self__'):\n        return '{}.{}'.format(get_object_name(obj.__self__.__class__),\n                              obj.__func__.__name__)\n    elif hasattr(obj, 'func_name'):\n        return obj.__name__\n    elif hasattr(obj, '__name__'):\n        return obj.__name__\n    elif hasattr(obj, '__class__'):\n        return obj.__class__.__name__\n    return None\n\n\ndef resolve_cpus(name, target):\n    \"\"\"\n    Returns a list of cpu numbers that corresponds to a passed name.\n    Allowed formats are:\n        - 'big'\n        - 'little'\n        - '<core_name> e.g. 'A15'\n        - 'cpuX'\n        - 'all' - returns all cpus\n        - '' - Empty name will also return all cpus\n    \"\"\"\n    cpu_list = list(range(target.number_of_cpus))\n\n    # Support for passing cpu no directly\n    if isinstance(name, int):\n        cpu = name\n        if cpu not in cpu_list:\n            message = 'CPU{} is not available, must be in {}'\n            raise ValueError(message.format(cpu, cpu_list))\n        return [cpu]\n\n    # Apply to all cpus\n    if not name or name.lower() == 'all':\n        return cpu_list\n    # Deal with big.little substitution\n    elif name.lower() == 'big':\n        name = target.big_core\n        if not name:\n            raise ValueError('big core name could not be retrieved')\n    elif name.lower() == 'little':\n        name = target.little_core\n        if not name:\n            raise ValueError('little core name could not be retrieved')\n\n    # Return all cores with specified name\n    if name in target.core_names:\n        return target.core_cpus(name)\n\n    # Check if core number has been supplied.\n    else:\n        core_no = re.match('cpu([0-9]+)', name, re.IGNORECASE)\n        if core_no:\n            cpu = int(core_no.group(1))\n            if cpu not in cpu_list:\n                message = 'CPU{} is not available, must be in {}'\n                raise ValueError(message.format(cpu, cpu_list))\n            return [cpu]\n        
else:\n            msg = 'Unexpected core name \"{}\"'\n            raise ValueError(msg.format(name))\n\n\n@memoized\ndef resolve_unique_domain_cpus(name, target):\n    \"\"\"\n    Same as `resolve_cpus` above but only returns only the first cpu\n    in each of the different frequency domains. Requires cpufreq.\n    \"\"\"\n    cpus = resolve_cpus(name, target)\n    if not target.has('cpufreq'):\n        msg = 'Device does not appear to support cpufreq; ' \\\n              'Cannot obtain cpu domain information'\n        raise TargetError(msg)\n\n    unique_cpus = []\n    domain_cpus = []\n    for cpu in cpus:\n        if cpu not in domain_cpus:\n            domain_cpus = target.cpufreq.get_related_cpus(cpu)\n        if domain_cpus[0] not in unique_cpus:\n            unique_cpus.append(domain_cpus[0])\n    return unique_cpus\n\n\ndef format_ordered_dict(od):\n    \"\"\"\n    Provide a string representation of ordered dict that is similar to the\n    regular dict representation, as that is more concise and easier to read\n    than the default __str__ for OrderedDict.\n    \"\"\"\n    return '{{{}}}'.format(', '.join('{}={}'.format(k, v)\n                                     for k, v in od.items()))\n\n\n@contextmanager\ndef atomic_write_path(path, mode='w'):\n    \"\"\"\n    Gets a file path to write to which will be replaced with the original\n     file path to simulate an atomic write from the point of view\n    of other processes. 
This is achieved by writing to a tmp file and\n    replacing the exiting file to prevent inconsistencies.\n    \"\"\"\n    tmp_file = None\n    try:\n        tmp_file = NamedTemporaryFile(mode=mode, delete=False,\n                                      suffix=os.path.basename(path))\n        at_write_logger.debug('')\n        yield tmp_file.name\n        os.fsync(tmp_file.file.fileno())\n    finally:\n        if tmp_file:\n            tmp_file.close()\n    at_write_logger.debug('Moving {} to {}'.format(tmp_file.name, path))\n    safe_move(tmp_file.name, path)\n\n\ndef safe_move(src, dst):\n    \"\"\"\n    Taken from: https://alexwlchan.net/2019/03/atomic-cross-filesystem-moves-in-python/\n\n    Rename a file from ``src`` to ``dst``.\n\n    *   Moves must be atomic.  ``shutil.move()`` is not atomic.\n    *   Moves must work across filesystems and ``os.rename()`` can\n        throw errors if run across filesystems.\n\n    So we try ``os.rename()``, but if we detect a cross-filesystem copy, we\n    switch to ``shutil.move()`` with some wrappers to make it atomic.\n    \"\"\"\n    try:\n        os.rename(src, dst)\n    except OSError as err:\n\n        if err.errno == errno.EXDEV:\n            # Generate a unique ID, and copy `<src>` to the target directory\n            # with a temporary name `<dst>.<ID>.tmp`.  Because we're copying\n            # across a filesystem boundary, this initial copy may not be\n            # atomic.  
We intersperse a random UUID so if different processes\n            # are copying into `<dst>`, they don't overlap in their tmp copies.\n            copy_id = uuid.uuid4()\n            tmp_dst = \"%s.%s.tmp\" % (dst, copy_id)\n            shutil.copyfile(src, tmp_dst)\n\n            # Then do an atomic rename onto the new name, and clean up the\n            # source image.\n            os.rename(tmp_dst, dst)\n            os.unlink(src)\n        else:\n            raise\n\n\n@contextmanager\ndef lock_file(path, timeout=30):\n    \"\"\"\n    Enable automatic locking and unlocking of a file path given. Used to\n    prevent synchronisation issues between multiple wa processes.\n    Uses a default timeout of 30 seconds which should be overridden for files\n    that are expect to be unavailable for longer periods of time.\n    \"\"\"\n\n    # Import here to avoid circular imports\n    # pylint: disable=wrong-import-position,cyclic-import, import-outside-toplevel\n    from wa.framework.exception import ResourceError\n\n    locked = False\n    l_file = 'wa-{}.lock'.format(path)\n    l_file = os.path.join(gettempdir(), l_file.replace(os.path.sep, '_'))\n    file_lock_logger.debug('Acquiring lock on \"{}\"'.format(path))\n    try:\n        while timeout:\n            try:\n                open(l_file, 'x').close()\n                locked = True\n                file_lock_logger.debug('Lock acquired on \"{}\"'.format(path))\n                break\n            except FileExistsError:\n                msg = 'Failed to acquire lock on \"{}\" Retrying...'\n                file_lock_logger.debug(msg.format(l_file))\n                sleep(1)\n                timeout -= 1\n        else:\n            msg = 'Failed to acquire lock file \"{}\" within the timeout. 
\\n' \\\n                  'If there are no other running WA processes please delete ' \\\n                  'this file and retry.'\n            raise ResourceError(msg.format(os.path.abspath(l_file)))\n        yield\n    finally:\n        if locked and os.path.exists(l_file):\n            os.remove(l_file)\n            file_lock_logger.debug('Lock released \"{}\"'.format(path))\n"
  },
  {
    "path": "wa/utils/postgres.py",
    "content": "#    Copyright 2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\n\"\"\"\nThis module contains additional casting and adaptation functions for several\ndifferent datatypes and metadata types for use with the psycopg2 module. The\ncasting functions will transform Postgresql data types into Python objects, and\nthe adapters the reverse. They are named this way according to the psycopg2\nconventions.\n\nFor more information about the available adapters and casters in the standard\npsycopg2 module, please see:\n\nhttp://initd.org/psycopg/docs/extensions.html#sql-adaptation-protocol-objects\n\n\"\"\"\n\nimport re\nimport os\n\ntry:\n    from psycopg2 import InterfaceError\n    from psycopg2.extensions import AsIs\nexcept ImportError:\n    InterfaceError = None\n    AsIs = None\n\nfrom wa.utils.types import level\n\n\nPOSTGRES_SCHEMA_DIR = os.path.join(os.path.dirname(__file__),\n                                   '..',\n                                   'commands',\n                                   'postgres_schemas')\n\n\ndef cast_level(value, cur):  # pylint: disable=unused-argument\n    \"\"\"Generic Level caster for psycopg2\"\"\"\n    if not InterfaceError:\n        raise ImportError('There was a problem importing psycopg2.')\n    if value is None:\n        return None\n\n    m = re.match(r\"([^\\()]*)\\((\\d*)\\)\", value)\n    name = str(m.group(1))\n    number = int(m.group(2))\n\n    if m:\n        return level(name, 
number)\n    else:\n        raise InterfaceError(\"Bad level representation: {}\".format(value))\n\n\ndef cast_vanilla(value, cur):  # pylint: disable=unused-argument\n    \"\"\"Vanilla Type caster for psycopg2\n\n    Simply returns the string representation.\n    \"\"\"\n    if value is None:\n        return None\n    else:\n        return str(value)\n\n\n# List functions and classes for adapting\n\ndef adapt_level(a_level):\n    \"\"\"Generic Level Adapter for psycopg2\"\"\"\n    return \"{}({})\".format(a_level.name, a_level.value)\n\n\nclass ListOfLevel(object):\n    value = None\n\n    def __init__(self, a_level):\n        self.value = a_level\n\n    def return_original(self):\n        return self.value\n\n\ndef adapt_ListOfX(adapt_X):\n    \"\"\"This will create a multi-column adapter for a particular type.\n\n    Note that the type must itself need to be in array form. Therefore\n    this function serves to seaprate out individual lists into multiple\n    big lists.\n    E.g. if the X adapter produces array (a,b,c)\n    then this adapter will take an list of Xs and produce a master array:\n    ((a1,a2,a3),(b1,b2,b3),(c1,c2,c3))\n\n    Takes as its argument the adapter for the type which must produce an\n    SQL array string.\n    Note that you should NOT put the AsIs in the adapt_X function.\n\n    The need for this function arises from the fact that we may want to\n    actually handle list-creating types differently if they themselves\n    are in a list, as in the example above, we cannot simply adopt a\n    recursive strategy.\n\n    Note that master_list is the list representing the array. Each element\n    in the list will represent a subarray (column). 
If there is only one\n    subarray following processing then the outer {} are stripped to give a\n    1 dimensional array.\n    \"\"\"\n    def adapter_function(param):\n        if not AsIs:\n            raise ImportError('There was a problem importing psycopg2.')\n        param = param.value\n        result_list = []\n        for element in param:  # Where param will be a list of X's\n            result_list.append(adapt_X(element))\n        test_element = result_list[0]\n        num_items = len(test_element.split(\",\"))\n        master_list = []\n        for x in range(num_items):\n            master_list.append(\"\")\n        for element in result_list:\n            element = element.strip(\"{\").strip(\"}\")\n            element = element.split(\",\")\n            for x in range(num_items):\n                master_list[x] = master_list[x] + element[x] + \",\"\n        if num_items > 1:\n            master_sql_string = \"{\"\n        else:\n            master_sql_string = \"\"\n        for x in range(num_items):\n            # Remove trailing comma\n            master_list[x] = master_list[x].strip(\",\")\n            master_list[x] = \"{\" + master_list[x] + \"}\"\n            master_sql_string = master_sql_string + master_list[x] + \",\"\n        master_sql_string = master_sql_string.strip(\",\")\n        if num_items > 1:\n            master_sql_string = master_sql_string + \"}\"\n        return AsIs(\"'{}'\".format(master_sql_string))\n    return adapter_function\n\n\ndef return_as_is(adapt_X):\n    \"\"\"Returns the AsIs appended function of the function passed\n\n    This is useful for adapter functions intended to be used with the\n    adapt_ListOfX function, which must return strings, as it allows them\n    to be standalone adapters.\n    \"\"\"\n    if not AsIs:\n        raise ImportError('There was a problem importing psycopg2.')\n\n    def adapter_function(param):\n        return AsIs(\"'{}'\".format(adapt_X(param)))\n    return 
adapter_function\n\n\ndef adapt_vanilla(param):\n    \"\"\"Vanilla adapter: simply returns the string representation\"\"\"\n    if not AsIs:\n        raise ImportError('There was a problem importing psycopg2.')\n    return AsIs(\"'{}'\".format(param))\n\n\ndef create_iterable_adapter(array_columns, explicit_iterate=False):\n    \"\"\"Create an iterable adapter of a specified dimension\n\n    If explicit_iterate is True, then it will be assumed that the param needs\n    to be iterated upon via param.iteritems(). Otherwise it will simply be\n    iterated vanilla.\n    The value of array_columns will be equal to the number of indexed elements\n    per item in the param iterable. E.g. a list of 3-element-long lists has\n    3 elements per item in the iterable (the master list) and therefore\n    array_columns should be equal to 3.\n    If array_columns is 0, then this indicates that the iterable contains\n    single items.\n    \"\"\"\n    if not AsIs:\n        raise ImportError('There was a problem importing psycopg2.')\n\n    def adapt_iterable(param):\n        \"\"\"Adapts an iterable object into an SQL array\"\"\"\n        final_string = \"\"  # String stores a string representation of the array\n        if param:\n            if array_columns > 1:\n                for index in range(array_columns):\n                    array_string = \"\"\n                    for item in param.iteritems():\n                        array_string = array_string + str(item[index]) + \",\"\n                    array_string = array_string.strip(\",\")\n                    array_string = \"{\" + array_string + \"}\"\n                    final_string = final_string + array_string + \",\"\n                final_string = final_string.strip(\",\")\n            else:\n                # Simply return each item in the array\n                if explicit_iterate:\n                    for item in param.iteritems():\n                        final_string = final_string + str(item) + \",\"\n          
      else:\n                    for item in param:\n                        final_string = final_string + str(item) + \",\"\n        return AsIs(\"'{{{}}}'\".format(final_string))\n    return adapt_iterable\n\n\n# For reference only and future use\ndef adapt_list(param):\n    \"\"\"Adapts a list into an array\"\"\"\n    if not AsIs:\n        raise ImportError('There was a problem importing psycopg2.')\n    final_string = \"\"\n    if param:\n        for item in param:\n            final_string = final_string + str(item) + \",\"\n        final_string = \"{\" + final_string + \"}\"\n    return AsIs(\"'{}'\".format(final_string))\n\n\ndef get_schema(schemafilepath):\n    with open(schemafilepath, 'r') as sqlfile:\n        sql_commands = sqlfile.read()\n\n    schema_major = None\n    schema_minor = None\n    # Extract schema version if present\n    if sql_commands.startswith('--!VERSION'):\n        splitcommands = sql_commands.split('!ENDVERSION!\\n')\n        schema_major, schema_minor = splitcommands[0].strip('--!VERSION!').split('.')\n        schema_major = int(schema_major)\n        schema_minor = int(schema_minor)\n        sql_commands = splitcommands[1]\n    return schema_major, schema_minor, sql_commands\n\n\ndef get_database_schema_version(conn):\n    with conn.cursor() as cursor:\n        cursor.execute('''SELECT\n                              DatabaseMeta.schema_major,\n                              DatabaseMeta.schema_minor\n                          FROM\n                              DatabaseMeta;''')\n        schema_major, schema_minor = cursor.fetchone()\n    return (schema_major, schema_minor)\n\n\ndef get_schema_versions(conn):\n    schemafilepath = os.path.join(POSTGRES_SCHEMA_DIR, 'postgres_schema.sql')\n    cur_major_version, cur_minor_version, _ = get_schema(schemafilepath)\n    db_schema_version = get_database_schema_version(conn)\n    return (cur_major_version, cur_minor_version), db_schema_version\n"
  },
  {
    "path": "wa/utils/revent.py",
    "content": "#    Copyright 2016-2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\n\nimport logging\nimport os\nimport struct\nimport signal\nfrom datetime import datetime\nfrom collections import namedtuple\n\nfrom devlib.utils.misc import memoized\n\nfrom wa.framework.resource import Executable, NO_ONE, ResourceResolver\nfrom wa.utils.exec_control import once_per_class\n\n\nGENERAL_MODE = 0\nGAMEPAD_MODE = 1\n\n\nu16_struct = struct.Struct('<H')\nu32_struct = struct.Struct('<I')\nu64_struct = struct.Struct('<Q')\n\n# See revent section in WA documentation for the detailed description of\n# the recording format.\nheader_one_struct = struct.Struct('<6sH')\nheader_two_struct = struct.Struct('<H6x')  # version 2 onwards\n\ndevid_struct = struct.Struct('<4H')\ndevinfo_struct = struct.Struct('<4s96s96s96sI')\nabsinfo_struct = struct.Struct('<7i')\n\nevent_struct = struct.Struct('<HqqHHi')\nold_event_struct = struct.Struct(\"<i4xqqHHi\")  # prior to version 2\n\n\ndef read_struct(fh, struct_spec):\n    data = fh.read(struct_spec.size)\n    return struct_spec.unpack(data)\n\n\ndef read_string(fh):\n    length, = read_struct(fh, u32_struct)\n    str_struct = struct.Struct('<{}s'.format(length))\n    return read_struct(fh, str_struct)[0]\n\n\ndef count_bits(bitarr):\n    return sum(bin(b).count('1') for b in bitarr)\n\n\ndef is_set(bitarr, bit):\n    byte = bit // 8\n    bytebit = bit % 8\n    return bitarr[byte] & bytebit\n\n\nabsinfo = 
namedtuple('absinfo', 'ev_code value min max fuzz flat resolution')\n\n\nclass UinputDeviceInfo(object):\n\n    def __init__(self, fh):\n        parts = read_struct(fh, devid_struct)\n        self.bustype = parts[0]\n        self.vendor = parts[1]\n        self.product = parts[2]\n        self.version = parts[3]\n\n        self.name = read_string(fh)\n\n        parts = read_struct(fh, devinfo_struct)\n        self.ev_bits = bytearray(parts[0])\n        self.key_bits = bytearray(parts[1])\n        self.rel_bits = bytearray(parts[2])\n        self.abs_bits = bytearray(parts[3])\n        self.num_absinfo = parts[4]\n        self.absinfo = [absinfo(*read_struct(fh, absinfo_struct))\n                        for _ in range(self.num_absinfo)]\n\n    def __str__(self):\n        return 'UInputInfo({})'.format(self.__dict__)\n\n\nclass ReventEvent(object):\n\n    def __init__(self, fh, legacy=False):\n        if not legacy:\n            dev_id, ts_sec, ts_usec, type_, code, value = read_struct(fh, event_struct)\n        else:\n            dev_id, ts_sec, ts_usec, type_, code, value = read_struct(fh, old_event_struct)\n        self.device_id = dev_id\n        self.time = datetime.fromtimestamp(ts_sec + float(ts_usec) / 1000000)\n        self.type = type_\n        self.code = code\n        self.value = value\n\n    def __str__(self):\n        return 'InputEvent({})'.format(self.__dict__)\n\n\nclass ReventRecording(object):\n    \"\"\"\n    Represents a parsed revent recording. This contains input events and device\n    descriptions recorded by revent. Two parsing modes are supported. By\n    default, the recording will be parsed in the \"streaming\" mode. In this\n    mode, initial headers and device descriptions are parsed on creation and an\n    open file handle to the recording is saved. Events will be read from the\n    file as they are being iterated over. In this mode, the entire recording is\n    never loaded into memory at once. 
The underlying file may be \"released\" by\n    calling ``close`` on the recording, after which further iteration over the\n    events will not be possible (but would still be possible to access the file\n    description and header information).\n\n    The alternative is to load the entire recording on creation (in which case\n    the file handle will be closed once the recording is loaded). This can be\n    enabled by specifying ``streaming=False``. This will make it faster to\n    subsequently iterate over the events, and also will not \"hold\" the file\n    open.\n\n    .. note:: When starting a new iteration over the events in streaming mode,\n              the position in the open file will be automatically reset to the\n              beginning of the event stream. This means it's possible to iterate\n              over the events multiple times without having to re-open the\n              recording, however it is not possible to do so in parallel. If\n              parallel iteration is required, streaming should be disabled.\n\n    \"\"\"\n\n    @property\n    def duration(self):\n        if self._duration is None:\n            if self.stream:\n                events = self._iter_events()\n                try:\n                    first = last = next(events)\n                except StopIteration:\n                    self._duration = 0\n                for last in events:\n                    pass\n                self._duration = (last.time - first.time).total_seconds()\n            else:  # not streaming\n                if not self._events:\n                    self._duration = 0\n                self._duration = (self._events[-1].time\n                                  - self._events[0].time).total_seconds()\n        return self._duration\n\n    @property\n    def events(self):\n        if self.stream:\n            return self._iter_events()\n        else:\n            return self._events\n\n    def __init__(self, f, stream=True):\n        
self.device_paths = []\n        self.gamepad_device = None\n        self.num_events = None\n        self.stream = stream\n        self._events = None\n        self._close_when_done = False\n        self._events_start = None\n        self._duration = None\n\n        if hasattr(f, 'name'):  # file-like object\n            self.filepath = f.name\n            self.fh = f\n        else:  # path to file\n            self.filepath = f\n            self.fh = open(self.filepath, 'rb')\n            if not self.stream:\n                self._close_when_done = True\n        try:\n            self._parse_header_and_devices(self.fh)\n            self._events_start = self.fh.tell()\n            if not self.stream:\n                self._events = list(self._iter_events())\n        finally:\n            if self._close_when_done:\n                self.close()\n\n    def close(self):\n        if self.fh is not None:\n            self.fh.close()\n            self.fh = None\n            self._events_start = None\n\n    def _parse_header_and_devices(self, fh):\n        magic, version = read_struct(fh, header_one_struct)\n        if magic != b'REVENT':\n            msg = '{} does not appear to be an revent recording'\n            raise ValueError(msg.format(self.filepath))\n        self.version = version\n\n        if 3 >= self.version >= 2:\n            self.mode, = read_struct(fh, header_two_struct)\n            if self.mode == GENERAL_MODE:\n                self._read_devices(fh)\n            elif self.mode == GAMEPAD_MODE:\n                self._read_gamepad_info(fh)\n            else:\n                raise ValueError('Unexpected recording mode: {}'.format(self.mode))\n            self.num_events, = read_struct(fh, u64_struct)\n            if self.version > 2:\n                ts_sec = read_struct(fh, u64_struct)[0]\n                ts_usec = read_struct(fh, u64_struct)[0]\n                self.start_time = datetime.fromtimestamp(ts_sec + float(ts_usec) / 1000000)\n                
ts_sec = read_struct(fh, u64_struct)[0]\n                ts_usec = read_struct(fh, u64_struct)[0]\n                self.end_time = datetime.fromtimestamp(ts_sec + float(ts_usec) / 1000000)\n\n        elif 2 > self.version >= 0:\n            self.mode = GENERAL_MODE\n            self._read_devices(fh)\n        else:\n            raise ValueError('Invalid recording version: {}'.format(self.version))\n\n    def _read_devices(self, fh):\n        num_devices, = read_struct(fh, u32_struct)\n        for _ in range(num_devices):\n            self.device_paths.append(read_string(fh))\n\n    def _read_gamepad_info(self, fh):\n        self.gamepad_device = UinputDeviceInfo(fh)\n        self.device_paths.append('[GAMEPAD]')\n\n    def _iter_events(self):\n        if self.fh is None:\n            msg = 'Attempting to iterate over events of a closed recording'\n            raise RuntimeError(msg)\n        self.fh.seek(self._events_start)\n        if self.version >= 2:\n            for _ in range(self.num_events):\n                yield ReventEvent(self.fh)\n        else:\n            file_size = os.path.getsize(self.filepath)\n            while self.fh.tell() < file_size:\n                yield ReventEvent(self.fh, legacy=True)\n\n    def __iter__(self):\n        for event in self.events:\n            yield event\n\n    def __enter__(self):\n        return self\n\n    def __exit__(self, *args):\n        self.close()\n\n    def __del__(self):\n        self.close()\n\n\ndef get_revent_binary(abi):\n    resolver = ResourceResolver()\n    resolver.load()\n    resource = Executable(NO_ONE, abi, 'revent')\n    return resolver.get(resource)\n\n\nclass ReventRecorder(object):\n\n    # Share location of target executable across all instances\n    target_executable = None\n\n    def __init__(self, target):\n        self.logger = logging.getLogger(self.__class__.__name__)\n        self.target = target\n        if not ReventRecorder.target_executable:\n            
ReventRecorder.target_executable = self._get_target_path(self.target)\n\n    @once_per_class\n    def deploy(self):\n        if not ReventRecorder.target_executable:\n            ReventRecorder.target_executable = self.target.get_installed('revent')\n        host_executable = get_revent_binary(self.target.abi)\n        ReventRecorder.target_executable = self.target.install(host_executable)\n\n    @once_per_class\n    def remove(self):\n        if ReventRecorder.target_executable:\n            self.target.uninstall('revent')\n\n    def start_record(self, revent_file):\n        command = f'{ReventRecorder.target_executable} record -s {revent_file}'\n        self.logger.debug('Executing record command \"%s\"...', command)\n        self.target.kick_off(command, self.target.is_rooted)\n\n    def stop_record(self):\n        self.target.killall('revent', signal.SIGINT, as_root=self.target.is_rooted)\n\n    def replay(self, revent_file, timeout=None):\n        self.target.killall('revent')\n        command = f'{ReventRecorder.target_executable} replay {revent_file}'\n        self.logger.debug('Executing replay command \"%s\" with %d seconds timeout...', command, timeout)\n        self.target.execute(command, timeout=timeout)\n\n    @memoized\n    @staticmethod\n    def _get_target_path(target):\n        return target.get_installed('revent')\n"
  },
  {
    "path": "wa/utils/serializer.py",
    "content": "#    Copyright 2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\n\"\"\"\nThis module contains wrappers for Python serialization modules for\ncommon formats that make it easier to serialize/deserialize WA\nPlain Old Data structures (serilizable WA classes implement\n``to_pod()``/``from_pod()`` methods for converting between POD\nstructures and Python class instances).\n\nThe modifications to standard serilization procedures are:\n\n    - mappings are deserialized as ``OrderedDict``\\ 's rather than standard\n      Python ``dict``\\ 's. This allows for cleaner syntax in certain parts\n      of WA configuration (e.g. values to be written to files can be specified\n      as a dict, and they will be written in the order specified in the config).\n    - regular expressions are automatically encoded/decoded. This allows for\n      configuration values to be transparently specified as strings or regexes\n      in the POD config.\n\nThis module exports the \"wrapped\" versions of serialization libraries,\nand this should be imported and used instead of importing the libraries\ndirectly. i.e. ::\n\n    from wa.utils.serializer import yaml\n    pod = yaml.load(fh)\n\ninstead of ::\n\n    import yaml\n    pod = yaml.load(fh)\n\nIt's also possible to use the serializer directly::\n\n    from wa.utils import serializer\n    pod = serializer.load(fh)\n\nThis can also be used to ``dump()`` POD structures. 
By default,\n``dump()`` will produce JSON, but ``fmt`` parameter may be used to\nspecify an alternative format (``yaml`` or ``python``). ``load()`` will\nuse the file plugin to guess the format, but ``fmt`` may also be used\nto specify it explicitly.\n\n\"\"\"\n# pylint: disable=unused-argument\n\nimport os\nimport re\nimport json as _json\nfrom collections import OrderedDict\nfrom collections.abc import Hashable\nfrom datetime import datetime\nimport dateutil.parser\nimport yaml as _yaml  # pylint: disable=wrong-import-order\nfrom yaml import MappingNode\ntry:\n    from yaml import FullLoader as _yaml_loader\nexcept ImportError:\n    from yaml import Loader as _yaml_loader\nfrom yaml.constructor import ConstructorError\n\n\n# pylint: disable=redefined-builtin\nfrom past.builtins import basestring  # pylint: disable=wrong-import-order\n\nfrom wa.framework.exception import SerializerSyntaxError\nfrom wa.utils.misc import isiterable\nfrom wa.utils.types import regex_type, none_type, level, cpu_mask\n\n\n__all__ = [\n    'json',\n    'yaml',\n    'read_pod',\n    'dump',\n    'load',\n    'is_pod',\n    'POD_TYPES',\n]\n\nPOD_TYPES = [\n    list,\n    tuple,\n    dict,\n    set,\n    basestring,\n    str,\n    int,\n    float,\n    bool,\n    OrderedDict,\n    datetime,\n    regex_type,\n    none_type,\n    level,\n    cpu_mask,\n]\n\n\nclass WAJSONEncoder(_json.JSONEncoder):\n\n    def default(self, obj):  # pylint: disable=method-hidden,arguments-differ\n        if isinstance(obj, regex_type):\n            return 'REGEX:{}:{}'.format(obj.flags, obj.pattern)\n        elif isinstance(obj, datetime):\n            return 'DATET:{}'.format(obj.isoformat())\n        elif isinstance(obj, level):\n            return 'LEVEL:{}:{}'.format(obj.name, obj.value)\n        elif isinstance(obj, cpu_mask):\n            return 'CPUMASK:{}'.format(obj.mask())\n        else:\n            return _json.JSONEncoder.default(self, obj)\n\n\nclass WAJSONDecoder(_json.JSONDecoder):\n\n    def 
decode(self, s, **kwargs):  # pylint: disable=arguments-differ\n        d = _json.JSONDecoder.decode(self, s, **kwargs)\n\n        def try_parse_object(v):\n            if isinstance(v, basestring):\n                if v.startswith('REGEX:'):\n                    _, flags, pattern = v.split(':', 2)\n                    return re.compile(pattern, int(flags or 0))\n                elif v.startswith('DATET:'):\n                    _, pattern = v.split(':', 1)\n                    return dateutil.parser.parse(pattern)\n                elif v.startswith('LEVEL:'):\n                    _, name, value = v.split(':', 2)\n                    return level(name, value)\n                elif v.startswith('CPUMASK:'):\n                    _, value = v.split(':', 1)\n                    return cpu_mask(value)\n\n            return v\n\n        def load_objects(d):\n            if not hasattr(d, 'items'):\n                return d\n            pairs = []\n            for k, v in d.items():\n                if hasattr(v, 'items'):\n                    pairs.append((k, load_objects(v)))\n                elif isiterable(v):\n                    pairs.append((k, [try_parse_object(i) for i in v]))\n                else:\n                    pairs.append((k, try_parse_object(v)))\n            return OrderedDict(pairs)\n\n        return load_objects(d)\n\n\nclass json(object):\n\n    @staticmethod\n    def dump(o, wfh, indent=4, *args, **kwargs):\n        return _json.dump(o, wfh, cls=WAJSONEncoder, indent=indent, *args, **kwargs)\n\n    @staticmethod\n    def dumps(o, indent=4, *args, **kwargs):\n        return _json.dumps(o, cls=WAJSONEncoder, indent=indent, *args, **kwargs)\n\n    @staticmethod\n    def load(fh, *args, **kwargs):\n        try:\n            return _json.load(fh, cls=WAJSONDecoder, object_pairs_hook=OrderedDict, *args, **kwargs)\n        except ValueError as e:\n            raise SerializerSyntaxError(e.args[0])\n\n    @staticmethod\n    def loads(s, *args, 
**kwargs):\n        try:\n            return _json.loads(s, cls=WAJSONDecoder, object_pairs_hook=OrderedDict, *args, **kwargs)\n        except ValueError as e:\n            raise SerializerSyntaxError(e.args[0])\n\n\n_mapping_tag = _yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG\n_regex_tag = 'tag:wa:regex'\n_level_tag = 'tag:wa:level'\n_cpu_mask_tag = 'tag:wa:cpu_mask'\n\n\ndef _wa_dict_representer(dumper, data):\n    return dumper.represent_mapping(_mapping_tag, iter(data.items()))\n\n\ndef _wa_regex_representer(dumper, data):\n    text = '{}:{}'.format(data.flags, data.pattern)\n    return dumper.represent_scalar(_regex_tag, text)\n\n\ndef _wa_level_representer(dumper, data):\n    text = '{}:{}'.format(data.name, data.level)\n    return dumper.represent_scalar(_level_tag, text)\n\n\ndef _wa_cpu_mask_representer(dumper, data):\n    return dumper.represent_scalar(_cpu_mask_tag, data.mask())\n\n\ndef _wa_regex_constructor(loader, node):\n    value = loader.construct_scalar(node)\n    flags, pattern = value.split(':', 1)\n    return re.compile(pattern, int(flags or 0))\n\n\ndef _wa_level_constructor(loader, node):\n    value = loader.construct_scalar(node)\n    name, value = value.split(':', 1)\n    return level(name, value)\n\n\ndef _wa_cpu_mask_constructor(loader, node):\n    value = loader.construct_scalar(node)\n    return cpu_mask(value)\n\n\nclass _WaYamlLoader(_yaml_loader):  # pylint: disable=too-many-ancestors\n\n    def construct_mapping(self, node, deep=False):\n        if isinstance(node, MappingNode):\n            self.flatten_mapping(node)\n        if not isinstance(node, MappingNode):\n            raise ConstructorError(None, None,\n                                   \"expected a mapping node, but found %s\" % node.id,\n                                   node.start_mark)\n        mapping = OrderedDict()\n        for key_node, value_node in node.value:\n            key = self.construct_object(key_node, deep=deep)\n            if not isinstance(key, 
Hashable):\n                raise ConstructorError(\"while constructing a mapping\", node.start_mark,\n                                       \"found unhashable key\", key_node.start_mark)\n            value = self.construct_object(value_node, deep=deep)\n            mapping[key] = value\n        return mapping\n\n\n_yaml.add_representer(OrderedDict, _wa_dict_representer)\n_yaml.add_representer(regex_type, _wa_regex_representer)\n_yaml.add_representer(level, _wa_level_representer)\n_yaml.add_representer(cpu_mask, _wa_cpu_mask_representer)\n_yaml.add_constructor(_regex_tag, _wa_regex_constructor, Loader=_WaYamlLoader)\n_yaml.add_constructor(_level_tag, _wa_level_constructor, Loader=_WaYamlLoader)\n_yaml.add_constructor(_cpu_mask_tag, _wa_cpu_mask_constructor, Loader=_WaYamlLoader)\n_yaml.add_constructor(_mapping_tag, _WaYamlLoader.construct_yaml_map, Loader=_WaYamlLoader)\n\n\nclass yaml(object):\n\n    @staticmethod\n    def dump(o, wfh, *args, **kwargs):\n        return _yaml.dump(o, wfh, *args, **kwargs)\n\n    @staticmethod\n    def load(fh, *args, **kwargs):\n        try:\n            return _yaml.load(fh, *args, Loader=_WaYamlLoader, **kwargs)\n        except _yaml.YAMLError as e:\n            lineno = None\n            if hasattr(e, 'problem_mark'):\n                lineno = e.problem_mark.line  # pylint: disable=no-member\n            message = e.args[0] if (e.args and e.args[0]) else str(e)\n            raise SerializerSyntaxError(message, lineno)\n\n    loads = load\n\n\nclass python(object):\n\n    @staticmethod\n    def dump(o, wfh, *args, **kwargs):\n        raise NotImplementedError()\n\n    @classmethod\n    def load(cls, fh, *args, **kwargs):\n        return cls.loads(fh.read())\n\n    @staticmethod\n    def loads(s, *args, **kwargs):\n        pod = {}\n        try:\n            exec(s, pod)  # pylint: disable=exec-used\n        except SyntaxError as e:\n            raise SerializerSyntaxError(e.message, e.lineno)\n        for k in list(pod.keys()):  
# pylint: disable=consider-iterating-dictionary\n            if k.startswith('__'):\n                del pod[k]\n        return pod\n\n\ndef read_pod(source, fmt=None):\n    if isinstance(source, str):\n        with open(source) as fh:\n            return _read_pod(fh, fmt)\n    elif hasattr(source, 'read') and (hasattr(source, 'name') or fmt):\n        return _read_pod(source, fmt)\n    else:\n        message = 'source must be a path or an open file handle; got {}'\n        raise ValueError(message.format(type(source)))\n\n\ndef write_pod(pod, dest, fmt=None):\n    if isinstance(dest, str):\n        with open(dest, 'w') as wfh:\n            return _write_pod(pod, wfh, fmt)\n    elif hasattr(dest, 'write') and (hasattr(dest, 'name') or fmt):\n        return _write_pod(pod, dest, fmt)\n    else:\n        message = 'dest must be a path or an open file handle; got {}'\n        raise ValueError(message.format(type(dest)))\n\n\ndef dump(o, wfh, fmt='json', *args, **kwargs):\n    serializer = {'yaml': yaml,\n                  'json': json,\n                  'python': python,\n                  'py': python,\n                  }.get(fmt)\n    if serializer is None:\n        raise ValueError('Unknown serialization format: \"{}\"'.format(fmt))\n    serializer.dump(o, wfh, *args, **kwargs)\n\n\ndef load(s, fmt='json', *args, **kwargs):\n    return read_pod(s, fmt=fmt)\n\n\ndef _read_pod(fh, fmt=None):\n    if fmt is None:\n        fmt = os.path.splitext(fh.name)[1].lower().strip('.')\n        if fmt == '':\n            # Special case of no given file extension\n            message = (\"Could not determine format \"\n                       \"from file extension for \\\"{}\\\". 
\"\n                       \"Please specify it or modify the fmt parameter.\")\n            raise ValueError(message.format(getattr(fh, 'name', '<none>')))\n    if fmt == 'yaml':\n        return yaml.load(fh)\n    elif fmt == 'json':\n        return json.load(fh)\n    elif fmt == 'py':\n        return python.load(fh)\n    else:\n        raise ValueError('Unknown format \"{}\": {}'.format(fmt, getattr(fh, 'name', '<none>')))\n\n\ndef _write_pod(pod, wfh, fmt=None):\n    if fmt is None:\n        fmt = os.path.splitext(wfh.name)[1].lower().strip('.')\n    if fmt == 'yaml':\n        return yaml.dump(pod, wfh)\n    elif fmt == 'json':\n        return json.dump(pod, wfh)\n    elif fmt == 'py':\n        raise ValueError('Serializing to Python is not supported')\n    else:\n        raise ValueError('Unknown format \"{}\": {}'.format(fmt, getattr(wfh, 'name', '<none>')))\n\n\ndef is_pod(obj):\n    if type(obj) not in POD_TYPES:  # pylint: disable=unidiomatic-typecheck\n        return False\n    if hasattr(obj, 'items'):\n        for k, v in obj.items():\n            if not (is_pod(k) and is_pod(v)):\n                return False\n    elif isiterable(obj):\n        for v in obj:\n            if not is_pod(v):\n                return False\n    return True\n\n\nclass Podable(object):\n\n    _pod_serialization_version = 0\n\n    @classmethod\n    def from_pod(cls, pod):\n        pod = cls._upgrade_pod(pod)\n        instance = cls()\n        instance._pod_version = pod.pop('_pod_version')  # pylint: disable=protected-access\n        return instance\n\n    @classmethod\n    def _upgrade_pod(cls, pod):\n        _pod_serialization_version = pod.pop('_pod_serialization_version', None) or 0\n        while _pod_serialization_version < cls._pod_serialization_version:\n            _pod_serialization_version += 1\n            upgrade = getattr(cls, '_pod_upgrade_v{}'.format(_pod_serialization_version))\n            pod = upgrade(pod)\n        return pod\n\n    def __init__(self):\n      
  self._pod_version = self._pod_serialization_version\n\n    def to_pod(self):\n        pod = {}\n        pod['_pod_version'] = self._pod_version\n        pod['_pod_serialization_version'] = self._pod_serialization_version\n        return pod\n"
  },
  {
    "path": "wa/utils/terminalsize.py",
    "content": "#    Copyright 2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\n# Adapted from\n# https://gist.github.com/jtriley/1108174\n# pylint: disable=bare-except,unpacking-non-sequence\nimport os\nimport shlex\nimport struct\nimport platform\nimport subprocess\n\n\ndef get_terminal_size():\n    \"\"\" getTerminalSize()\n     - get width and height of console\n     - works on linux,os x,windows,cygwin(windows)\n     originally retrieved from:\n     http://stackoverflow.com/questions/566746/how-to-get-console-window-width-in-python\n    \"\"\"\n    current_os = platform.system()\n    tuple_xy = None\n    if current_os == 'Windows':\n        tuple_xy = _get_terminal_size_windows()\n        if tuple_xy is None:\n            # needed for window's python in cygwin's xterm\n            tuple_xy = _get_terminal_size_tput()\n    if current_os in ['Linux', 'Darwin'] or current_os.startswith('CYGWIN'):\n        tuple_xy = _get_terminal_size_linux()\n    if tuple_xy is None or tuple_xy == (0, 0):\n        tuple_xy = (80, 25)      # assume \"standard\" terminal\n    return tuple_xy\n\n\ndef _get_terminal_size_windows():\n    # pylint: disable=unused-variable,redefined-outer-name,too-many-locals, import-outside-toplevel\n    try:\n        from ctypes import windll, create_string_buffer\n        # stdin handle is -10\n        # stdout handle is -11\n        # stderr handle is -12\n        h = windll.kernel32.GetStdHandle(-12)\n        csbi = 
create_string_buffer(22)\n        res = windll.kernel32.GetConsoleScreenBufferInfo(h, csbi)\n        if res:\n            (bufx, bufy, curx, cury, wattr,\n             left, top, right, bottom,\n             maxx, maxy) = struct.unpack(\"hhhhHhhhhhh\", csbi.raw)\n            sizex = right - left + 1\n            sizey = bottom - top + 1\n            return sizex, sizey\n    except:  # NOQA\n        pass\n\n\ndef _get_terminal_size_tput():\n    # get terminal width\n    # src: http://stackoverflow.com/questions/263890/how-do-i-find-the-width-height-of-a-terminal-window\n    try:\n        cols = int(subprocess.check_call(shlex.split('tput cols')))\n        rows = int(subprocess.check_call(shlex.split('tput lines')))\n        return (cols, rows)\n    except:  # NOQA\n        pass\n\n\ndef _get_terminal_size_linux():\n    # pylint: disable=import-outside-toplevel\n    def ioctl_GWINSZ(fd):\n        try:\n            import fcntl\n            import termios\n            cr = struct.unpack('hh',\n                               fcntl.ioctl(fd, termios.TIOCGWINSZ, '1234'))\n            return cr\n        except:  # NOQA\n            pass\n    cr = ioctl_GWINSZ(0) or ioctl_GWINSZ(1) or ioctl_GWINSZ(2)\n    if not cr:\n        try:\n            fd = os.open(os.ctermid(), os.O_RDONLY)\n            cr = ioctl_GWINSZ(fd)\n            os.close(fd)\n        except:   # NOQA\n            pass\n    if not cr:\n        try:\n            cr = (os.environ['LINES'], os.environ['COLUMNS'])\n        except:  # NOQA\n            return None\n    return int(cr[1]), int(cr[0])\n\n\nif __name__ == \"__main__\":\n    sizex, sizey = get_terminal_size()\n    print('width =', sizex, 'height =', sizey)\n"
  },
  {
    "path": "wa/utils/trace_cmd.py",
    "content": "#    Copyright 2015-2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\nimport re\nimport logging\nfrom itertools import chain\n\nfrom devlib.collector.ftrace import TRACE_MARKER_START, TRACE_MARKER_STOP\n\nfrom wa.utils.misc import isiterable\nfrom wa.utils.types import numeric\n\n\nlogger = logging.getLogger('trace-cmd')\n\n\nclass TraceCmdEvent(object):\n    \"\"\"\n    A single trace-cmd event. This will appear in the trace cmd report in the format ::\n\n          <idle>-0     [000]  3284.126993: sched_rq_runnable_load: cpu=0 load=54\n             |           |         |              |                |___________|\n             |           |         |              |                      |\n          thread        cpu    timestamp        name                    body\n\n    \"\"\"\n\n    __slots__ = ['thread', 'reporting_cpu_id', 'timestamp', 'name', 'text', 'fields']\n\n    def __init__(self, thread, cpu_id, ts, name, body, parser=None):\n        \"\"\"\n        parameters:\n\n        :thread: thread which generated the event\n        :cpu: cpu on which the event has occurred\n        :ts: timestamp of the event\n        :name: the name of the event\n        :bodytext: a string with the rest of the event text\n        :parser: optionally, a function that will parse body text to populate\n                 this event's attributes\n\n        The parser can be any callable that can be invoked with\n\n            parser(event, 
text)\n\n        Where ``event`` is this TraceCmdEvent instance, and ``text`` is the body text to be\n        parsed. The parser should updated the passed event instance and not return anything\n        (the return value will be ignored). Any exceptions raised by the parser will be silently\n        ignored (note that this means that the event's attributes may be partially initialized).\n\n        \"\"\"\n        self.thread = thread\n        self.reporting_cpu_id = int(cpu_id)\n        self.timestamp = numeric(ts)\n        self.name = name\n        self.text = body\n        self.fields = {}\n\n        if parser:\n            try:\n                parser(self, self.text)\n            except Exception:  # pylint: disable=broad-except\n                # unknown format assume user does not care or know how to\n                # parse self.text\n                pass\n\n    def __getattr__(self, name):\n        try:\n            return self.fields[name]\n        except KeyError:\n            raise AttributeError(name)\n\n    def __str__(self):\n        return 'TE({} @ {})'.format(self.name, self.timestamp)\n\n    __repr__ = __str__\n\n\nclass DroppedEventsEvent(object):\n\n    __slots__ = ['thread', 'reporting_cpu_id', 'timestamp', 'name', 'text', 'fields']\n\n    def __init__(self, cpu_id):\n        self.thread = None\n        self.reporting_cpu_id = None\n        self.timestamp = None\n        self.name = 'DROPPED EVENTS DETECTED'\n        self.text = None\n        self.fields = {'cpu_id': int(cpu_id)}\n\n    def __getattr__(self, name):\n        try:\n            return self.fields[name]\n        except KeyError:\n            raise AttributeError(name)\n\n    def __str__(self):\n        return 'DROPPED_EVENTS_ON_CPU{}'.format(self.cpu_id)\n\n    __repr__ = __str__\n\n\ndef try_convert_to_numeric(v):\n    try:\n        if isiterable(v):\n            return list(map(numeric, v))\n        else:\n            return numeric(v)\n    except ValueError:\n        return 
v\n\n\ndef default_body_parser(event, text):\n    \"\"\"\n    Default parser to attempt to use to parse body text for the event (i.e. after\n    the \"header\" common to all events has been parsed). This assumes that the body is\n    a whitespace-separated list of key=value pairs. The parser will attempt to convert\n    the value into a numeric type, and failing that, keep it as string.\n\n    \"\"\"\n    parts = [e.rsplit(' ', 1) for e in text.strip().split('=')]\n    parts = [p.strip() for p in chain.from_iterable(parts)]\n    if not len(parts) % 2:\n        i = iter(parts)\n        for k, v in zip(i, i):\n            try:\n                v = int(v)\n            except ValueError:\n                pass\n            event.fields[k] = v\n\n\ndef regex_body_parser(regex, flags=0):\n    \"\"\"\n    Creates an event body parser from the specified regular expression (could be an\n    ``re.RegexObject``, or a string). The regular expression should contain some named\n    groups, as those will be extracted as the event attributes (unnamed groups and the\n    rest of the match will be ignored).\n\n    If the specified regex is a string, it will be compiled, in which case ``flags`` may\n    be provided for the resulting regex object (see ``re`` standard module documentation).\n    If regex is a pre-compiled object, flags will be ignored.\n\n    \"\"\"\n    if isinstance(regex, str):\n        regex = re.compile(regex, flags)\n\n    def regex_parser_func(event, text):\n        match = regex.search(text)\n        if match:\n            for k, v in match.groupdict().items():\n                try:\n                    event.fields[k] = int(v)\n                except ValueError:\n                    event.fields[k] = v\n\n    return regex_parser_func\n\n\ndef sched_switch_parser(event, text):\n    \"\"\"\n    Sched switch output may be presented in a couple of different formats. One is handled\n    by a regex. 
The other format can *almost* be handled by the default parser, if it\n    weren't for the ``==>`` that appears in the middle.\n    \"\"\"\n    if text.count('=') == 2:  # old format\n        regex = re.compile(\n            r'(?P<prev_comm>\\S.*):(?P<prev_pid>\\d+) \\[(?P<prev_prio>\\d+)\\] (?P<status>\\S+)'\n            r' ==> '\n            r'(?P<next_comm>\\S.*):(?P<next_pid>\\d+) \\[(?P<next_prio>\\d+)\\]'\n        )\n        parser_func = regex_body_parser(regex)\n        return parser_func(event, text)\n    else:  # there are more than two \"=\" -- new format\n        return default_body_parser(event, text.replace('==>', ''))\n\n\ndef sched_stat_parser(event, text):\n    \"\"\"\n    sched_stat_* events include the units, \"[ns]\", in an otherwise\n    regular key=value sequence; so the units need to be stripped out first.\n    \"\"\"\n    return default_body_parser(event, text.replace(' [ns]', ''))\n\n\ndef sched_wakeup_parser(event, text):\n    regex = re.compile(r'(?P<comm>\\S+):(?P<pid>\\d+) \\[(?P<prio>\\d+)\\] success=(?P<success>\\d) CPU:(?P<cpu>\\d+)')\n    parse_func = regex_body_parser(regex)\n    return parse_func(event, text)\n\n\n# Maps event onto the corresponding parser for its body text. A parser may be\n# a callable with signature\n#\n#   parser(event, bodytext)\n#\n# a re.RegexObject, or a string (in which case it will be compiled into a\n# regex). 
In case of a string/regex, its named groups will be used to populate\n# the event's attributes.\nEVENT_PARSER_MAP = {\n    'sched_stat_blocked': sched_stat_parser,\n    'sched_stat_iowait': sched_stat_parser,\n    'sched_stat_runtime': sched_stat_parser,\n    'sched_stat_sleep': sched_stat_parser,\n    'sched_stat_wait': sched_stat_parser,\n    'sched_switch': sched_switch_parser,\n    'sched_wakeup': sched_wakeup_parser,\n    'sched_wakeup_new': sched_wakeup_parser,\n}\n\nTRACE_EVENT_REGEX = re.compile(r'^\\s+(?P<thread>\\S+.*?\\S+)\\s+\\[(?P<cpu_id>\\d+)\\]\\s+(?P<ts>[\\d.]+):\\s+'\n                               r'(?P<name>[^:]+):\\s+(?P<body>.*?)\\s*$')\n\nHEADER_REGEX = re.compile(r'^\\s*(?:version|cpus)\\s*=\\s*([\\d.]+)\\s*$')\n\nDROPPED_EVENTS_REGEX = re.compile(r'CPU:(?P<cpu_id>\\d+) \\[\\d*\\s*EVENTS DROPPED\\]')\n\nEMPTY_CPU_REGEX = re.compile(r'CPU \\d+ is empty')\n\n\nclass TraceCmdParser(object):\n    \"\"\"\n    A parser for textual representation of ftrace as reported by trace-cmd\n\n    \"\"\"\n\n    def __init__(self, filter_markers=True, check_for_markers=True, events=None):\n        \"\"\"\n        Initialize a new trace parser.\n\n        :param filter_markers: Specifies whether the trace before the start\n                               marker and after the stop marker should be\n                               filtered out (so only events between the two\n                               markers will be reported). 
This may be overridden\n                               based on `check_for_markers` parameter of\n                               `parse()`\n        :param check_for_markers: Check if the start/stop markers are present\n                                  in the trace and ensure that `filter_markers`\n                                  is `False` if they aren't\n        :param events: A list of event names to be reported; if not specified,\n                       all events will be reported.\n\n\n        \"\"\"\n        self.filter_markers = filter_markers\n        self.check_for_markers = check_for_markers\n        self.events = events\n\n    def parse(self, filepath):  # pylint: disable=too-many-branches,too-many-locals\n        \"\"\"\n        This is a generator for the trace event stream.\n\n        :param filepath: The path to the file containing text trace as reported\n                         by trace-cmd\n        \"\"\"\n        inside_maked_region = False\n        # pylint: disable=superfluous-parens\n        filters = [re.compile('^{}$'.format(e)) for e in (self.events or [])]\n        filter_markers = self.filter_markers\n        if filter_markers and self.check_for_markers:\n            with open(filepath) as fh:\n                for line in fh:\n                    if TRACE_MARKER_START in line:\n                        break\n                else:\n                    # marker not found, force filtering by marker to False\n                    filter_markers = False\n\n        with open(filepath) as fh:\n            for line in fh:\n                # if processing trace markers, skip marker lines as well as all\n                # lines outside marked region\n                if filter_markers:\n                    if not inside_maked_region:\n                        if TRACE_MARKER_START in line:\n                            inside_maked_region = True\n                        continue\n                    elif TRACE_MARKER_STOP in line:\n                        
inside_maked_region = False\n                        continue\n\n                match = DROPPED_EVENTS_REGEX.search(line)\n                if match:\n                    yield DroppedEventsEvent(match.group('cpu_id'))\n                    continue\n\n                matched = False\n                for rx in [HEADER_REGEX, EMPTY_CPU_REGEX]:\n                    match = rx.search(line)\n                    if match:\n                        logger.debug(line.strip())\n                        matched = True\n                        break\n                if matched:\n                    continue\n\n                match = TRACE_EVENT_REGEX.search(line)\n                if not match:\n                    logger.warning('Invalid trace event: \"{}\"'.format(line))\n                    continue\n\n                event_name = match.group('name')\n\n                if filters:\n                    found = False\n                    for f in filters:\n                        if f.search(event_name):\n                            found = True\n                            break\n                    if not found:\n                        continue\n\n                body_parser = EVENT_PARSER_MAP.get(event_name, default_body_parser)\n                if isinstance(body_parser, (str, re.Pattern)):  # pylint: disable=protected-access\n                    body_parser = regex_body_parser(body_parser)\n                yield TraceCmdEvent(parser=body_parser, **match.groupdict())\n\n\ndef trace_has_marker(filepath, max_lines_to_check=2000000):\n    with open(filepath) as fh:\n        for i, line in enumerate(fh):\n            if TRACE_MARKER_START in line:\n                return True\n            if i >= max_lines_to_check:\n                break\n    return False\n"
  },
  {
    "path": "wa/utils/types.py",
    "content": "#    Copyright 2014-2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\n\n\"\"\"\nRoutines for doing various type conversions. These usually embody some\nhigher-level semantics than are present in standard Python types (e.g.\n``boolean`` will convert the string ``\"false\"`` to ``False``, where as\nnon-empty strings are usually considered to be ``True``).\n\nA lot of these are intended to specify type conversions declaratively in place\nlike ``Parameter``'s ``kind`` argument. These are basically \"hacks\" around the\nfact that Python is not the best language to use for configuration.\n\n\"\"\"\nimport os\nimport re\nimport numbers\nimport shlex\nfrom bisect import insort\nfrom urllib.parse import quote, unquote  # pylint: disable=no-name-in-module, import-error\n# pylint: disable=wrong-import-position\nfrom collections import defaultdict\nfrom collections.abc import MutableMapping\nfrom functools import total_ordering\n\nfrom past.builtins import basestring  # pylint: disable=redefined-builtin\nfrom future.utils import with_metaclass\n\nfrom devlib.utils.types import identifier, boolean, integer, numeric, caseless_string\n\nfrom wa.utils.misc import (isiterable, list_to_ranges, list_to_mask,\n                           mask_to_list, ranges_to_list)\n\n\ndef list_of_strs(value):\n    \"\"\"\n    Value must be iterable. 
All elements will be converted to strings.\n\n    \"\"\"\n    if not isiterable(value):\n        raise ValueError(value)\n    return list(map(str, value))\n\n\nlist_of_strings = list_of_strs\n\n\ndef list_of_ints(value):\n    \"\"\"\n    Value must be iterable. All elements will be converted to ``int``\\ s.\n\n    \"\"\"\n    if not isiterable(value):\n        raise ValueError(value)\n    return list(map(int, value))\n\n\nlist_of_integers = list_of_ints\n\n\ndef list_of_numbers(value):\n    \"\"\"\n    Value must be iterable. All elements will be converted to numbers (either ``ints`` or\n    ``float``\\ s depending on the elements).\n\n    \"\"\"\n    if not isiterable(value):\n        raise ValueError(value)\n    return list(map(numeric, value))\n\n\ndef list_of_bools(value, interpret_strings=True):\n    \"\"\"\n    Value must be iterable. All elements will be converted to ``bool``\\ s.\n\n    .. note:: By default, ``boolean()`` conversion function will be used, which\n              means that strings like ``\"0\"`` or ``\"false\"`` will be\n              interpreted as ``False``. If this is undesirable, set\n              ``interpret_strings`` to ``False``.\n\n    \"\"\"\n    if not isiterable(value):\n        raise ValueError(value)\n    if interpret_strings:\n        return list(map(boolean, value))\n    else:\n        return list(map(bool, value))\n\n\ndef list_of(type_):\n    \"\"\"Generates a \"list of\" callable for the specified type. 
The callable\n    attempts to convert all elements in the passed value to the specified\n    ``type_``, raising ``ValueError`` on error.\"\"\"\n    def __init__(self, values):\n        list.__init__(self, list(map(type_, values)))\n\n    def append(self, value):\n        list.append(self, type_(value))\n\n    def extend(self, other):\n        list.extend(self, list(map(type_, other)))\n\n    def from_pod(cls, pod):\n        return cls(list(map(type_, pod)))\n\n    def _to_pod(self):\n        return self\n\n    def __setitem__(self, idx, value):\n        list.__setitem__(self, idx, type_(value))\n\n    return type('list_of_{}s'.format(type_.__name__),\n                (list, ), {\n                    \"__init__\": __init__,\n                    \"__setitem__\": __setitem__,\n                    \"append\": append,\n                    \"extend\": extend,\n                    \"to_pod\": _to_pod,\n                    \"from_pod\": classmethod(from_pod),\n    })\n\n\ndef list_or_string(value):\n    \"\"\"\n    Converts the value into a list of strings. If the value is not iterable,\n    a one-element list with stringified value will be returned.\n\n    \"\"\"\n    if isinstance(value, str):\n        return [value]\n    else:\n        try:\n            return list(value)\n        except ValueError:\n            return [str(value)]\n\n\ndef list_or_caseless_string(value):\n    \"\"\"\n    Converts the value into a list of ``caseless_string``'s. If the value is\n    not iterable a one-element list with stringified value will be returned.\n\n    \"\"\"\n    if isinstance(value, str):\n        return [caseless_string(value)]\n    else:\n        try:\n            return list(map(caseless_string, value))\n        except ValueError:\n            return [caseless_string(value)]\n\n\ndef list_or(type_):\n    \"\"\"\n    Generator for \"list or\" types. 
These take either a single value or a list\n    values and return a list of the specified ``type_`` performing the\n    conversion on the value (if a single value is specified) or each of the\n    elements of the specified list.\n\n    \"\"\"\n    list_type = list_of(type_)\n\n    class list_or_type(list_type):\n        def __init__(self, value):\n            # pylint: disable=non-parent-init-called,super-init-not-called\n            if isiterable(value):\n                list_type.__init__(self, value)\n            else:\n                list_type.__init__(self, [value])\n    return list_or_type\n\n\nlist_or_integer = list_or(integer)\nlist_or_number = list_or(numeric)\nlist_or_bool = list_or(boolean)\n\n\nregex_type = type(re.compile(''))\nnone_type = type(None)\n\n\ndef regex(value):\n    \"\"\"\n    Regular expression. If value is a string, it will be complied with no\n    flags. If you want to specify flags, value must be precompiled.\n\n    \"\"\"\n    if isinstance(value, regex_type):\n        return value\n    else:\n        return re.compile(value)\n\n\ndef version_tuple(v):\n    \"\"\"\n    Converts a version string into a tuple of strings that can be used for\n    natural comparison allowing delimeters of \"-\" and \".\".\n    \"\"\"\n    v = v.replace('-', '.')\n    return tuple(map(str, (v.split(\".\"))))\n\n\ndef module_name_set(l):  # noqa: E741\n    \"\"\"\n    Converts a list of target modules into a set of module names, disregarding\n    any configuration that may be present.\n    \"\"\"\n    modules = set()\n    for m in l:\n        if m and isinstance(m, dict):\n            modules.update(m.keys())\n        else:\n            modules.add(m)\n    return modules\n\n\n__counters = defaultdict(int)\n\n\ndef reset_counter(name=None, value=0):\n    __counters[name] = value\n\n\ndef reset_all_counters(value=0):\n    for k in __counters:\n        reset_counter(k, value)\n\n\ndef counter(name=None):\n    \"\"\"\n    An auto incrementing value (kind of 
like an AUTO INCREMENT field in SQL).\n    Optionally, the name of the counter to be used is specified (each counter\n    increments separately).\n\n    Counts start at 1, not 0.\n\n    \"\"\"\n    __counters[name] += 1\n    value = __counters[name]\n    return value\n\n\nclass arguments(list):\n    \"\"\"\n    Represents command line arguments to be passed to a program.\n\n    \"\"\"\n\n    def __init__(self, value=None):\n        if isiterable(value):\n            super(arguments, self).__init__(list(map(str, value)))\n        elif isinstance(value, str):\n            posix = os.name != 'nt'\n            super(arguments, self).__init__(shlex.split(value, posix=posix))\n        elif value is None:\n            super(arguments, self).__init__()\n        else:\n            super(arguments, self).__init__([str(value)])\n\n    def append(self, value):\n        return super(arguments, self).append(str(value))\n\n    def extend(self, values):\n        return super(arguments, self).extend(list(map(str, values)))\n\n    def __str__(self):\n        return ' '.join(self)\n\n\nclass prioritylist(object):\n\n    def __init__(self):\n        \"\"\"\n        Returns an OrderedReceivers object that externally behaves\n        like a list but it maintains the order of its elements\n        according to their priority.\n        \"\"\"\n        self.elements = defaultdict(list)\n        self.is_ordered = True\n        self.priorities = []\n        self.size = 0\n        self._cached_elements = None\n\n    def add(self, new_element, priority=0):\n        \"\"\"\n        adds a new item in the list.\n\n        - ``new_element`` the element to be inserted in the prioritylist\n        - ``priority`` is the priority of the element which specifies its\n        order within the List\n        \"\"\"\n        self._add_element(new_element, priority)\n\n    def add_before(self, new_element, element):\n        priority, index = self._priority_index(element)\n        
self._add_element(new_element, priority, index)\n\n    def add_after(self, new_element, element):\n        priority, index = self._priority_index(element)\n        self._add_element(new_element, priority, index + 1)\n\n    def index(self, element):\n        return self._to_list().index(element)\n\n    def remove(self, element):\n        index = self.index(element)\n        self.__delitem__(index)\n\n    def _priority_index(self, element):\n        for priority, elements in self.elements.items():\n            if element in elements:\n                return (priority, elements.index(element))\n        raise IndexError(element)\n\n    def _to_list(self):\n        if self._cached_elements is None:\n            self._cached_elements = []\n            for priority in self.priorities:\n                self._cached_elements += self.elements[priority]\n        return self._cached_elements\n\n    def _add_element(self, element, priority, index=None):\n        if index is None:\n            self.elements[priority].append(element)\n        else:\n            self.elements[priority].insert(index, element)\n        self.size += 1\n        self._cached_elements = None\n        if priority not in self.priorities:\n            insort(self.priorities, priority)\n\n    def _delete(self, priority, priority_index):\n        del self.elements[priority][priority_index]\n        self.size -= 1\n        if not self.elements[priority]:\n            self.priorities.remove(priority)\n        self._cached_elements = None\n\n    def __iter__(self):\n        for priority in reversed(self.priorities):  # highest priority first\n            for element in self.elements[priority]:\n                yield element\n\n    def __getitem__(self, index):\n        return self._to_list()[index]\n\n    def __delitem__(self, index):\n        if isinstance(index, numbers.Integral):\n            index = int(index)\n            if index < 0:\n                index_range = [len(self) + index]\n            else:\n 
               index_range = [index]\n        elif isinstance(index, slice):\n            index_range = list(range(index.start or 0, index.stop, index.step or 1))\n        else:\n            raise ValueError('Invalid index {}'.format(index))\n        current_global_offset = 0\n        priority_counts = dict(zip(self.priorities, [len(self.elements[p])\n                                                     for p in self.priorities]))\n        for priority in self.priorities:\n            if not index_range:\n                break\n            priority_offset = 0\n            while index_range:\n                del_index = index_range[0]\n                if priority_counts[priority] + current_global_offset <= del_index:\n                    current_global_offset += priority_counts[priority]\n                    break\n                within_priority_index = del_index - \\\n                    (current_global_offset + priority_offset)\n                self._delete(priority, within_priority_index)\n                priority_offset += 1\n                index_range.pop(0)\n\n    def __len__(self):\n        return self.size\n\n\nclass toggle_set(set):\n    \"\"\"\n    A set that contains items to enable or disable something.\n\n    A prefix of ``~`` is used to denote disabling something, for example\n    the list ['apples', '~oranges', 'cherries'] enables both ``apples``\n    and ``cherries`` but disables ``oranges``.\n    \"\"\"\n\n    @staticmethod\n    def from_pod(pod):\n        return toggle_set(pod)\n\n    @staticmethod\n    def merge(dest, source):\n        if '~~' in source:\n            return toggle_set(source)\n\n        dest = toggle_set(dest)\n        for item in source:\n            if item not in dest:\n                #Disable previously enabled item\n                if item.startswith('~') and item[1:] in dest:\n                    dest.remove(item[1:])\n                #Enable previously disabled item\n                if not item.startswith('~') and ('~' + 
item) in dest:\n                    dest.remove('~' + item)\n                dest.add(item)\n        return dest\n\n    def __init__(self, *args):\n        if args:\n            value = args[0]\n            if isinstance(value, str):\n                msg = 'invalid type for toggle_set: \"{}\"'\n                raise TypeError(msg.format(type(value)))\n            updated_value = []\n            for v in value:\n                if v.startswith('~') and v[1:] in updated_value:\n                    updated_value.remove(v[1:])\n                elif not v.startswith('~') and ('~' + v) in updated_value:\n                    updated_value.remove(('~' + v))\n                updated_value.append(v)\n            args = tuple([updated_value] + list(args[1:]))\n        set.__init__(self, *args)\n\n    def merge_with(self, other):\n        return toggle_set.merge(self, other)\n\n    def merge_into(self, other):\n        return toggle_set.merge(other, self)\n\n    def add(self, item):\n        if item not in self:\n            #Disable previously enabled item\n            if item.startswith('~') and item[1:] in self:\n                self.remove(item[1:])\n            #Enable previously disabled item\n            if not item.startswith('~') and ('~' + item) in self:\n                self.remove('~' + item)\n            super(toggle_set, self).add(item)\n\n    def values(self):\n        \"\"\"\n        returns a list of enabled items.\n        \"\"\"\n        return {item for item in self if not item.startswith('~')}\n\n    def conflicts_with(self, other):\n        \"\"\"\n        Checks if any items in ``other`` conflict with items already in this list.\n\n        Args:\n            other (list): The list to be checked against\n\n        Returns:\n            A list of items in ``other`` that conflict with items in this list\n        \"\"\"\n        conflicts = []\n        for item in other:\n            if item.startswith('~') and item[1:] in self:\n                
conflicts.append(item)\n            if not item.startswith('~') and ('~' + item) in self:\n                conflicts.append(item)\n        return conflicts\n\n    def to_pod(self):\n        return list(self.values())\n\n\nclass ID(str):\n\n    def merge_with(self, other):\n        return '_'.join([self, other])\n\n    def merge_into(self, other):\n        return '_'.join([other, self])\n\n\nclass obj_dict(MutableMapping):\n    \"\"\"\n    An object that behaves like a dict but each dict entry can also be accessed\n    as an attribute.\n\n    :param not_in_dict: A list of keys that can only be accessed as attributes\n\n    \"\"\"\n\n    @staticmethod\n    def from_pod(pod):\n        return obj_dict(pod)\n\n    # pylint: disable=super-init-not-called\n    def __init__(self, values=None, not_in_dict=None):\n        self.__dict__['dict'] = dict(values or {})\n        self.__dict__['not_in_dict'] = not_in_dict if not_in_dict is not None else []\n\n    def to_pod(self):\n        return self.__dict__['dict']\n\n    def __getitem__(self, key):\n        if key in self.not_in_dict:\n            msg = '\"{}\" is in the list keys that can only be accessed as attributes'\n            raise KeyError(msg.format(key))\n        return self.__dict__['dict'][key]\n\n    def __setitem__(self, key, value):\n        self.__dict__['dict'][key] = value\n\n    def __delitem__(self, key):\n        del self.__dict__['dict'][key]\n\n    def __len__(self):\n        return sum(1 for _ in self)\n\n    def __iter__(self):\n        for key in self.__dict__['dict']:\n            if key not in self.__dict__['not_in_dict']:\n                yield key\n\n    def __repr__(self):\n        return repr(dict(self))\n\n    def __str__(self):\n        return str(dict(self))\n\n    def __setattr__(self, name, value):\n        self.__dict__['dict'][name] = value\n\n    def __delattr__(self, name):\n        if name in self:\n            del self.__dict__['dict'][name]\n        else:\n            raise 
AttributeError(\"No such attribute: \" + name)\n\n    def __getattr__(self, name):\n        if 'dict' not in self.__dict__:\n            raise AttributeError(\"No such attribute: \" + name)\n        if name in self.__dict__['dict']:\n            return self.__dict__['dict'][name]\n        else:\n            raise AttributeError(\"No such attribute: \" + name)\n\n\n@total_ordering\nclass level(object):\n    \"\"\"\n    A level has a name and behaves like a string when printed, however it also\n    has a numeric value which is used in ordering comparisons.\n\n    \"\"\"\n\n    @staticmethod\n    def from_pod(pod):\n        name, value_part = pod.split('(')\n        return level(name, numeric(value_part.rstrip(')')))\n\n    def __init__(self, name, value):\n        self.name = caseless_string(name)\n        self.value = numeric(value)\n\n    def to_pod(self):\n        return repr(self)\n\n    def __str__(self):\n        return str(self.name)\n\n    def __repr__(self):\n        return '{}({})'.format(self.name, self.value)\n\n    def __hash__(self):\n        return hash(self.name)\n\n    def __eq__(self, other):\n        if isinstance(other, level):\n            return self.value == other.value\n        elif isinstance(other, basestring):\n            return self.name == other\n        else:\n            return self.value == other\n\n    def __lt__(self, other):\n        if isinstance(other, level):\n            return self.value < other.value\n        elif isinstance(other, basestring):\n            return self.name < other\n        else:\n            return self.value < other\n\n    def __ne__(self, other):\n        if isinstance(other, level):\n            return self.value != other.value\n        elif isinstance(other, basestring):\n            return self.name != other\n        else:\n            return self.value != other\n\n\nclass _EnumMeta(type):\n\n    def __str__(cls):\n        return str(cls.levels)\n\n    def __getattr__(cls, name):\n        name = 
name.lower()\n        if name in cls.__dict__:\n            return cls.__dict__[name]\n\n\ndef enum(args, start=0, step=1):\n    \"\"\"\n    Creates a class with attributes named by the first argument.\n    Each attribute is a ``level`` so they behave is integers in comparisons.\n    The value of the first attribute is specified by the second argument\n    (``0`` if not specified).\n\n    ::\n        MyEnum = enum(['A', 'B', 'C'])\n\n    is roughly equivalent of::\n\n        class MyEnum(object):\n            A = 0\n            B = 1\n            C = 2\n\n    however it also implement some specialized behaviors for comparisons and\n    instantiation.\n\n    \"\"\"\n\n    class Enum(with_metaclass(_EnumMeta, object)):\n\n        @classmethod\n        def from_pod(cls, pod):\n            lv = level.from_pod(pod)\n            for enum_level in cls.levels:\n                if enum_level == lv:\n                    return enum_level\n            msg = 'Unexpected value \"{}\" for enum.'\n            raise ValueError(msg.format(pod))\n\n        def __new__(cls, name):\n            for attr_name in dir(cls):\n                if attr_name.startswith('__'):\n                    continue\n\n                attr = getattr(cls, attr_name)\n                if name == attr:\n                    return attr\n\n            try:\n                return Enum.from_pod(name)\n            except ValueError:\n                raise ValueError('Invalid enum value: {}'.format(repr(name)))\n\n    reserved = ['values', 'levels', 'names']\n\n    levels = []\n    n = start\n    for v in args:\n        id_v = identifier(v)\n        if id_v in reserved:\n            message = 'Invalid enum level name \"{}\"; must not be in {}'\n            raise ValueError(message.format(v, reserved))\n        name = caseless_string(id_v)\n        lv = level(v, n)\n        setattr(Enum, name, lv)\n        levels.append(lv)\n        n += step\n\n    setattr(Enum, 'levels', levels)\n    setattr(Enum, 'values', 
[lvl.value for lvl in levels])\n    setattr(Enum, 'names', [lvl.name for lvl in levels])\n\n    return Enum\n\n\nclass ParameterDict(dict):\n    \"\"\"\n    A dict-like object that automatically encodes various types into a url safe string,\n    and enforces a single type for the contents in a list.\n    Each value is first prefixed with 2 letters to preserve type when encoding to a string.\n    The format used is \"value_type, value_dimension\" e.g a 'list of floats' would become 'fl'.\n    \"\"\"\n\n    # Function to determine the appropriate prefix based on the parameters type\n    @staticmethod\n    def _get_prefix(obj):\n        if isinstance(obj, str):\n            prefix = 's'\n        elif isinstance(obj, float):\n            prefix = 'f'\n        elif isinstance(obj, bool):\n            prefix = 'b'\n        elif isinstance(obj, int):\n            prefix = 'i'\n        elif obj is None:\n            prefix = 'n'\n        else:\n            raise ValueError('Unable to encode {} {}'.format(obj, type(obj)))\n        return prefix\n\n    # Function to add prefix and urlencode a provided parameter.\n    @staticmethod\n    def _encode(obj):\n        if isinstance(obj, list):\n            t = type(obj[0])\n            prefix = ParameterDict._get_prefix(obj[0]) + 'l'\n            for item in obj:\n                if not isinstance(item, t):\n                    msg = 'Lists must only contain a single type, contains {} and {}'\n                    raise ValueError(msg.format(t, type(item)))\n            obj = '0newelement0'.join(str(x) for x in obj)\n        else:\n            prefix = ParameterDict._get_prefix(obj) + 's'\n        return quote(prefix + str(obj))\n\n    # Function to decode a string and return a value of the original parameter type.\n    # pylint: disable=too-many-return-statements\n    @staticmethod\n    def _decode(string):\n        value_type = string[:1]\n        value_dimension = string[1:2]\n        value = unquote(string[2:])\n        if 
value_dimension == 's':\n            if value_type == 's':\n                return str(value)\n            elif value_type == 'b':\n                return boolean(value)\n            elif value_type == 'd':\n                return int(value)\n            elif value_type == 'f':\n                return float(value)\n            elif value_type == 'i':\n                return int(value)\n            elif value_type == 'n':\n                return None\n        elif value_dimension == 'l':\n            return [ParameterDict._decode(value_type + 's' + x)\n                    for x in value.split('0newelement0')]\n        else:\n            raise ValueError('Unknown {} {}'.format(type(string), string))\n\n    def __init__(self, *args, **kwargs):\n        for k, v in kwargs.items():\n            self.__setitem__(k, v)\n        dict.__init__(self, *args)\n\n    def __setitem__(self, name, value):\n        dict.__setitem__(self, name, self._encode(value))\n\n    def __getitem__(self, name):\n        return self._decode(dict.__getitem__(self, name))\n\n    def __contains__(self, item):\n        return dict.__contains__(self, self._encode(item))\n\n    def __iter__(self):\n        return iter((k, self._decode(v)) for (k, v) in list(self.items()))\n\n    def iteritems(self):\n        return self.__iter__()\n\n    def get(self, name):\n        return self._decode(dict.get(self, name))\n\n    def pop(self, key):\n        return self._decode(dict.pop(self, key))\n\n    def popitem(self):\n        key, value = dict.popitem(self)\n        return (key, self._decode(value))\n\n    def iter_encoded_items(self):\n        return dict.items(self)\n\n    def get_encoded_value(self, name):\n        return dict.__getitem__(self, name)\n\n    def values(self):\n        return [self[k] for k in dict.keys(self)]\n\n    def update(self, *args, **kwargs):\n        for d in list(args) + [kwargs]:\n            for k, v in d.items():\n                self[k] = v\n\n\nclass cpu_mask(object):\n    
\"\"\"\n    A class to allow for a consistent way of representing a cpus mask with\n    methods to provide conversions between the various required forms. The\n    mask can be specified directly as a mask, as a list of cpus indexes or a\n    sysfs-style string.\n    \"\"\"\n    @staticmethod\n    def from_pod(pod):\n        return cpu_mask(int(pod['cpu_mask']))\n\n    def __init__(self, cpus):\n        self._mask = 0\n        if isinstance(cpus, int):\n            self._mask = cpus\n        elif isinstance(cpus, str):\n            if cpus[:2] == '0x' or cpus[:2] == '0X':\n                self._mask = int(cpus, 16)\n            else:\n                self._mask = list_to_mask(ranges_to_list(cpus))\n        elif isinstance(cpus, list):\n            self._mask = list_to_mask(cpus)\n        elif isinstance(cpus, cpu_mask):\n            self._mask = cpus._mask  # pylint: disable=protected-access\n        else:\n            msg = 'Unknown conversion from {} to cpu mask'\n            raise ValueError(msg.format(cpus))\n\n    def __bool__(self):\n        \"\"\"Allow for use in comparisons to check if a mask has been set\"\"\"\n        return bool(self._mask)\n\n    __nonzero__ = __bool__\n\n    def __repr__(self):\n        return 'cpu_mask: {}'.format(self.mask())\n\n    __str__ = __repr__\n\n    def list(self):\n        \"\"\"Returns a list of the indexes of bits that are set in the mask.\"\"\"\n        return list(reversed(mask_to_list(self._mask)))\n\n    def mask(self, prefix=True):\n        \"\"\"Returns a hex representation of the mask with an optional prefix\"\"\"\n        if prefix:\n            return hex(self._mask)\n        else:\n            return hex(self._mask)[2:]\n\n    def ranges(self):\n        \"\"\"Returns a sysfs-style ranges string\"\"\"\n        return list_to_ranges(self.list())\n\n    def to_pod(self):\n        return {'cpu_mask': self._mask}\n"
  },
  {
    "path": "wa/workloads/__init__.py",
    "content": ""
  },
  {
    "path": "wa/workloads/adobereader/__init__.py",
    "content": "#    Copyright 2014-2017 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\nimport os\n\nfrom wa import ApkUiautoWorkload, Parameter\nfrom wa.utils.types import list_of_strs\nfrom wa.framework.exception import ValidationError\n\n\nclass AdobeReader(ApkUiautoWorkload):\n\n    name = 'adobereader'\n    package_names = ['com.adobe.reader']\n    description = '''\n    The Adobe Reader workflow carries out the following typical productivity tasks.\n\n    Test description:\n\n    1. Open a local file on the device\n    2. Gestures test:\n        2.1. Swipe down across the central 50% of the screen in 200 x 5ms steps\n        2.2. Swipe up across the central 50% of the screen in 200 x 5ms steps\n        2.3. Swipe right from the edge of the screen in 50 x 5ms steps\n        2.4. Swipe left from the edge of the screen  in 50 x 5ms steps\n        2.5. Pinch out 50% in 100 x 5ms steps\n        2.6. Pinch In 50% in 100 x 5ms steps\n    3. Search test:\n        Search ``document_name`` for each string in the ``search_string_list``\n    4. 
Close the document\n\n    Known working APK version: 19.7.1.10709\n    '''\n\n    default_search_strings = [\n        'The quick brown fox jumps over the lazy dog',\n        'TEST_SEARCH_STRING',\n    ]\n\n    parameters = [\n        Parameter('document_name', kind=str, default='uxperf_test_doc.pdf',\n                  description='''\n                  The document name to use for the Gesture and Search test.\n                  '''),\n        Parameter('search_string_list', kind=list_of_strs, default=default_search_strings,\n                  constraint=lambda x: len(x) > 0,\n                  description='''\n                  For each string in the list, a document search is performed\n                  using the string as the search term. At least one must be\n                  provided.\n                  '''),\n    ]\n\n    def __init__(self, target, **kwargs):\n        super(AdobeReader, self).__init__(target, **kwargs)\n        self.deployable_assets = [self.document_name]\n        self.asset_directory = self.target.path.join(self.target.external_storage,\n                                                     'Android', 'data',\n                                                     'com.adobe.reader', 'files')\n\n    def init_resources(self, context):\n        super(AdobeReader, self).init_resources(context)\n        # Only accept certain file formats\n        if os.path.splitext(self.document_name.lower())[1] not in ['.pdf']:\n            raise ValidationError('{} must be a PDF file'.format(self.document_name))\n        self.gui.uiauto_params['filename'] = self.document_name\n        self.gui.uiauto_params['search_string_list'] = self.search_string_list\n\n    def setup(self, context):\n        super(AdobeReader, self).setup(context)\n        # Need to re-deploy each time to adobe folder as it is wiped upon clearing app\n        self.deploy_assets(context)\n"
  },
  {
    "path": "wa/workloads/adobereader/uiauto/app/build.gradle",
    "content": "apply plugin: 'com.android.application'\n\ndef packageName = \"com.arm.wa.uiauto.adobereader\"\n\nandroid {\n    compileSdkVersion 28\n    buildToolsVersion '25.0.0'\n    defaultConfig {\n        applicationId \"${packageName}\"\n        minSdkVersion 18\n        targetSdkVersion 28\n        testInstrumentationRunner \"android.support.test.runner.AndroidJUnitRunner\"\n    }\n    buildTypes {\n        applicationVariants.all { variant ->\n            variant.outputs.each { output ->\n                output.outputFileName = \"${packageName}.apk\"\n            }\n        }\n    }\n}\n\ndependencies {\n    implementation fileTree(include: ['*.jar'], dir: 'libs')\n    implementation 'com.android.support.test:runner:0.5'\n    implementation 'com.android.support.test:rules:0.5'\n    implementation 'com.android.support.test.uiautomator:uiautomator-v18:2.1.2'\n    implementation(name: 'uiauto', ext: 'aar')\n}\n\nrepositories {\n    flatDir {\n        dirs 'libs'\n    }\n}\n"
  },
  {
    "path": "wa/workloads/adobereader/uiauto/app/src/main/AndroidManifest.xml",
    "content": "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n<manifest xmlns:android=\"http://schemas.android.com/apk/res/android\"\n    package=\"com.arm.wa.uiauto.adobereader\"\n    android:versionCode=\"1\"\n    android:versionName=\"1.0\">\n\n\n    <instrumentation\n        android:name=\"android.support.test.runner.AndroidJUnitRunner\"\n        android:targetPackage=\"${applicationId}\"/>\n\n</manifest>\n"
  },
  {
    "path": "wa/workloads/adobereader/uiauto/app/src/main/java/com/arm/wa/uiauto/adobereader/UiAutomation.java",
    "content": "package com.arm.wa.uiauto.adobereader;\n\n/*    Copyright 2014-2017 ARM Limited\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n\nimport android.os.Bundle;\nimport android.support.test.runner.AndroidJUnit4;\nimport android.support.test.uiautomator.UiObject;\nimport android.support.test.uiautomator.UiObjectNotFoundException;\nimport android.support.test.uiautomator.UiSelector;\n\nimport com.arm.wa.uiauto.ApplaunchInterface;\nimport com.arm.wa.uiauto.UxPerfUiAutomation.GestureTestParams;\nimport com.arm.wa.uiauto.UxPerfUiAutomation.GestureType;\nimport com.arm.wa.uiauto.BaseUiAutomation;\nimport com.arm.wa.uiauto.ActionLogger;\nimport com.arm.wa.uiauto.UiAutoUtils;\n\n\nimport org.junit.Before;\nimport org.junit.Test;\nimport org.junit.runner.RunWith;\n\nimport java.util.Iterator;\nimport java.util.LinkedHashMap;\nimport java.util.Map.Entry;\nimport java.util.concurrent.TimeUnit;\n\nimport static com.arm.wa.uiauto.BaseUiAutomation.FindByCriteria.BY_DESC;\nimport static com.arm.wa.uiauto.BaseUiAutomation.FindByCriteria.BY_ID;\nimport static com.arm.wa.uiauto.BaseUiAutomation.FindByCriteria.BY_TEXT;\n\n\n@RunWith(AndroidJUnit4.class)\npublic class UiAutomation extends BaseUiAutomation implements ApplaunchInterface {\n\n    private long networkTimeout =  TimeUnit.SECONDS.toMillis(20);\n    private long searchTimeout =  TimeUnit.SECONDS.toMillis(20);\n\n    protected Bundle parameters;\n    protected String packageID;\n    
protected String filename;\n    protected String[] searchStrings;\n\n\n    @Before\n    public void initialize(){\n        parameters = getParams();\n        packageID = getPackageID(parameters);\n        filename = parameters.getString(\"filename\");\n        searchStrings = parameters.getStringArray(\"search_string_list\");\n    }\n\n    @Test\n    public void setup() throws Exception {\n        runApplicationSetup();\n    }\n\n    @Test\n    public void runWorkload() throws Exception {\n        openFile(filename);\n        gesturesTest();\n        searchPdfTest(searchStrings);\n        exitDocument();\n    }\n\n    @Test\n    public void teardown() throws Exception {\n        unsetScreenOrientation();\n    }\n\n    public void runApplicationSetup() throws Exception {\n        setScreenOrientation(ScreenOrientation.NATURAL);\n        dismissWelcomeView();\n    }\n\n    // Returns the launch command for the application.\n    public String getLaunchCommand() {\n        String launch_command;\n        launch_command = UiAutoUtils.createLaunchCommand(parameters);\n        return launch_command;\n    }\n\n    // Pass the workload parameters, used for applaunch\n    public void setWorkloadParameters(Bundle workload_parameters) {\n        parameters = workload_parameters;\n        packageID = getPackageID(parameters);\n    }\n\n    // Sets the UiObject that marks the end of the application launch.\n    public UiObject getLaunchEndObject() {\n        UiObject launchEndObject =\n            mDevice.findObject(new UiSelector().textContains(\"RECENT\")\n                                               .className(\"android.widget.TextView\"));\n        return launchEndObject;\n    }\n\n\n    private void dismissWelcomeView() throws Exception {\n\n        //Close optional sign in screen on newer versions (19.4.0.9813)\n        UiObject closeWelcomeImage = mDevice.findObject(new UiSelector().resourceId(packageID + \"optional_signing_cross_button\")\n            
.className(\"android.widget.ImageView\"));\n    \tif (closeWelcomeImage.exists()) {\n    \t\tcloseWelcomeImage.click();\n    \t}\n\n        // Deal with popup dialog message tutorial on newer versions\n        UiObject tutorialDialog = mDevice.findObject(new UiSelector().resourceId(packageID + \"close_card_button\")\n            .className(\"android.widget.ImageButton\"));\n\n        if (tutorialDialog.waitForExists(TimeUnit.SECONDS.toMillis(3))) {\n                tutorialDialog.click();\n        }\n\n        //Check to see if app is on home screen\n        if (mDevice.findObject(new UiSelector().textContains(\"Home\")).exists()) {\n            return;\n        }\n\n        // Support older version (Last known working 16.1)\n        UiObject welcomeView = getUiObjectByResourceId(\"android:id/content\",\n                                                       \"android.widget.FrameLayout\");\n        welcomeView.swipeLeft(10);\n        welcomeView.swipeLeft(10);\n\n        UiObject onboarding_finish_button =\n            mDevice.findObject(new UiSelector().resourceId(\"com.adobe.reader:id/onboarding_finish_button\"));\n\n        if (!onboarding_finish_button.exists()) {\n            welcomeView.swipeLeft(10);\n        }\n\n        if (onboarding_finish_button.exists()) {\n            clickUiObject(BY_ID, packageID + \"onboarding_finish_button\", \"android.widget.Button\");\n        }\n\n        // Deal with popup dialog message promoting Dropbox access\n        UiObject dropBoxDialog =\n                mDevice.findObject(new UiSelector().text(\"Now you can access your Dropbox files.\")\n                        .className(\"android.widget.TextView\"));\n        if (dropBoxDialog.exists()) {\n            clickUiObject(BY_TEXT, \"Remind Me Later\", \"android.widget.Button\");\n        }\n\n        // Also deal with the Dropbox CoachMark blue hint popup\n        UiObject dropBoxcoachMark =\n                mDevice.findObject(new UiSelector().description(\"CoachMark\")\n 
                                                  .className(\"android.widget.LinearLayout\"));\n        if (dropBoxcoachMark.exists()) {\n            tapDisplayCentre();\n        }\n\n        UiObject actionBarTitle = mDevice.findObject(new UiSelector().textContains(\"My Documents\")\n                                                            .className(\"android.widget.TextView\"));\n        actionBarTitle.waitForExists(uiAutoTimeout);\n    }\n\n    private void openFile(final String filename) throws Exception {\n        String testTag = \"open_document\";\n        ActionLogger logger = new ActionLogger(testTag, parameters);\n        \n        UiObject fileObject = findFileObject(filename);\n        logger.start();\n\n        fileObject.clickAndWaitForNewWindow(uiAutoTimeout);\n        // Wait for the doc to open by waiting for the viewPager UiObject to exist\n        UiObject viewPager =\n                mDevice.findObject(new UiSelector().resourceId(packageID + \"viewPager\"));\n        if (!viewPager.waitForExists(uiAutoTimeout)) {\n            throw new UiObjectNotFoundException(\"Could not find \\\"viewPager\\\".\");\n        };\n\n        logger.stop();\n    }\n\n    private UiObject findFileObject(String filename) throws Exception {\n        UiObject localFilesTab = mDevice.findObject(new UiSelector().textContains(\"LOCAL\")\n            .className(\"android.widget.TextView\"));\n        \n        // Support older versions\n        if (localFilesTab.exists()) {\n            localFilesTab.click();\n\n            UiObject directoryPath =\n                mDevice.findObject(new UiSelector().resourceId(packageID + \"directoryPath\"));\n            if (!directoryPath.waitForExists(TimeUnit.SECONDS.toMillis(60))) {\n                throw new UiObjectNotFoundException(\"Could not find any local files\");\n            }\n\n            // Click the button to search from the present file list view\n            UiObject searchButton =\n                    
mDevice.findObject(new UiSelector().resourceId(packageID + \"split_pane_search\"));\n            if (!searchButton.waitForExists(TimeUnit.SECONDS.toMillis(10))) {\n                throw new UiObjectNotFoundException(\"Could not find search button\");\n            }\n            searchButton.click();\n            // Force a refresh of files before searching\n            uiDeviceSwipe(Direction.DOWN, 100);\n            // Repeat as first swipe is sometimes ignored.\n            uiDeviceSwipe(Direction.DOWN, 100);\n\n            // Enter search text into the file searchBox.  This will automatically filter the list.\n            UiObject searchBox =\n                    mDevice.findObject(new UiSelector().resourceIdMatches(\".*search_src_text\")\n                                                       .classNameMatches(\"android.widget.Edit.*\"));\n\n            searchBox.setText(filename);\n\n            // Open a file from a file list view by searching for UiObjects containing the doc title.\n            return getUiObjectByText(filename, \"android.widget.TextView\");\n        }\n\n        // Support for newer version\n        UiObject searchNavigationButton = mDevice.findObject(new UiSelector()\n            .resourceIdMatches(packageID + \"bottombaritem_search\")\n            .className(\"android.widget.FrameLayout\"));\n\n        // On devices with larger screen sizes, layout is different \n        if(!searchNavigationButton.exists()) {\n            searchNavigationButton = getUiObjectByResourceId(packageID + \"search_button_home\",\n                                                             \"android.widget.TextView\");\n        }\n\n        searchNavigationButton.click();\n\n        UiObject searchBox =\n                mDevice.findObject(new UiSelector().resourceIdMatches(\".*search_src_text\")\n                                                   .classNameMatches(\"android.widget.EditText\"));\n\n        searchBox.click();\n        
searchBox.setText(filename);\n        mDevice.pressEnter();\n\n        // Remove file extension\n        return getUiObjectByText(filename.substring(0,filename.lastIndexOf(\".\")), \"android.widget.TextView\");\n    }\n\n    private void gesturesTest() throws Exception {\n        String testTag = \"gesture\";\n\n        // Perform a range of swipe tests at different speeds and on different views\n        LinkedHashMap<String, GestureTestParams> testParams = new LinkedHashMap<String, GestureTestParams>();\n        testParams.put(\"swipe_up\", new GestureTestParams(GestureType.UIDEVICE_SWIPE, Direction.UP, 100));\n        testParams.put(\"swipe_down\", new GestureTestParams(GestureType.UIDEVICE_SWIPE, Direction.DOWN, 100));\n        testParams.put(\"swipe_right\", new GestureTestParams(GestureType.UIOBJECT_SWIPE, Direction.RIGHT, 50));\n        testParams.put(\"swipe_left\", new GestureTestParams(GestureType.UIOBJECT_SWIPE, Direction.LEFT, 50));\n        testParams.put(\"pinch_out\", new GestureTestParams(GestureType.PINCH, PinchType.OUT, 100, 50));\n        testParams.put(\"pinch_in\", new GestureTestParams(GestureType.PINCH, PinchType.IN, 100, 50));\n\n        Iterator<Entry<String, GestureTestParams>> it = testParams.entrySet().iterator();\n\n        // On some devices the first device swipe is ignored so perform it here\n        // to prevent the first test gesture from being incorrectly logged\n        uiDeviceSwipe(Direction.DOWN, 200);\n\n        UiObject view =\n                mDevice.findObject(new UiSelector().resourceId(packageID + \"pageView\"));\n        if (!view.waitForExists(TimeUnit.SECONDS.toMillis(10))) {\n            throw new UiObjectNotFoundException(\"Could not find page view\");\n        }\n\n        while (it.hasNext()) {\n            Entry<String, GestureTestParams> pair = it.next();\n            GestureType type = pair.getValue().gestureType;\n            Direction dir = pair.getValue().gestureDirection;\n            PinchType pinch = 
pair.getValue().pinchType;\n            int steps = pair.getValue().steps;\n            int percent = pair.getValue().percent;\n\n            String runName = String.format(testTag + \"_\" + pair.getKey());\n            ActionLogger logger = new ActionLogger(runName, parameters);\n            logger.start();\n\n            switch (type) {\n                case UIDEVICE_SWIPE:\n                    uiDeviceSwipe(dir, steps);\n                    break;\n                case UIOBJECT_SWIPE:\n                    uiObjectSwipe(view, dir, steps);\n                    break;\n                case PINCH:\n                    uiObjectVertPinch(view, pinch, steps, percent);\n                    break;\n                default:\n                    break;\n            }\n\n            logger.stop();\n        }\n    }\n\n    private void searchPdfTest(final String[] searchStrings) throws Exception {\n        String testTag = \"search\";\n\n        // Tap the centre to bring up the menu gui\n        // Sometimes the first tap wont register, so check if search appears\n        // and if not, tap again before continuing\n        tapDisplayCentre();\n        UiObject searchIcon =\n                mDevice.findObject(new UiSelector().resourceId(packageID + \"document_view_search_icon\"));\n        if (!searchIcon.waitForExists(uiAutoTimeout)) {\n            tapDisplayCentre();\n        }\n\n        if (!searchIcon.waitForExists(uiAutoTimeout)) {\n            searchIcon =\n                    mDevice.findObject(new UiSelector().resourceId(packageID + \"document_view_search\"));\n            if (!searchIcon.waitForExists(uiAutoTimeout)) {\n                tapDisplayCentre();\n            }\n        }\n\n        for (int i = 0; i < searchStrings.length; i++) {\n            String runName = String.format(testTag + \"_string\" + i);\n            ActionLogger logger = new ActionLogger(runName, parameters);\n\n            // Click on the search button icon and enter text in the box.  
This closes the keyboard\n            // so click the box again and press Enter to start the search.\n            searchIcon.clickAndWaitForNewWindow();\n\n            UiObject searchBox =\n                    mDevice.findObject(new UiSelector().resourceIdMatches(\".*search_src_text\")\n                                                       .className(\"android.widget.EditText\"));\n\n            searchBox.setText(searchStrings[i]);\n\n            logger.start();\n\n            pressEnter();\n\n            // Check the progress bar icon.  When this disappears the search is complete.\n            UiObject progressBar =\n                    mDevice.findObject(new UiSelector().resourceId(packageID + \"searchProgress\")\n                                                       .className(\"android.widget.ProgressBar\"));\n            progressBar.waitForExists(uiAutoTimeout);\n            progressBar.waitUntilGone(searchTimeout);\n\n            logger.stop();\n            mDevice.pressBack();\n        }\n    }\n\n    private void exitDocument() throws Exception {\n        // Return from the document view to the file list view by pressing home and my documents.\n        UiObject homeButton =\n                mDevice.findObject(new UiSelector().resourceId(\"android:id/home\")\n                        .className(\"android.widget.ImageView\"));\n        // Newer version of app have a menu button instead of home button.\n        UiObject menuButton =\n                mDevice.findObject(new UiSelector().description(\"Navigate up\"));\n\n        if (!(homeButton.exists() || menuButton.exists())){\n            tapDisplayCentre();\n        }\n\n        if (homeButton.exists()){\n            homeButton.click();\n        }\n        else if (menuButton.exists()){\n            menuButton.click();\n        }\n        else {\n            mDevice.pressBack();\n        }\n\n        UiObject searchBackButton =\n                mDevice.findObject(new UiSelector().description(\"Collapse\")\n 
                                                  .className(\"android.widget.ImageButton\"));\n        if (searchBackButton.exists()){\n            searchBackButton.click();\n        }\n        else {\n        \tmDevice.pressBack();\n        }\n    }\n}\n"
  },
  {
    "path": "wa/workloads/adobereader/uiauto/build.gradle",
    "content": "// Top-level build file where you can add configuration options common to all sub-projects/modules.\n\nbuildscript {\n    repositories {\n        jcenter()\n        google()\n    }\n    dependencies {\n        classpath 'com.android.tools.build:gradle:7.2.1'\n\n        // NOTE: Do not place your application dependencies here; they belong\n        // in the individual module build.gradle files\n    }\n}\n\nallprojects {\n    repositories {\n        jcenter()\n        google()\n    }\n}\n\ntask clean(type: Delete) {\n    delete rootProject.buildDir\n}\n"
  },
  {
    "path": "wa/workloads/adobereader/uiauto/build.sh",
    "content": "#!/bin/bash\n#    Copyright 2013-2017 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\nset -e\n\n# CD into build dir if possible - allows building from any directory\nscript_path='.'\nif `readlink -f $0 &>/dev/null`; then\n    script_path=`readlink -f $0 2>/dev/null`\nfi\nscript_dir=`dirname $script_path`\ncd $script_dir\n\n# Ensure gradlew exists before starting\nif [[ ! -f gradlew ]]; then\n    echo 'gradlew file not found! Check that you are in the right directory.'\n    exit 9\nfi\n\n# Copy base class library from wa dist\nlibs_dir=app/libs\nbase_class=`python3 -c \"import os, wa; print(os.path.join(os.path.dirname(wa.__file__), 'framework', 'uiauto', 'uiauto.aar'))\"`\nmkdir -p $libs_dir\ncp $base_class $libs_dir\n\n# Build and return appropriate exit code if failed\n# gradle build\n./gradlew clean :app:assembleDebug\nexit_code=$?\nif [[ $exit_code -ne 0 ]]; then\n    echo \"ERROR: 'gradle build' exited with code $exit_code\"\n    exit $exit_code\nfi\n\n# If successful move APK file to workload folder (overwrite previous)\npackage=com.arm.wa.uiauto.adobereader\nrm -f ../$package\nif [[ -f app/build/outputs/apk/debug/$package.apk ]]; then\n    cp app/build/outputs/apk/debug/$package.apk ../$package.apk\nelse\n    echo 'ERROR: UiAutomator apk could not be found!'\n    exit 9\nfi\n"
  },
  {
    "path": "wa/workloads/adobereader/uiauto/gradle/wrapper/gradle-wrapper.properties",
    "content": "#Wed May 03 15:42:44 BST 2017\ndistributionBase=GRADLE_USER_HOME\ndistributionPath=wrapper/dists\nzipStoreBase=GRADLE_USER_HOME\nzipStorePath=wrapper/dists\ndistributionUrl=https\\://services.gradle.org/distributions/gradle-7.3.3-all.zip\n"
  },
  {
    "path": "wa/workloads/adobereader/uiauto/gradlew",
    "content": "#!/usr/bin/env bash\n\n##############################################################################\n##\n##  Gradle start up script for UN*X\n##\n##############################################################################\n\n# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.\nDEFAULT_JVM_OPTS=\"\"\n\nAPP_NAME=\"Gradle\"\nAPP_BASE_NAME=`basename \"$0\"`\n\n# Use the maximum available, or set MAX_FD != -1 to use that value.\nMAX_FD=\"maximum\"\n\nwarn ( ) {\n    echo \"$*\"\n}\n\ndie ( ) {\n    echo\n    echo \"$*\"\n    echo\n    exit 1\n}\n\n# OS specific support (must be 'true' or 'false').\ncygwin=false\nmsys=false\ndarwin=false\ncase \"`uname`\" in\n  CYGWIN* )\n    cygwin=true\n    ;;\n  Darwin* )\n    darwin=true\n    ;;\n  MINGW* )\n    msys=true\n    ;;\nesac\n\n# Attempt to set APP_HOME\n# Resolve links: $0 may be a link\nPRG=\"$0\"\n# Need this for relative symlinks.\nwhile [ -h \"$PRG\" ] ; do\n    ls=`ls -ld \"$PRG\"`\n    link=`expr \"$ls\" : '.*-> \\(.*\\)$'`\n    if expr \"$link\" : '/.*' > /dev/null; then\n        PRG=\"$link\"\n    else\n        PRG=`dirname \"$PRG\"`\"/$link\"\n    fi\ndone\nSAVED=\"`pwd`\"\ncd \"`dirname \\\"$PRG\\\"`/\" >/dev/null\nAPP_HOME=\"`pwd -P`\"\ncd \"$SAVED\" >/dev/null\n\nCLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar\n\n# Determine the Java command to use to start the JVM.\nif [ -n \"$JAVA_HOME\" ] ; then\n    if [ -x \"$JAVA_HOME/jre/sh/java\" ] ; then\n        # IBM's JDK on AIX uses strange locations for the executables\n        JAVACMD=\"$JAVA_HOME/jre/sh/java\"\n    else\n        JAVACMD=\"$JAVA_HOME/bin/java\"\n    fi\n    if [ ! 
-x \"$JAVACMD\" ] ; then\n        die \"ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME\n\nPlease set the JAVA_HOME variable in your environment to match the\nlocation of your Java installation.\"\n    fi\nelse\n    JAVACMD=\"java\"\n    which java >/dev/null 2>&1 || die \"ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.\n\nPlease set the JAVA_HOME variable in your environment to match the\nlocation of your Java installation.\"\nfi\n\n# Increase the maximum file descriptors if we can.\nif [ \"$cygwin\" = \"false\" -a \"$darwin\" = \"false\" ] ; then\n    MAX_FD_LIMIT=`ulimit -H -n`\n    if [ $? -eq 0 ] ; then\n        if [ \"$MAX_FD\" = \"maximum\" -o \"$MAX_FD\" = \"max\" ] ; then\n            MAX_FD=\"$MAX_FD_LIMIT\"\n        fi\n        ulimit -n $MAX_FD\n        if [ $? -ne 0 ] ; then\n            warn \"Could not set maximum file descriptor limit: $MAX_FD\"\n        fi\n    else\n        warn \"Could not query maximum file descriptor limit: $MAX_FD_LIMIT\"\n    fi\nfi\n\n# For Darwin, add options to specify how the application appears in the dock\nif $darwin; then\n    GRADLE_OPTS=\"$GRADLE_OPTS \\\"-Xdock:name=$APP_NAME\\\" \\\"-Xdock:icon=$APP_HOME/media/gradle.icns\\\"\"\nfi\n\n# For Cygwin, switch paths to Windows format before running java\nif $cygwin ; then\n    APP_HOME=`cygpath --path --mixed \"$APP_HOME\"`\n    CLASSPATH=`cygpath --path --mixed \"$CLASSPATH\"`\n    JAVACMD=`cygpath --unix \"$JAVACMD\"`\n\n    # We build the pattern for arguments to be converted via cygpath\n    ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`\n    SEP=\"\"\n    for dir in $ROOTDIRSRAW ; do\n        ROOTDIRS=\"$ROOTDIRS$SEP$dir\"\n        SEP=\"|\"\n    done\n    OURCYGPATTERN=\"(^($ROOTDIRS))\"\n    # Add a user-defined pattern to the cygpath arguments\n    if [ \"$GRADLE_CYGPATTERN\" != \"\" ] ; then\n        OURCYGPATTERN=\"$OURCYGPATTERN|($GRADLE_CYGPATTERN)\"\n    fi\n    # Now convert the arguments - 
kludge to limit ourselves to /bin/sh\n    i=0\n    for arg in \"$@\" ; do\n        CHECK=`echo \"$arg\"|egrep -c \"$OURCYGPATTERN\" -`\n        CHECK2=`echo \"$arg\"|egrep -c \"^-\"`                                 ### Determine if an option\n\n        if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then                    ### Added a condition\n            eval `echo args$i`=`cygpath --path --ignore --mixed \"$arg\"`\n        else\n            eval `echo args$i`=\"\\\"$arg\\\"\"\n        fi\n        i=$((i+1))\n    done\n    case $i in\n        (0) set -- ;;\n        (1) set -- \"$args0\" ;;\n        (2) set -- \"$args0\" \"$args1\" ;;\n        (3) set -- \"$args0\" \"$args1\" \"$args2\" ;;\n        (4) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" ;;\n        (5) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" ;;\n        (6) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" \"$args5\" ;;\n        (7) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" \"$args5\" \"$args6\" ;;\n        (8) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" \"$args5\" \"$args6\" \"$args7\" ;;\n        (9) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" \"$args5\" \"$args6\" \"$args7\" \"$args8\" ;;\n    esac\nfi\n\n# Split up the JVM_OPTS And GRADLE_OPTS values into an array, following the shell quoting and substitution rules\nfunction splitJvmOpts() {\n    JVM_OPTS=(\"$@\")\n}\neval splitJvmOpts $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS\nJVM_OPTS[${#JVM_OPTS[*]}]=\"-Dorg.gradle.appname=$APP_BASE_NAME\"\n\nexec \"$JAVACMD\" \"${JVM_OPTS[@]}\" -classpath \"$CLASSPATH\" org.gradle.wrapper.GradleWrapperMain \"$@\"\n"
  },
  {
    "path": "wa/workloads/adobereader/uiauto/gradlew.bat",
    "content": "@if \"%DEBUG%\" == \"\" @echo off\r\n@rem ##########################################################################\r\n@rem\r\n@rem  Gradle startup script for Windows\r\n@rem\r\n@rem ##########################################################################\r\n\r\n@rem Set local scope for the variables with windows NT shell\r\nif \"%OS%\"==\"Windows_NT\" setlocal\r\n\r\n@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.\r\nset DEFAULT_JVM_OPTS=\r\n\r\nset DIRNAME=%~dp0\r\nif \"%DIRNAME%\" == \"\" set DIRNAME=.\r\nset APP_BASE_NAME=%~n0\r\nset APP_HOME=%DIRNAME%\r\n\r\n@rem Find java.exe\r\nif defined JAVA_HOME goto findJavaFromJavaHome\r\n\r\nset JAVA_EXE=java.exe\r\n%JAVA_EXE% -version >NUL 2>&1\r\nif \"%ERRORLEVEL%\" == \"0\" goto init\r\n\r\necho.\r\necho ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.\r\necho.\r\necho Please set the JAVA_HOME variable in your environment to match the\r\necho location of your Java installation.\r\n\r\ngoto fail\r\n\r\n:findJavaFromJavaHome\r\nset JAVA_HOME=%JAVA_HOME:\"=%\r\nset JAVA_EXE=%JAVA_HOME%/bin/java.exe\r\n\r\nif exist \"%JAVA_EXE%\" goto init\r\n\r\necho.\r\necho ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%\r\necho.\r\necho Please set the JAVA_HOME variable in your environment to match the\r\necho location of your Java installation.\r\n\r\ngoto fail\r\n\r\n:init\r\n@rem Get command-line arguments, handling Windowz variants\r\n\r\nif not \"%OS%\" == \"Windows_NT\" goto win9xME_args\r\nif \"%@eval[2+2]\" == \"4\" goto 4NT_args\r\n\r\n:win9xME_args\r\n@rem Slurp the command line arguments.\r\nset CMD_LINE_ARGS=\r\nset _SKIP=2\r\n\r\n:win9xME_args_slurp\r\nif \"x%~1\" == \"x\" goto execute\r\n\r\nset CMD_LINE_ARGS=%*\r\ngoto execute\r\n\r\n:4NT_args\r\n@rem Get arguments from the 4NT Shell from JP Software\r\nset CMD_LINE_ARGS=%$\r\n\r\n:execute\r\n@rem Setup the command line\r\n\r\nset 
CLASSPATH=%APP_HOME%\\gradle\\wrapper\\gradle-wrapper.jar\r\n\r\n@rem Execute Gradle\r\n\"%JAVA_EXE%\" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% \"-Dorg.gradle.appname=%APP_BASE_NAME%\" -classpath \"%CLASSPATH%\" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%\r\n\r\n:end\r\n@rem End local scope for the variables with windows NT shell\r\nif \"%ERRORLEVEL%\"==\"0\" goto mainEnd\r\n\r\n:fail\r\nrem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of\r\nrem the _cmd.exe /c_ return code!\r\nif  not \"\" == \"%GRADLE_EXIT_CONSOLE%\" exit 1\r\nexit /b 1\r\n\r\n:mainEnd\r\nif \"%OS%\"==\"Windows_NT\" endlocal\r\n\r\n:omega\r\n"
  },
  {
    "path": "wa/workloads/adobereader/uiauto/settings.gradle",
    "content": "include ':app'\n"
  },
  {
    "path": "wa/workloads/aitutu/__init__.py",
    "content": "#    Copyright 2014-2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\nimport re\n\nfrom wa import ApkUiautoWorkload\nfrom wa.framework.exception import WorkloadError\n\n\nclass Aitutu(ApkUiautoWorkload):\n\n    name = 'aitutu'\n    package_names = ['com.antutu.aibenchmark']\n    regex_matches = [re.compile(r'Overall Score ([\\d.]+)'),\n                     re.compile(r'Image Total Score ([\\d.]+) ([\\w]+) ([\\w]+)'),\n                     re.compile(r'Image Speed Score ([\\d.]+) ([\\w]+) ([\\w]+)'),\n                     re.compile(r'Image Accuracy Score ([\\d.]+) ([\\w]+) ([\\w]+)'),\n                     re.compile(r'Object Total Score ([\\d.]+) ([\\w]+) ([\\w]+)'),\n                     re.compile(r'Object Speed Score ([\\d.]+) ([\\w]+) ([\\w]+)'),\n                     re.compile(r'Object Accuracy Score ([\\d.]+) ([\\w]+) ([\\w]+)')]\n    description = '''\n    Executes Aitutu Image Speed/Accuracy and Object Speed/Accuracy tests\n\n    The Aitutu workflow carries out the following tasks.\n    1. Open Aitutu application\n    2. Download the resources for the test\n    3. 
Execute the tests\n\n    Known working APK version: 1.0.3\n    '''\n\n    requires_network = True\n\n    def __init__(self, target, **kwargs):\n        super(Aitutu, self).__init__(target, **kwargs)\n        self.gui.timeout = 1200000\n\n    def update_output(self, context):\n        super(Aitutu, self).update_output(context)\n        expected_results = len(self.regex_matches)\n        logcat_file = context.get_artifact_path('logcat')\n        with open(logcat_file, errors='replace') as fh:\n            for line in fh:\n                for regex in self.regex_matches:\n                    match = regex.search(line)\n                    if match:\n                        classifiers = {}\n                        result = match.group(1)\n                        if (len(match.groups())) > 1:\n                            entry = regex.pattern.rsplit(None, 3)[0]\n                            classifiers = {'model': match.group(3)}\n                        else:\n                            entry = regex.pattern.rsplit(None, 1)[0]\n                        context.add_metric(entry, result, '', lower_is_better=False, classifiers=classifiers)\n                        expected_results -= 1\n        if expected_results > 0:\n            msg = \"The Aitutu workload has failed. Expected {} scores, Detected {} scores.\"\n            raise WorkloadError(msg.format(len(self.regex_matches), expected_results))\n"
  },
  {
    "path": "wa/workloads/aitutu/uiauto/app/build.gradle",
    "content": "apply plugin: 'com.android.application'\n\ndef packageName = \"com.arm.wa.uiauto.aitutu\"\n\nandroid {\n    compileSdkVersion 28\n    buildToolsVersion '25.0.0'\n    defaultConfig {\n        applicationId \"${packageName}\"\n        minSdkVersion 18\n        targetSdkVersion 28\n        testInstrumentationRunner \"android.support.test.runner.AndroidJUnitRunner\"\n    }\n    buildTypes {\n        applicationVariants.all { variant ->\n            variant.outputs.each { output ->\n                output.outputFileName = \"${packageName}.apk\"\n            }\n        }\n    }\n}\n\ndependencies {\n    implementation fileTree(include: ['*.jar'], dir: 'libs')\n    implementation 'com.android.support.test:runner:0.5'\n    implementation 'com.android.support.test:rules:0.5'\n    implementation 'com.android.support.test.uiautomator:uiautomator-v18:2.1.2'\n    implementation(name: 'uiauto', ext: 'aar')\n}\n\nrepositories {\n    flatDir {\n        dirs 'libs'\n    }\n}\n"
  },
  {
    "path": "wa/workloads/aitutu/uiauto/app/src/main/AndroidManifest.xml",
    "content": "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n<manifest xmlns:android=\"http://schemas.android.com/apk/res/android\"\n    package=\"com.arm.wa.uiauto.aitutu\"\n    android:versionCode=\"1\"\n    android:versionName=\"1.0\">\n\n\n    <instrumentation\n        android:name=\"android.support.test.runner.AndroidJUnitRunner\"\n        android:targetPackage=\"${applicationId}\"/>\n\n</manifest>\n"
  },
  {
    "path": "wa/workloads/aitutu/uiauto/app/src/main/java/com/arm/wa/uiauto/aitutu/UiAutomation.java",
    "content": "/*    Copyright 2013-2018 ARM Limited\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n*/\n\n\npackage com.arm.wa.uiauto.aitutu;\n\nimport android.app.Activity;\nimport android.os.Bundle;\nimport android.graphics.Rect;\nimport android.support.test.runner.AndroidJUnit4;\nimport android.support.test.uiautomator.UiObject;\nimport android.support.test.uiautomator.UiObjectNotFoundException;\nimport android.support.test.uiautomator.UiSelector;\nimport android.support.test.uiautomator.UiScrollable;\nimport android.view.KeyEvent;\nimport android.util.Log;\n\nimport com.arm.wa.uiauto.BaseUiAutomation;\n\nimport org.junit.Before;\nimport org.junit.Test;\nimport org.junit.runner.RunWith;\n\nimport java.util.concurrent.TimeUnit;\n\n\n@RunWith(AndroidJUnit4.class)\npublic class UiAutomation extends BaseUiAutomation {\n\n    public static String TAG = \"UXPERF\";\n\n    @Test\n    public void setup() throws Exception {\n        clearPopups();\n        downloadAssets();\n    }\n\n    @Test\n    public void runWorkload() throws Exception {\n        runBenchmark();\n    }\n\n    @Test\n    public void extractResults() throws Exception {\n        getScores();\n    }\n\n    public void clearPopups() throws Exception {\n\n        UiObject agreement =\n            mDevice.findObject(new UiSelector().textContains(\"NEXT\"));\n        agreement.waitForExists(5000);\n        if (agreement.exists()) {\n            agreement.click();\n        }\n\n        
UiSelector selector = new UiSelector();\n\n        UiObject cancel = mDevice.findObject(selector.textContains(\"CANCEL\")\n                                             .className(\"android.widget.Button\"));\n        cancel.waitForExists(60000);\n        if (cancel.exists()){\n            cancel.click();\n        }\n    }\n\n    public void downloadAssets() throws Exception {\n        UiSelector selector = new UiSelector();\n        //Start the tests\n        UiObject start = mDevice.findObject(selector.textContains(\"Start Testing\")\n                                                     .className(\"android.widget.TextView\"));\n        waitObject(start);\n        start.click();\n\n        UiObject check = mDevice.findObject(selector.textContains(\"classification\")\n                                                     .className(\"android.widget.TextView\"));\n        waitObject(check);\n    }\n\n    public void runBenchmark() throws Exception {\n        UiSelector selector = new UiSelector();\n\n        //Wait for the tests to complete\n        UiObject complete =\n            mDevice.findObject(selector.text(\"TEST AGAIN\")\n                .className(\"android.widget.Button\"));\n        complete.waitForExists(1200000);\n\n    }\n\n    public void getScores() throws Exception {\n        mDevice.waitForIdle(5000);\n        UiSelector selector = new UiSelector();\n        //Declare the models used\n        UiObject imageMod =\n            mDevice.findObject(selector.resourceId(\"com.antutu.aibenchmark:id/recyclerView\"))\n            .getChild(selector.index(1))\n            .getChild(selector.resourceId(\"com.antutu.aibenchmark:id/textViewAIModelName\"));\n        UiObject objectMod =\n            mDevice.findObject(selector.resourceId(\"com.antutu.aibenchmark:id/recyclerView\"))\n            .getChild(selector.index(4))\n            .getChild(selector.resourceId(\"com.antutu.aibenchmark:id/textViewAIModelName\"));\n        //Log the scores and models\n        
UiObject totalScore =\n            mDevice.findObject(selector.resourceId(\"com.antutu.aibenchmark:id/textViewTotalScore\"));\n        Log.d(TAG, \"Overall Score \" + totalScore.getText());\n        UiObject imageTotal =\n            mDevice.findObject(selector.resourceId(\"com.antutu.aibenchmark:id/recyclerView\"))\n            .getChild(selector.index(1))\n            .getChild(selector.resourceId(\"com.antutu.aibenchmark:id/textViewSIDScore\"));\n        Log.d(TAG, \"Image Total Score \" + imageTotal.getText() + \" Model \" + imageMod.getText());\n        UiObject imageSpeed =\n            mDevice.findObject(selector.resourceId(\"com.antutu.aibenchmark:id/recyclerView\"))\n            .getChild(selector.index(2))\n            .getChild(selector.resourceId(\"com.antutu.aibenchmark:id/textViewBIDScore\"));\n        Log.d(TAG, \"Image Speed Score \" + imageSpeed.getText() + \" Model \" + imageMod.getText());\n        UiObject imageAcc =\n            mDevice.findObject(selector.resourceId(\"com.antutu.aibenchmark:id/recyclerView\"))\n            .getChild(selector.index(3))\n            .getChild(selector.resourceId(\"com.antutu.aibenchmark:id/textViewBIDScore\"));\n        Log.d(TAG, \"Image Accuracy Score \" + imageAcc.getText() + \" Model \" + imageMod.getText());\n        UiObject objectTotal =\n            mDevice.findObject(selector.resourceId(\"com.antutu.aibenchmark:id/recyclerView\"))\n            .getChild(selector.index(4))\n            .getChild(selector.resourceId(\"com.antutu.aibenchmark:id/textViewSIDScore\"));\n        Log.d(TAG, \"Object Total Score \" + objectTotal.getText() + \" Model \" + objectMod.getText());\n        UiObject objectSpeed =\n            mDevice.findObject(selector.resourceId(\"com.antutu.aibenchmark:id/recyclerView\"))\n            .getChild(selector.index(5))\n            .getChild(selector.resourceId(\"com.antutu.aibenchmark:id/textViewBIDScore\"));\n        Log.d(TAG, \"Object Speed Score \" + objectSpeed.getText() + \" Model 
\" + objectMod.getText());\n        UiObject objectAcc =\n            mDevice.findObject(selector.resourceId(\"com.antutu.aibenchmark:id/recyclerView\"))\n            .getChild(selector.index(6))\n            .getChild(selector.resourceId(\"com.antutu.aibenchmark:id/textViewBIDScore\"));\n        Log.d(TAG, \"Object Accuracy Score \" + objectAcc.getText() + \" Model \" + objectMod.getText());\n    }\n}\n"
  },
  {
    "path": "wa/workloads/aitutu/uiauto/build.gradle",
    "content": "// Top-level build file where you can add configuration options common to all sub-projects/modules.\n\nbuildscript {\n    repositories {\n        jcenter()\n        google()\n    }\n    dependencies {\n        classpath 'com.android.tools.build:gradle:7.2.1'\n\n        // NOTE: Do not place your application dependencies here; they belong\n        // in the individual module build.gradle files\n    }\n}\n\nallprojects {\n    repositories {\n        jcenter()\n        google()\n    }\n}\n\ntask clean(type: Delete) {\n    delete rootProject.buildDir\n}\n"
  },
  {
    "path": "wa/workloads/aitutu/uiauto/build.sh",
    "content": "#!/bin/bash\n#    Copyright 2013-2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\nset -e\n\n# CD into build dir if possible - allows building from any directory\nscript_path='.'\nif `readlink -f $0 &>/dev/null`; then\n    script_path=`readlink -f $0 2>/dev/null`\nfi\nscript_dir=`dirname $script_path`\ncd $script_dir\n\n# Ensure gradelw exists before starting\nif [[ ! -f gradlew ]]; then\n    echo 'gradlew file not found! Check that you are in the right directory.'\n    exit 9\nfi\n\n# Copy base class library from wa dist\nlibs_dir=app/libs\nbase_class=`python3 -c \"import os, wa; print(os.path.join(os.path.dirname(wa.__file__), 'framework', 'uiauto', 'uiauto.aar'))\"`\nmkdir -p $libs_dir\ncp $base_class $libs_dir\n\n# Build and return appropriate exit code if failed\n# gradle build\n./gradlew clean :app:assembleDebug\nexit_code=$?\nif [[ $exit_code -ne 0 ]]; then\n    echo \"ERROR: 'gradle build' exited with code $exit_code\"\n    exit $exit_code\nfi\n\n# If successful move APK file to workload folder (overwrite previous)\npackage=com.arm.wa.uiauto.aitutu\nrm -f ../$package\nif [[ -f app/build/outputs/apk/debug/$package.apk ]]; then\n    cp app/build/outputs/apk/debug/$package.apk ../$package.apk\nelse\n    echo 'ERROR: UiAutomator apk could not be found!'\n    exit 9\nfi\n"
  },
  {
    "path": "wa/workloads/aitutu/uiauto/gradle/wrapper/gradle-wrapper.properties",
    "content": "#Wed May 03 15:42:44 BST 2017\ndistributionBase=GRADLE_USER_HOME\ndistributionPath=wrapper/dists\nzipStoreBase=GRADLE_USER_HOME\nzipStorePath=wrapper/dists\ndistributionUrl=https\\://services.gradle.org/distributions/gradle-7.3.3-all.zip\n"
  },
  {
    "path": "wa/workloads/aitutu/uiauto/gradlew",
    "content": "#!/usr/bin/env bash\n\n##############################################################################\n##\n##  Gradle start up script for UN*X\n##\n##############################################################################\n\n# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.\nDEFAULT_JVM_OPTS=\"\"\n\nAPP_NAME=\"Gradle\"\nAPP_BASE_NAME=`basename \"$0\"`\n\n# Use the maximum available, or set MAX_FD != -1 to use that value.\nMAX_FD=\"maximum\"\n\nwarn ( ) {\n    echo \"$*\"\n}\n\ndie ( ) {\n    echo\n    echo \"$*\"\n    echo\n    exit 1\n}\n\n# OS specific support (must be 'true' or 'false').\ncygwin=false\nmsys=false\ndarwin=false\ncase \"`uname`\" in\n  CYGWIN* )\n    cygwin=true\n    ;;\n  Darwin* )\n    darwin=true\n    ;;\n  MINGW* )\n    msys=true\n    ;;\nesac\n\n# Attempt to set APP_HOME\n# Resolve links: $0 may be a link\nPRG=\"$0\"\n# Need this for relative symlinks.\nwhile [ -h \"$PRG\" ] ; do\n    ls=`ls -ld \"$PRG\"`\n    link=`expr \"$ls\" : '.*-> \\(.*\\)$'`\n    if expr \"$link\" : '/.*' > /dev/null; then\n        PRG=\"$link\"\n    else\n        PRG=`dirname \"$PRG\"`\"/$link\"\n    fi\ndone\nSAVED=\"`pwd`\"\ncd \"`dirname \\\"$PRG\\\"`/\" >/dev/null\nAPP_HOME=\"`pwd -P`\"\ncd \"$SAVED\" >/dev/null\n\nCLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar\n\n# Determine the Java command to use to start the JVM.\nif [ -n \"$JAVA_HOME\" ] ; then\n    if [ -x \"$JAVA_HOME/jre/sh/java\" ] ; then\n        # IBM's JDK on AIX uses strange locations for the executables\n        JAVACMD=\"$JAVA_HOME/jre/sh/java\"\n    else\n        JAVACMD=\"$JAVA_HOME/bin/java\"\n    fi\n    if [ ! 
-x \"$JAVACMD\" ] ; then\n        die \"ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME\n\nPlease set the JAVA_HOME variable in your environment to match the\nlocation of your Java installation.\"\n    fi\nelse\n    JAVACMD=\"java\"\n    which java >/dev/null 2>&1 || die \"ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.\n\nPlease set the JAVA_HOME variable in your environment to match the\nlocation of your Java installation.\"\nfi\n\n# Increase the maximum file descriptors if we can.\nif [ \"$cygwin\" = \"false\" -a \"$darwin\" = \"false\" ] ; then\n    MAX_FD_LIMIT=`ulimit -H -n`\n    if [ $? -eq 0 ] ; then\n        if [ \"$MAX_FD\" = \"maximum\" -o \"$MAX_FD\" = \"max\" ] ; then\n            MAX_FD=\"$MAX_FD_LIMIT\"\n        fi\n        ulimit -n $MAX_FD\n        if [ $? -ne 0 ] ; then\n            warn \"Could not set maximum file descriptor limit: $MAX_FD\"\n        fi\n    else\n        warn \"Could not query maximum file descriptor limit: $MAX_FD_LIMIT\"\n    fi\nfi\n\n# For Darwin, add options to specify how the application appears in the dock\nif $darwin; then\n    GRADLE_OPTS=\"$GRADLE_OPTS \\\"-Xdock:name=$APP_NAME\\\" \\\"-Xdock:icon=$APP_HOME/media/gradle.icns\\\"\"\nfi\n\n# For Cygwin, switch paths to Windows format before running java\nif $cygwin ; then\n    APP_HOME=`cygpath --path --mixed \"$APP_HOME\"`\n    CLASSPATH=`cygpath --path --mixed \"$CLASSPATH\"`\n    JAVACMD=`cygpath --unix \"$JAVACMD\"`\n\n    # We build the pattern for arguments to be converted via cygpath\n    ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`\n    SEP=\"\"\n    for dir in $ROOTDIRSRAW ; do\n        ROOTDIRS=\"$ROOTDIRS$SEP$dir\"\n        SEP=\"|\"\n    done\n    OURCYGPATTERN=\"(^($ROOTDIRS))\"\n    # Add a user-defined pattern to the cygpath arguments\n    if [ \"$GRADLE_CYGPATTERN\" != \"\" ] ; then\n        OURCYGPATTERN=\"$OURCYGPATTERN|($GRADLE_CYGPATTERN)\"\n    fi\n    # Now convert the arguments - 
kludge to limit ourselves to /bin/sh\n    i=0\n    for arg in \"$@\" ; do\n        CHECK=`echo \"$arg\"|egrep -c \"$OURCYGPATTERN\" -`\n        CHECK2=`echo \"$arg\"|egrep -c \"^-\"`                                 ### Determine if an option\n\n        if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then                    ### Added a condition\n            eval `echo args$i`=`cygpath --path --ignore --mixed \"$arg\"`\n        else\n            eval `echo args$i`=\"\\\"$arg\\\"\"\n        fi\n        i=$((i+1))\n    done\n    case $i in\n        (0) set -- ;;\n        (1) set -- \"$args0\" ;;\n        (2) set -- \"$args0\" \"$args1\" ;;\n        (3) set -- \"$args0\" \"$args1\" \"$args2\" ;;\n        (4) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" ;;\n        (5) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" ;;\n        (6) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" \"$args5\" ;;\n        (7) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" \"$args5\" \"$args6\" ;;\n        (8) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" \"$args5\" \"$args6\" \"$args7\" ;;\n        (9) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" \"$args5\" \"$args6\" \"$args7\" \"$args8\" ;;\n    esac\nfi\n\n# Split up the JVM_OPTS And GRADLE_OPTS values into an array, following the shell quoting and substitution rules\nfunction splitJvmOpts() {\n    JVM_OPTS=(\"$@\")\n}\neval splitJvmOpts $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS\nJVM_OPTS[${#JVM_OPTS[*]}]=\"-Dorg.gradle.appname=$APP_BASE_NAME\"\n\nexec \"$JAVACMD\" \"${JVM_OPTS[@]}\" -classpath \"$CLASSPATH\" org.gradle.wrapper.GradleWrapperMain \"$@\"\n"
  },
  {
    "path": "wa/workloads/aitutu/uiauto/gradlew.bat",
    "content": "@if \"%DEBUG%\" == \"\" @echo off\r\n@rem ##########################################################################\r\n@rem\r\n@rem  Gradle startup script for Windows\r\n@rem\r\n@rem ##########################################################################\r\n\r\n@rem Set local scope for the variables with windows NT shell\r\nif \"%OS%\"==\"Windows_NT\" setlocal\r\n\r\n@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.\r\nset DEFAULT_JVM_OPTS=\r\n\r\nset DIRNAME=%~dp0\r\nif \"%DIRNAME%\" == \"\" set DIRNAME=.\r\nset APP_BASE_NAME=%~n0\r\nset APP_HOME=%DIRNAME%\r\n\r\n@rem Find java.exe\r\nif defined JAVA_HOME goto findJavaFromJavaHome\r\n\r\nset JAVA_EXE=java.exe\r\n%JAVA_EXE% -version >NUL 2>&1\r\nif \"%ERRORLEVEL%\" == \"0\" goto init\r\n\r\necho.\r\necho ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.\r\necho.\r\necho Please set the JAVA_HOME variable in your environment to match the\r\necho location of your Java installation.\r\n\r\ngoto fail\r\n\r\n:findJavaFromJavaHome\r\nset JAVA_HOME=%JAVA_HOME:\"=%\r\nset JAVA_EXE=%JAVA_HOME%/bin/java.exe\r\n\r\nif exist \"%JAVA_EXE%\" goto init\r\n\r\necho.\r\necho ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%\r\necho.\r\necho Please set the JAVA_HOME variable in your environment to match the\r\necho location of your Java installation.\r\n\r\ngoto fail\r\n\r\n:init\r\n@rem Get command-line arguments, handling Windowz variants\r\n\r\nif not \"%OS%\" == \"Windows_NT\" goto win9xME_args\r\nif \"%@eval[2+2]\" == \"4\" goto 4NT_args\r\n\r\n:win9xME_args\r\n@rem Slurp the command line arguments.\r\nset CMD_LINE_ARGS=\r\nset _SKIP=2\r\n\r\n:win9xME_args_slurp\r\nif \"x%~1\" == \"x\" goto execute\r\n\r\nset CMD_LINE_ARGS=%*\r\ngoto execute\r\n\r\n:4NT_args\r\n@rem Get arguments from the 4NT Shell from JP Software\r\nset CMD_LINE_ARGS=%$\r\n\r\n:execute\r\n@rem Setup the command line\r\n\r\nset 
CLASSPATH=%APP_HOME%\\gradle\\wrapper\\gradle-wrapper.jar\r\n\r\n@rem Execute Gradle\r\n\"%JAVA_EXE%\" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% \"-Dorg.gradle.appname=%APP_BASE_NAME%\" -classpath \"%CLASSPATH%\" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%\r\n\r\n:end\r\n@rem End local scope for the variables with windows NT shell\r\nif \"%ERRORLEVEL%\"==\"0\" goto mainEnd\r\n\r\n:fail\r\nrem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of\r\nrem the _cmd.exe /c_ return code!\r\nif  not \"\" == \"%GRADLE_EXIT_CONSOLE%\" exit 1\r\nexit /b 1\r\n\r\n:mainEnd\r\nif \"%OS%\"==\"Windows_NT\" endlocal\r\n\r\n:omega\r\n"
  },
  {
    "path": "wa/workloads/aitutu/uiauto/settings.gradle",
    "content": "include ':app'\n"
  },
  {
    "path": "wa/workloads/androbench/__init__.py",
    "content": "#    Copyright 2014-2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\nimport re\n\nfrom wa import ApkUiautoWorkload\nfrom wa.framework.exception import WorkloadError\n\n\nclass Androbench(ApkUiautoWorkload):\n\n    name = 'androbench'\n    package_names = ['com.andromeda.androbench2']\n    regex_matches = [re.compile(r'Sequential Read Score ([\\d.]+)'),\n                     re.compile(r'Sequential Write Score ([\\d.]+)'),\n                     re.compile(r'Random Read Score ([\\d.]+)'),\n                     re.compile(r'Random Write Score ([\\d.]+)'),\n                     re.compile(r'SQL Insert Score ([\\d.]+)'),\n                     re.compile(r'SQL Update Score ([\\d.]+)'),\n                     re.compile(r'SQL Delete Score ([\\d.]+)')]\n    description = '''\n    Executes storage performance benchmarks\n\n    The Androbench workflow carries out the following typical productivity tasks.\n    1. Open Androbench application\n    2. 
Execute all memory benchmarks\n\n    Known working APK version: 5.0.1\n    '''\n\n    def update_output(self, context):\n        super(Androbench, self).update_output(context)\n        expected_results = len(self.regex_matches)\n        logcat_file = context.get_artifact_path('logcat')\n        with open(logcat_file, errors='replace') as fh:\n            for line in fh:\n                for regex in self.regex_matches:\n                    match = regex.search(line)\n                    if match:\n                        result = float(match.group(1))\n                        entry = regex.pattern.rsplit(None, 1)[0]\n                        context.add_metric(entry, result, 'MB/s', lower_is_better=False)\n                        expected_results -= 1\n        if expected_results > 0:\n            msg = \"The Androbench workload has failed. Expected {} scores, Detected {} scores.\"\n            raise WorkloadError(msg.format(len(self.regex_matches), expected_results))\n"
  },
  {
    "path": "wa/workloads/androbench/uiauto/app/build.gradle",
    "content": "apply plugin: 'com.android.application'\n\ndef packageName = \"com.arm.wa.uiauto.androbench\"\n\nandroid {\n    compileSdkVersion 28\n    buildToolsVersion '25.0.0'\n    defaultConfig {\n        applicationId \"${packageName}\"\n        minSdkVersion 18\n        targetSdkVersion 28\n        testInstrumentationRunner \"android.support.test.runner.AndroidJUnitRunner\"\n    }\n    buildTypes {\n        applicationVariants.all { variant ->\n            variant.outputs.each { output ->\n                output.outputFileName = \"${packageName}.apk\"\n            }\n        }\n    }\n}\n\ndependencies {\n    implementation fileTree(include: ['*.jar'], dir: 'libs')\n    implementation 'com.android.support.test:runner:0.5'\n    implementation 'com.android.support.test:rules:0.5'\n    implementation 'com.android.support.test.uiautomator:uiautomator-v18:2.1.2'\n    implementation(name: 'uiauto', ext: 'aar')\n}\n\nrepositories {\n    flatDir {\n        dirs 'libs'\n    }\n}\n"
  },
  {
    "path": "wa/workloads/androbench/uiauto/app/src/main/AndroidManifest.xml",
    "content": "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n<manifest xmlns:android=\"http://schemas.android.com/apk/res/android\"\n    package=\"com.arm.wa.uiauto.androbench\"\n    android:versionCode=\"1\"\n    android:versionName=\"1.0\">\n\n\n    <instrumentation\n        android:name=\"android.support.test.runner.AndroidJUnitRunner\"\n        android:targetPackage=\"${applicationId}\"/>\n\n</manifest>\n"
  },
  {
    "path": "wa/workloads/androbench/uiauto/app/src/main/java/com/arm/wa/uiauto/androbench/UiAutomation.java",
    "content": "/*    Copyright 2013-2018 ARM Limited\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n*/\n\n\npackage com.arm.wa.uiauto.androbench;\n\nimport android.app.Activity;\nimport android.os.Bundle;\nimport android.graphics.Rect;\nimport android.support.test.runner.AndroidJUnit4;\nimport android.support.test.uiautomator.UiObject;\nimport android.support.test.uiautomator.UiObjectNotFoundException;\nimport android.support.test.uiautomator.UiSelector;\nimport android.support.test.uiautomator.UiScrollable;\nimport android.view.KeyEvent;\nimport android.util.Log;\n\nimport com.arm.wa.uiauto.BaseUiAutomation;\n\nimport org.junit.Before;\nimport org.junit.Test;\nimport org.junit.runner.RunWith;\n\nimport java.util.concurrent.TimeUnit;\n\n\n@RunWith(AndroidJUnit4.class)\npublic class UiAutomation extends BaseUiAutomation {\n\n    public static String TAG = \"UXPERF\";\n\n    @Test\n    public void setup() throws Exception {\n        dismissPermissions();\n        dismissAndroidVersionPopup();\n    }\n\n    @Test\n    public void dismissPermissions() throws Exception {\n        UiSelector selector = new UiSelector();\n        UiObject cont = mDevice.findObject(selector.textContains(\"Continue\"));\n\n        if (cont.exists()) {\n            cont.click();\n        }\n    }\n\n    @Test\n    public void runWorkload() throws Exception {\n        runBenchmark();\n    }\n\n    @Test\n    public void extractResults() throws Exception {\n        
getScores();\n    }\n\n    public void runBenchmark() throws Exception {\n        UiSelector selector = new UiSelector();\n        UiObject btn_microbench = mDevice.findObject(selector.textContains(\"Micro\")\n                                                     .className(\"android.widget.Button\"));\n        if (btn_microbench.exists()) {\n            btn_microbench.click();\n        } else {\n            UiObject bench =\n                mDevice.findObject(new UiSelector().resourceIdMatches(\"com.andromeda.androbench2:id/btnStartingBenchmarking\"));\n            Rect bounds = bench.getBounds();\n            mDevice.click(bounds.centerX(), bounds.centerY());\n        }\n        UiObject btn_yes= mDevice.findObject(selector.textContains(\"Yes\")\n                                                     .className(\"android.widget.Button\"));\n        btn_yes.click();\n\n        UiObject complete_text = mDevice.findObject(selector.text(\"Cancel\")\n                                                        .className(\"android.widget.Button\"));\n        waitObject(complete_text);\n        sleep(2);\n        complete_text.click();\n    }\n\n    public void getScores() throws Exception {\n        UiSelector selector = new UiSelector();\n        UiObject seqRead =\n            mDevice.findObject(selector.text(\"Sequential Read\").fromParent(selector.index(1)));\n        UiObject seqWrite =\n            mDevice.findObject(selector.text(\"Sequential Write\").fromParent(selector.index(1)));\n        UiObject ranRead =\n            mDevice.findObject(selector.text(\"Random Read\").fromParent(selector.index(1)));\n        UiObject ranWrite =\n            mDevice.findObject(selector.text(\"Random Write\").fromParent(selector.index(1)));\n        UiObject sqlInsert =\n            mDevice.findObject(selector.text(\"SQLite Insert\").fromParent(selector.index(1)));\n        UiObject sqlUpdate =\n            mDevice.findObject(selector.text(\"SQLite 
Update\").fromParent(selector.index(1)));\n        UiObject sqlDelete =\n            mDevice.findObject(selector.text(\"SQLite Delete\").fromParent(selector.index(1)));\n\n        UiScrollable scrollView = new UiScrollable(new UiSelector().scrollable(true));\n        Log.d(TAG, \"Sequential Read Score \" + seqRead.getText());\n\n        if (scrollView.exists()){scrollView.scrollIntoView(seqWrite);        }\n        Log.d(TAG, \"Sequential Write Score \" + seqWrite.getText());\n\n        if (scrollView.exists()){scrollView.scrollIntoView(ranRead);}\n        Log.d(TAG, \"Random Read Score \" + ranRead.getText());\n\n        if (scrollView.exists()){scrollView.scrollIntoView(ranWrite);}\n        Log.d(TAG, \"Random Write Score \" + ranWrite.getText());\n\n        if (scrollView.exists()){scrollView.scrollIntoView(sqlInsert);}\n        Log.d(TAG, \"SQL Insert Score \" + sqlInsert.getText());\n\n        if (scrollView.exists()){scrollView.scrollIntoView(sqlUpdate);}\n        Log.d(TAG, \"SQL Update Score \" + sqlUpdate.getText());\n\n        if (scrollView.exists()){scrollView.scrollIntoView(sqlDelete);}\n        Log.d(TAG, \"SQL Delete Score \" + sqlDelete.getText());\n    }\n}\n"
  },
  {
    "path": "wa/workloads/androbench/uiauto/build.gradle",
    "content": "// Top-level build file where you can add configuration options common to all sub-projects/modules.\n\nbuildscript {\n    repositories {\n        jcenter()\n        google()\n    }\n    dependencies {\n        classpath 'com.android.tools.build:gradle:7.2.1'\n\n        // NOTE: Do not place your application dependencies here; they belong\n        // in the individual module build.gradle files\n    }\n}\n\nallprojects {\n    repositories {\n        jcenter()\n        google()\n    }\n}\n\ntask clean(type: Delete) {\n    delete rootProject.buildDir\n}\n"
  },
  {
    "path": "wa/workloads/androbench/uiauto/build.sh",
    "content": "#!/bin/bash\n#    Copyright 2013-2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\nset -e\n\n# CD into build dir if possible - allows building from any directory\nscript_path='.'\nif `readlink -f $0 &>/dev/null`; then\n    script_path=`readlink -f $0 2>/dev/null`\nfi\nscript_dir=`dirname $script_path`\ncd $script_dir\n\n# Ensure gradelw exists before starting\nif [[ ! -f gradlew ]]; then\n    echo 'gradlew file not found! Check that you are in the right directory.'\n    exit 9\nfi\n\n# Copy base class library from wa dist\nlibs_dir=app/libs\nbase_class=`python3 -c \"import os, wa; print(os.path.join(os.path.dirname(wa.__file__), 'framework', 'uiauto', 'uiauto.aar'))\"`\nmkdir -p $libs_dir\ncp $base_class $libs_dir\n\n# Build and return appropriate exit code if failed\n# gradle build\n./gradlew clean :app:assembleDebug\nexit_code=$?\nif [[ $exit_code -ne 0 ]]; then\n    echo \"ERROR: 'gradle build' exited with code $exit_code\"\n    exit $exit_code\nfi\n\n# If successful move APK file to workload folder (overwrite previous)\npackage=com.arm.wa.uiauto.androbench\nrm -f ../$package\nif [[ -f app/build/outputs/apk/debug/$package.apk ]]; then\n    cp app/build/outputs/apk/debug/$package.apk ../$package.apk\nelse\n    echo 'ERROR: UiAutomator apk could not be found!'\n    exit 9\nfi\n"
  },
  {
    "path": "wa/workloads/androbench/uiauto/gradle/wrapper/gradle-wrapper.properties",
    "content": "#Wed May 03 15:42:44 BST 2017\ndistributionBase=GRADLE_USER_HOME\ndistributionPath=wrapper/dists\nzipStoreBase=GRADLE_USER_HOME\nzipStorePath=wrapper/dists\ndistributionUrl=https\\://services.gradle.org/distributions/gradle-7.3.3-all.zip\n"
  },
  {
    "path": "wa/workloads/androbench/uiauto/gradlew",
    "content": "#!/usr/bin/env bash\n\n##############################################################################\n##\n##  Gradle start up script for UN*X\n##\n##############################################################################\n\n# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.\nDEFAULT_JVM_OPTS=\"\"\n\nAPP_NAME=\"Gradle\"\nAPP_BASE_NAME=`basename \"$0\"`\n\n# Use the maximum available, or set MAX_FD != -1 to use that value.\nMAX_FD=\"maximum\"\n\nwarn ( ) {\n    echo \"$*\"\n}\n\ndie ( ) {\n    echo\n    echo \"$*\"\n    echo\n    exit 1\n}\n\n# OS specific support (must be 'true' or 'false').\ncygwin=false\nmsys=false\ndarwin=false\ncase \"`uname`\" in\n  CYGWIN* )\n    cygwin=true\n    ;;\n  Darwin* )\n    darwin=true\n    ;;\n  MINGW* )\n    msys=true\n    ;;\nesac\n\n# Attempt to set APP_HOME\n# Resolve links: $0 may be a link\nPRG=\"$0\"\n# Need this for relative symlinks.\nwhile [ -h \"$PRG\" ] ; do\n    ls=`ls -ld \"$PRG\"`\n    link=`expr \"$ls\" : '.*-> \\(.*\\)$'`\n    if expr \"$link\" : '/.*' > /dev/null; then\n        PRG=\"$link\"\n    else\n        PRG=`dirname \"$PRG\"`\"/$link\"\n    fi\ndone\nSAVED=\"`pwd`\"\ncd \"`dirname \\\"$PRG\\\"`/\" >/dev/null\nAPP_HOME=\"`pwd -P`\"\ncd \"$SAVED\" >/dev/null\n\nCLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar\n\n# Determine the Java command to use to start the JVM.\nif [ -n \"$JAVA_HOME\" ] ; then\n    if [ -x \"$JAVA_HOME/jre/sh/java\" ] ; then\n        # IBM's JDK on AIX uses strange locations for the executables\n        JAVACMD=\"$JAVA_HOME/jre/sh/java\"\n    else\n        JAVACMD=\"$JAVA_HOME/bin/java\"\n    fi\n    if [ ! 
-x \"$JAVACMD\" ] ; then\n        die \"ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME\n\nPlease set the JAVA_HOME variable in your environment to match the\nlocation of your Java installation.\"\n    fi\nelse\n    JAVACMD=\"java\"\n    which java >/dev/null 2>&1 || die \"ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.\n\nPlease set the JAVA_HOME variable in your environment to match the\nlocation of your Java installation.\"\nfi\n\n# Increase the maximum file descriptors if we can.\nif [ \"$cygwin\" = \"false\" -a \"$darwin\" = \"false\" ] ; then\n    MAX_FD_LIMIT=`ulimit -H -n`\n    if [ $? -eq 0 ] ; then\n        if [ \"$MAX_FD\" = \"maximum\" -o \"$MAX_FD\" = \"max\" ] ; then\n            MAX_FD=\"$MAX_FD_LIMIT\"\n        fi\n        ulimit -n $MAX_FD\n        if [ $? -ne 0 ] ; then\n            warn \"Could not set maximum file descriptor limit: $MAX_FD\"\n        fi\n    else\n        warn \"Could not query maximum file descriptor limit: $MAX_FD_LIMIT\"\n    fi\nfi\n\n# For Darwin, add options to specify how the application appears in the dock\nif $darwin; then\n    GRADLE_OPTS=\"$GRADLE_OPTS \\\"-Xdock:name=$APP_NAME\\\" \\\"-Xdock:icon=$APP_HOME/media/gradle.icns\\\"\"\nfi\n\n# For Cygwin, switch paths to Windows format before running java\nif $cygwin ; then\n    APP_HOME=`cygpath --path --mixed \"$APP_HOME\"`\n    CLASSPATH=`cygpath --path --mixed \"$CLASSPATH\"`\n    JAVACMD=`cygpath --unix \"$JAVACMD\"`\n\n    # We build the pattern for arguments to be converted via cygpath\n    ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`\n    SEP=\"\"\n    for dir in $ROOTDIRSRAW ; do\n        ROOTDIRS=\"$ROOTDIRS$SEP$dir\"\n        SEP=\"|\"\n    done\n    OURCYGPATTERN=\"(^($ROOTDIRS))\"\n    # Add a user-defined pattern to the cygpath arguments\n    if [ \"$GRADLE_CYGPATTERN\" != \"\" ] ; then\n        OURCYGPATTERN=\"$OURCYGPATTERN|($GRADLE_CYGPATTERN)\"\n    fi\n    # Now convert the arguments - 
kludge to limit ourselves to /bin/sh\n    i=0\n    for arg in \"$@\" ; do\n        CHECK=`echo \"$arg\"|egrep -c \"$OURCYGPATTERN\" -`\n        CHECK2=`echo \"$arg\"|egrep -c \"^-\"`                                 ### Determine if an option\n\n        if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then                    ### Added a condition\n            eval `echo args$i`=`cygpath --path --ignore --mixed \"$arg\"`\n        else\n            eval `echo args$i`=\"\\\"$arg\\\"\"\n        fi\n        i=$((i+1))\n    done\n    case $i in\n        (0) set -- ;;\n        (1) set -- \"$args0\" ;;\n        (2) set -- \"$args0\" \"$args1\" ;;\n        (3) set -- \"$args0\" \"$args1\" \"$args2\" ;;\n        (4) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" ;;\n        (5) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" ;;\n        (6) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" \"$args5\" ;;\n        (7) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" \"$args5\" \"$args6\" ;;\n        (8) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" \"$args5\" \"$args6\" \"$args7\" ;;\n        (9) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" \"$args5\" \"$args6\" \"$args7\" \"$args8\" ;;\n    esac\nfi\n\n# Split up the JVM_OPTS And GRADLE_OPTS values into an array, following the shell quoting and substitution rules\nfunction splitJvmOpts() {\n    JVM_OPTS=(\"$@\")\n}\neval splitJvmOpts $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS\nJVM_OPTS[${#JVM_OPTS[*]}]=\"-Dorg.gradle.appname=$APP_BASE_NAME\"\n\nexec \"$JAVACMD\" \"${JVM_OPTS[@]}\" -classpath \"$CLASSPATH\" org.gradle.wrapper.GradleWrapperMain \"$@\"\n"
  },
  {
    "path": "wa/workloads/androbench/uiauto/gradlew.bat",
    "content": "@if \"%DEBUG%\" == \"\" @echo off\r\n@rem ##########################################################################\r\n@rem\r\n@rem  Gradle startup script for Windows\r\n@rem\r\n@rem ##########################################################################\r\n\r\n@rem Set local scope for the variables with windows NT shell\r\nif \"%OS%\"==\"Windows_NT\" setlocal\r\n\r\n@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.\r\nset DEFAULT_JVM_OPTS=\r\n\r\nset DIRNAME=%~dp0\r\nif \"%DIRNAME%\" == \"\" set DIRNAME=.\r\nset APP_BASE_NAME=%~n0\r\nset APP_HOME=%DIRNAME%\r\n\r\n@rem Find java.exe\r\nif defined JAVA_HOME goto findJavaFromJavaHome\r\n\r\nset JAVA_EXE=java.exe\r\n%JAVA_EXE% -version >NUL 2>&1\r\nif \"%ERRORLEVEL%\" == \"0\" goto init\r\n\r\necho.\r\necho ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.\r\necho.\r\necho Please set the JAVA_HOME variable in your environment to match the\r\necho location of your Java installation.\r\n\r\ngoto fail\r\n\r\n:findJavaFromJavaHome\r\nset JAVA_HOME=%JAVA_HOME:\"=%\r\nset JAVA_EXE=%JAVA_HOME%/bin/java.exe\r\n\r\nif exist \"%JAVA_EXE%\" goto init\r\n\r\necho.\r\necho ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%\r\necho.\r\necho Please set the JAVA_HOME variable in your environment to match the\r\necho location of your Java installation.\r\n\r\ngoto fail\r\n\r\n:init\r\n@rem Get command-line arguments, handling Windowz variants\r\n\r\nif not \"%OS%\" == \"Windows_NT\" goto win9xME_args\r\nif \"%@eval[2+2]\" == \"4\" goto 4NT_args\r\n\r\n:win9xME_args\r\n@rem Slurp the command line arguments.\r\nset CMD_LINE_ARGS=\r\nset _SKIP=2\r\n\r\n:win9xME_args_slurp\r\nif \"x%~1\" == \"x\" goto execute\r\n\r\nset CMD_LINE_ARGS=%*\r\ngoto execute\r\n\r\n:4NT_args\r\n@rem Get arguments from the 4NT Shell from JP Software\r\nset CMD_LINE_ARGS=%$\r\n\r\n:execute\r\n@rem Setup the command line\r\n\r\nset 
CLASSPATH=%APP_HOME%\\gradle\\wrapper\\gradle-wrapper.jar\r\n\r\n@rem Execute Gradle\r\n\"%JAVA_EXE%\" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% \"-Dorg.gradle.appname=%APP_BASE_NAME%\" -classpath \"%CLASSPATH%\" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%\r\n\r\n:end\r\n@rem End local scope for the variables with windows NT shell\r\nif \"%ERRORLEVEL%\"==\"0\" goto mainEnd\r\n\r\n:fail\r\nrem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of\r\nrem the _cmd.exe /c_ return code!\r\nif  not \"\" == \"%GRADLE_EXIT_CONSOLE%\" exit 1\r\nexit /b 1\r\n\r\n:mainEnd\r\nif \"%OS%\"==\"Windows_NT\" endlocal\r\n\r\n:omega\r\n"
  },
  {
    "path": "wa/workloads/androbench/uiauto/settings.gradle",
    "content": "include ':app'\n"
  },
  {
    "path": "wa/workloads/angrybirds_rio/__init__.py",
    "content": "#    Copyright 2013-2017 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\n\nfrom wa import ApkReventWorkload\n\n\nclass AngryBirdsRio(ApkReventWorkload):\n\n    name = 'angrybirds_rio'\n    package_names = ['com.rovio.angrybirdsrio']\n    description = \"\"\"\n    Angry Birds Rio game.\n\n    The sequel to the very popular Android 2D game.\n    \"\"\"\n"
  },
  {
    "path": "wa/workloads/antutu/__init__.py",
    "content": "#    Copyright 2014-2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\nimport re\nimport os\nimport time\n\nfrom wa import ApkUiautoWorkload, ApkWorkload, WorkloadError, Parameter, ApkFile, File\n\n\nclass Antutu(ApkUiautoWorkload):\n\n    name = 'antutu'\n    package_names = ['com.antutu.ABenchMark']\n    regex_matches_v7 = [re.compile(r'CPU Maths Score (.+)'),\n                        re.compile(r'CPU Common Score (.+)'),\n                        re.compile(r'CPU Multi Score (.+)'),\n                        re.compile(r'GPU Marooned Score (.+)'),\n                        re.compile(r'GPU Coastline Score (.+)'),\n                        re.compile(r'GPU Refinery Score (.+)'),\n                        re.compile(r'Data Security Score (.+)'),\n                        re.compile(r'Data Processing Score (.+)'),\n                        re.compile(r'Image Processing Score (.+)'),\n                        re.compile(r'User Experience Score (.+)'),\n                        re.compile(r'RAM Score (.+)'),\n                        re.compile(r'ROM Score (.+)')]\n    regex_matches_v8 = [re.compile(r'CPU Mathematical Operations Score (.+)'),\n                        re.compile(r'CPU Common Algorithms Score (.+)'),\n                        re.compile(r'CPU Multi-Core Score (.+)'),\n                        re.compile(r'GPU Terracotta Score (.+)'),\n                        re.compile(r'GPU Coastline Score (.+)'),\n                        
re.compile(r'GPU Refinery Score (.+)'),\n                        re.compile(r'Data Security Score (.+)'),\n                        re.compile(r'Data Processing Score (.+)'),\n                        re.compile(r'Image Processing Score (.+)'),\n                        re.compile(r'User Experience Score (.+)'),\n                        re.compile(r'RAM Access Score (.+)'),\n                        re.compile(r'ROM APP IO Score (.+)'),\n                        re.compile(r'ROM Sequential Read Score (.+)'),\n                        re.compile(r'ROM Sequential Write Score (.+)'),\n                        re.compile(r'ROM Random Access Score (.+)')]\n    regex_matches_v9 = [re.compile(r'CPU Mathematical Operations Score (.+)'),\n                        re.compile(r'CPU Common Algorithms Score (.+)'),\n                        re.compile(r'CPU Multi-Core Score (.+)'),\n                        re.compile(r'GPU Terracotta Score (.+)'),\n                        re.compile(r'GPU Swordsman Score (.+)'),\n                        re.compile(r'GPU Refinery Score (.+)'),\n                        re.compile(r'Data Security Score (.+)'),\n                        re.compile(r'Data Processing Score (.+)'),\n                        re.compile(r'Image Processing Score (.+)'),\n                        re.compile(r'User Experience Score (.+)'),\n                        re.compile(r'Video CTS Score (.+)'),\n                        re.compile(r'Video Decode Score (.+)'),\n                        re.compile(r'RAM Access Score (.+)'),\n                        re.compile(r'ROM APP IO Score (.+)'),\n                        re.compile(r'ROM Sequential Read Score (.+)'),\n                        re.compile(r'ROM Sequential Write Score (.+)'),\n                        re.compile(r'ROM Random Access Score (.+)')]\n    regex_matches_v10 = [re.compile(r'CPU Mathematical Operations Score (.+)'),\n                         re.compile(r'CPU Common Algorithms Score (.+)'),\n                         
re.compile(r'CPU Multi-Core Score (.+)'),\n                         re.compile(r'GPU Seasons Score (.+)'),\n                         re.compile(r'GPU Coastline2 Score (.+)'),\n                         re.compile(r'RAM Bandwidth Score (.+)'),\n                         re.compile(r'RAM Latency Score (.+)'),\n                         re.compile(r'ROM APP IO Score (.+)'),\n                         re.compile(r'ROM Sequential Read Score (.+)'),\n                         re.compile(r'ROM Sequential Write Score (.+)'),\n                         re.compile(r'ROM Random Access Score (.+)'),\n                         re.compile(r'Data Security Score (.+)'),\n                         re.compile(r'Data Processing Score (.+)'),\n                         re.compile(r'Document Processing Score (.+)'),\n                         re.compile(r'Image Decoding Score (.+)'),\n                         re.compile(r'Image Processing Score (.+)'),\n                         re.compile(r'User Experience Score (.+)'),\n                         re.compile(r'Video CTS Score (.+)'),\n                         re.compile(r'Video Decoding Score (.+)'),\n                         re.compile(r'Video Editing Score (.+)')]\n    description = '''\n    Executes Antutu 3D, UX, CPU and Memory tests\n\n    Test description:\n    1. Open Antutu application\n    2. 
Execute Antutu benchmark\n\n    Known working APK version: 8.0.4\n    '''\n\n    supported_versions = ['7.0.4', '7.2.0',\n                          '8.0.4', '8.1.9', '8.4.5',\n                          '9.1.6', '9.2.9',\n                          '10.0.1-OB1', '10.0.6-OB6', '10.1.9', '10.2.1', '10.4.3']\n\n    parameters = [\n        Parameter('version', kind=str, allowed_values=supported_versions, override=True,\n                  description=(\n                      '''Specify the version of Antutu to be run.\n                      If not specified, the latest available version will be used.\n                      ''')\n                  )\n    ]\n\n    def __init__(self, device, **kwargs):\n        super(Antutu, self).__init__(device, **kwargs)\n        self.gui.timeout = 1200\n\n    def initialize(self, context):\n        super(Antutu, self).initialize(context)\n        #Install the supporting benchmark\n        supporting_apk = context.get_resource(ApkFile(self, package='com.antutu.benchmark.full'))\n        self.target.install(supporting_apk)\n        #Ensure the orientation is set to portrait\n        self.target.set_rotation(0)\n\n    def setup(self, context):\n        self.gui.uiauto_params['version'] = self.version\n        super(Antutu, self).setup(context)\n\n    def extract_scores(self, context, regex_version):\n        #pylint: disable=no-self-use, too-many-locals\n        cpu = []\n        gpu = []\n        ux = []\n        mem = []\n        expected_results = len(regex_version)\n        logcat_file = context.get_artifact_path('logcat')\n        with open(logcat_file, errors='replace') as fh:\n            for line in fh:\n                for regex in regex_version:\n                    match = regex.search(line)\n                    if match:\n                        try:\n                            result = float(match.group(1))\n                        except ValueError:\n                            result = float('NaN')\n                        
entry = regex.pattern.rsplit(None, 1)[0]\n                        context.add_metric(entry, result, lower_is_better=False)\n                        #Calculate group scores\n                        if 'CPU' in entry:\n                            cpu.append(result)\n                            cpu_result = sum(cpu)\n                        if 'GPU' in entry:\n                            gpu.append(result)\n                            gpu_result = sum(gpu)\n                        if any([i in entry for i in ['Data', 'Document', 'Image', 'User', 'Video']]):\n                            ux.append(result)\n                            ux_result = sum(ux)\n                        if any([i in entry for i in ['RAM', 'ROM']]):\n                            mem.append(result)\n                            mem_result = sum(mem)\n                        expected_results -= 1\n        if expected_results > 0:\n            msg = \"The Antutu workload has failed. Expected {} scores, Detected {} scores.\"\n            raise WorkloadError(msg.format(len(regex_version), expected_results))\n\n        context.add_metric('CPU Total Score', cpu_result, lower_is_better=False)\n        context.add_metric('GPU Total Score', gpu_result, lower_is_better=False)\n        context.add_metric('UX Total Score', ux_result, lower_is_better=False)\n        context.add_metric('MEM Total Score', mem_result, lower_is_better=False)\n\n        #Calculate overall scores\n        overall_result = float(cpu_result + gpu_result + ux_result + mem_result)\n        context.add_metric('Overall Score', overall_result, lower_is_better=False)\n\n    def update_output(self, context):\n        super(Antutu, self).update_output(context)\n        if self.version.startswith('10'):\n            self.extract_scores(context, self.regex_matches_v10)\n        if self.version.startswith('9'):\n            self.extract_scores(context, self.regex_matches_v9)\n        if self.version.startswith('8'):\n            
self.extract_scores(context, self.regex_matches_v8)\n        if self.version.startswith('7'):\n            self.extract_scores(context, self.regex_matches_v7)\n\n\nclass AntutuBDP(ApkWorkload):\n\n    name = \"antutu_bdp\"\n    description = '''\n    Workload for executing the BDP versions of the Antutu APK.\n\n    This will only work with specific APKS provided by Antutu but does\n    unlock command line automation and the capturing of a result file\n    as opposed to using UiAuto and Regex.\n\n    Known working version: 10.4.3-domesticAndroidFullBdp\n    '''\n    activity = 'com.android.module.app.ui.start.ABenchMarkStart --ez isExternal true --es whereTo \"test\"'\n    package_names = ['com.antutu.ABenchMark']\n\n    def initialize(self, context):\n        super(AntutuBDP, self).initialize(context)\n        #Set the files and directories we need\n        self.test_dir = os.path.join(self.target.external_storage_app_dir, 'com.antutu.ABenchMark', 'files', '.antutu')\n        self.settings_xml = context.get_resource(File(self, 'settings.xml'))\n        self.result_file = os.path.join(self.target.external_storage, 'Documents', 'antutu', 'last_result.json')\n        self.output_file = os.path.join(context.output_directory, 'antutu_results.json')\n        self.supporting_apk = context.get_resource(ApkFile(self, package='com.antutu.benchmark.full'))\n\n    def setup(self, context):\n        super(AntutuBDP, self).setup(context)\n        #Install the supporting benchmark\n        self.logger.info(\"Installing the supporting APK\")\n        self.target.install(self.supporting_apk)\n        #Launch the apk to initialize the test dir, then kill it\n        self.target.execute('am start {}/com.android.module.app.ui.test.activity.ActivityScoreBench'.format(self.apk.package))\n        self.target.execute('am force-stop {}'.format(self.apk.package))\n        #Copy the settings.xml to the test dir\n        self.target.push(self.settings_xml, self.test_dir)\n        #Ensure the 
orientation is set to portrait\n        self.target.set_rotation(0)\n        #Remove any pre-existing test results\n        if self.target.file_exists(self.result_file):\n            self.target.execute('rm {}'.format(self.result_file))\n\n    def run(self, context):\n        super(AntutuBDP, self).run(context)\n        #Launch the tests\n        self.target.execute('am start -n {}/{}'.format(self.apk.package, self.activity))\n        #Wait 10 minutes, then begin polling every 30s for the test result to appear\n        self.logger.debug(\"Waiting 10 minutes before starting to poll for the results file.\")\n        time.sleep(600)\n        #Poll for another 15 minutes, 20 minutes total before timing out\n        end_time = time.time() + 900\n        while time.time() < end_time:\n            if self.target.file_exists(self.result_file):\n                self.logger.debug(\"Result file found.\")\n                return True\n            time.sleep(30)\n            self.logger.debug(\"File not found yet. Continuing polling.\")\n        self.logger.warning(\"File not found within the configured timeout period. Exiting test.\")\n        return False\n\n    def update_output(self, context):\n        super(AntutuBDP, self).update_output(context)\n        self.target.pull(self.result_file, self.output_file)\n        context.add_artifact('antutu_result', self.output_file, kind='data', description='Antutu output from target')\n\n    def teardown(self, context):\n        super(AntutuBDP, self).teardown(context)\n        #Remove the test results file\n        self.target.execute('rm {}'.format(self.result_file))\n        #Remove the supporting APK\n        if self.target.is_installed(self.supporting_apk):\n            self.target.uninstall(self.supporting_apk)\n"
  },
  {
    "path": "wa/workloads/antutu/uiauto/app/build.gradle",
    "content": "apply plugin: 'com.android.application'\n\ndef packageName = \"com.arm.wa.uiauto.antutu\"\n\nandroid {\n    compileSdkVersion 28\n    buildToolsVersion \"28.0.3\"\n    defaultConfig {\n        applicationId \"${packageName}\"\n        minSdkVersion 18\n        targetSdkVersion 28\n        versionCode 1\n        versionName \"1.0\"\n        testInstrumentationRunner \"android.support.test.runner.AndroidJUnitRunner\"\n    }\n    buildTypes {\n        release {\n            minifyEnabled false\n            proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'\n        }\n        applicationVariants.all { variant ->\n            variant.outputs.each { output ->\n                output.outputFileName = \"${packageName}.apk\"\n            }\n        }\n    }\n}\n\ndependencies {\n    implementation fileTree(dir: 'libs', include: ['*.jar'])\n    implementation 'com.android.support.test:runner:0.5'\n    implementation 'com.android.support.test:rules:0.5'\n    implementation 'com.android.support.test.uiautomator:uiautomator-v18:2.1.2'\n    implementation(name: 'uiauto', ext:'aar')\n}\n\nrepositories {\n    flatDir {\n        dirs 'libs'\n    }\n}\n"
  },
  {
    "path": "wa/workloads/antutu/uiauto/app/src/main/AndroidManifest.xml",
    "content": "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n<manifest xmlns:android=\"http://schemas.android.com/apk/res/android\"\n    package=\"com.arm.wa.uiauto.antutu\"\n    android:versionCode=\"1\"\n    android:versionName=\"1.0\">\n\n\n    <instrumentation\n        android:name=\"android.support.test.runner.AndroidJUnitRunner\"\n        android:targetPackage=\"${applicationId}\"/>\n\n</manifest>\n\n"
  },
  {
    "path": "wa/workloads/antutu/uiauto/app/src/main/java/com/arm/wa/uiauto/antutu/UiAutomation.java",
    "content": "/*    Copyright 2013-2018 ARM Limited\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n*/\n\n\npackage com.arm.wa.uiauto.antutu;\n\nimport android.app.Activity;\nimport android.os.Bundle;\nimport android.support.test.runner.AndroidJUnit4;\nimport android.support.test.uiautomator.UiObject;\nimport android.support.test.uiautomator.UiObjectNotFoundException;\nimport android.support.test.uiautomator.UiScrollable;\nimport android.support.test.uiautomator.UiSelector;\nimport android.util.Log;\n\nimport com.arm.wa.uiauto.BaseUiAutomation;\n\nimport org.junit.Before;\nimport org.junit.Test;\nimport org.junit.runner.RunWith;\n\nimport java.util.HashSet;\nimport java.util.Set;\nimport java.util.concurrent.TimeUnit;\n\n@RunWith(AndroidJUnit4.class)\npublic class UiAutomation extends BaseUiAutomation {\n\n    public static String TAG = \"UXPERF\";\n    public static String TestButton5 = \"com.antutu.ABenchMark:id/start_test_region\";\n    public static String TestButton6 = \"com.antutu.ABenchMark:id/start_test_text\";\n    private static int initialTimeoutSeconds = 20;\n    protected Bundle parameters;\n    protected String version;\n\n    @Before\n    public void initialize(){\n        parameters = getParams();\n        version = parameters.getString(\"version\");\n    }\n\n    @Test\n    public void setup() throws Exception {\n       dismissAndroidVersionPopup();\n       clearPopups();\n    }\n\n    @Test\n    public void runWorkload() throws 
Exception{\n        hitTest();\n        waitforCompletion();\n    }\n\n    @Test\n    public void extractResults() throws Exception{\n        if (version.startsWith(\"9\")){\n            getScoresv9();\n        } else if (version.startsWith(\"8\")){\n            getScoresv8();\n        } else {\n            getScoresv7();\n        }\n    }\n\n    public void hitTest() throws Exception {\n        UiObject testbutton =\n            mDevice.findObject(new UiSelector().resourceId(\"com.antutu.ABenchMark:id/main_test_start_title\"));\n        testbutton.click();\n        sleep(1);\n    }\n\n    public void clearPopups() throws Exception {\n        UiObject agreement = \n            mDevice.findObject(new UiSelector().textContains(\"NEXT\"));\n        agreement.waitForExists(5000);\n        if (agreement.exists()){\n            agreement.click();\n        }\n\n        UiObject cancel =\n            mDevice.findObject(new UiSelector().textContains(\"CANCEL\"));\n        cancel.waitForExists(5000);\n        if (cancel.exists()){\n            cancel.click();\n        }\n    }\n\n    public void waitforCompletion() throws Exception {\n        UiObject totalScore =\n            mDevice.findObject(new UiSelector().resourceId(\"com.antutu.ABenchMark:id/textViewTotalScore\"));\n        totalScore.waitForExists(600000);\n    }\n\n    public void getScoresv7() throws Exception {\n        //Expand, Extract and Close CPU sub scores\n        UiObject cpuscores =\n            mDevice.findObject(new UiSelector().text(\"CPU\"));\n        cpuscores.click();\n        UiObject cpumaths =\n            mDevice.findObject(new UiSelector().text(\"CPU Mathematics Score\").fromParent(new UiSelector().index(3)));\n        UiObject cpucommon =\n            mDevice.findObject(new UiSelector().text(\"CPU Common Use Score\").fromParent(new UiSelector().index(3)));\n        UiObject cpumulti =\n            mDevice.findObject(new UiSelector().text(\"CPU Multi-Core Score\").fromParent(new 
UiSelector().index(3)));\n        Log.d(TAG, \"CPU Maths Score \" + cpumaths.getText());\n        Log.d(TAG, \"CPU Common Score \" + cpucommon.getText());\n        Log.d(TAG, \"CPU Multi Score \" + cpumulti.getText());\n        cpuscores.click();\n\n        //Expand, Extract and Close GPU sub scores\n        UiObject gpuscores =\n            mDevice.findObject(new UiSelector().text(\"GPU\"));\n        gpuscores.click();\n        UiObject gpumaroon =\n            mDevice.findObject(new UiSelector().text(\"3D [Marooned] Score\").fromParent(new UiSelector().index(3)));\n        UiObject gpucoast =\n            mDevice.findObject(new UiSelector().text(\"3D [Coastline] Score\").fromParent(new UiSelector().index(3)));\n        UiObject gpurefinery =\n            mDevice.findObject(new UiSelector().text(\"3D [Refinery] Score\").fromParent(new UiSelector().index(3)));\n        Log.d(TAG, \"GPU Marooned Score \" + gpumaroon.getText());\n        Log.d(TAG, \"GPU Coastline Score \" + gpucoast.getText());\n        Log.d(TAG, \"GPU Refinery Score \" + gpurefinery.getText());\n        gpuscores.click();\n\n        //Expand, Extract and Close UX sub scores\n        UiObject uxscores =\n            mDevice.findObject(new UiSelector().text(\"UX\"));\n        uxscores.click();\n        UiObject security =\n            mDevice.findObject(new UiSelector().text(\"Data Security Score\").fromParent(new UiSelector().index(3)));\n        UiObject dataprocessing =\n            mDevice.findObject(new UiSelector().text(\"Data Processing Score\").fromParent(new UiSelector().index(3)));\n        UiObject imageprocessing =\n            mDevice.findObject(new UiSelector().text(\"Image Processing Score\").fromParent(new UiSelector().index(3)));\n        UiObject uxscore =\n            mDevice.findObject(new UiSelector().text(\"User Experience Score\").fromParent(new UiSelector().index(3)));\n        Log.d(TAG, \"Data Security Score \" + security.getText());\n        Log.d(TAG, \"Data Processing 
Score \" + dataprocessing.getText());\n        Log.d(TAG, \"Image Processing Score \" + imageprocessing.getText());\n        Log.d(TAG, \"User Experience Score \" + uxscore.getText());\n        uxscores.click();\n\n        //Expand, Extract and Close MEM sub scores\n        UiObject memscores =\n            mDevice.findObject(new UiSelector().text(\"MEM\"));\n        memscores.click();\n        UiObject ramscore =\n            mDevice.findObject(new UiSelector().text(\"RAM Score\").fromParent(new UiSelector().index(3)));\n        UiObject romscore =\n            mDevice.findObject(new UiSelector().text(\"ROM Score\").fromParent(new UiSelector().index(3)));\n        Log.d(TAG, \"RAM Score \" + ramscore.getText());\n        Log.d(TAG, \"ROM Score \" + romscore.getText());\n        memscores.click();\n    }\n\n    public void getScoresv8() throws Exception {\n        UiScrollable list = new UiScrollable(new UiSelector().scrollable(true));\n\n        //Expand, Extract and Close CPU sub scores\n        UiObject cpuscores =\n            mDevice.findObject(new UiSelector().resourceId(\"com.antutu.ABenchMark:id/result_details_recyclerView\"))\n            .getChild(new UiSelector().index(2))\n            .getChild(new UiSelector().index(4));\n        cpuscores.click();\n        UiObject cpumaths =\n            mDevice.findObject(new UiSelector().text(\"CPU Mathematical Operations\").fromParent(new UiSelector().index(1)));\n        UiObject cpucommon =\n            mDevice.findObject(new UiSelector().text(\"CPU Common Algorithms\").fromParent(new UiSelector().index(1)));\n        UiObject cpumulti =\n            mDevice.findObject(new UiSelector().text(\"CPU Multi-Core\").fromParent(new UiSelector().index(1)));\n        Log.d(TAG, \"CPU Mathematical Operations Score \" + cpumaths.getText());\n        Log.d(TAG, \"CPU Common Algorithms Score \" + cpucommon.getText());\n        Log.d(TAG, \"CPU Multi-Core Score \" + cpumulti.getText());\n        cpuscores.click();\n\n        
//Expand, Extract and Close GPU sub scores\n        UiObject gpuscores =\n            mDevice.findObject(new UiSelector().resourceId(\"com.antutu.ABenchMark:id/result_details_recyclerView\"))\n            .getChild(new UiSelector().index(3))\n            .getChild(new UiSelector().index(4));\n        gpuscores.click();\n        UiObject gputerracotta =\n            mDevice.findObject(new UiSelector().text(\"Terracotta - Vulkan\").fromParent(new UiSelector().index(1)));\n        UiObject gpucoast =\n            mDevice.findObject(new UiSelector().text(\"Coastline - Vulkan\").fromParent(new UiSelector().index(1)));\n        UiObject gpurefinery =\n            mDevice.findObject(new UiSelector().text(\"Refinery - OpenGL ES3.1+AEP\").fromParent(new UiSelector().index(1)));\n        Log.d(TAG, \"GPU Terracotta Score \" + gputerracotta.getText());\n        Log.d(TAG, \"GPU Coastline Score \" + gpucoast.getText());\n        Log.d(TAG, \"GPU Refinery Score \" + gpurefinery.getText());\n        gpuscores.click();\n\n        //Expand, Extract and Close UX sub scores\n        UiObject uxscores =\n            mDevice.findObject(new UiSelector().resourceId(\"com.antutu.ABenchMark:id/result_details_recyclerView\"))\n            .getChild(new UiSelector().index(5))\n            .getChild(new UiSelector().index(4));\n        uxscores.click();\n        UiObject security =\n            mDevice.findObject(new UiSelector().text(\"Data Security\").fromParent(new UiSelector().index(1)));\n        UiObject dataprocessing =\n            mDevice.findObject(new UiSelector().text(\"Data Processing\").fromParent(new UiSelector().index(1)));\n        UiObject imageprocessing =\n            mDevice.findObject(new UiSelector().text(\"Image Processing\").fromParent(new UiSelector().index(1)));\n        UiObject uxscore =\n            mDevice.findObject(new UiSelector().text(\"User Experience\").fromParent(new UiSelector().index(1)));\n        if (!security.exists() && list.waitForExists(60)) {\n  
          list.scrollIntoView(security);\n        }\n        Log.d(TAG, \"Data Security Score \" + security.getText());\n        if (!dataprocessing.exists() && list.waitForExists(60)) {\n            list.scrollIntoView(dataprocessing);\n        }\n        Log.d(TAG, \"Data Processing Score \" + dataprocessing.getText());\n        if (!imageprocessing.exists() && list.waitForExists(60)) {\n            list.scrollIntoView(imageprocessing);\n        }\n        Log.d(TAG, \"Image Processing Score \" + imageprocessing.getText());\n        if (!uxscore.exists() && list.waitForExists(60)) {\n            list.scrollIntoView(uxscore);\n        }\n        Log.d(TAG, \"User Experience Score \" + uxscore.getText());\n        list.scrollToBeginning(10);\n        uxscores.click();\n\n        //Expand, Extract and Close MEM sub scores\n        UiObject memscores =\n            mDevice.findObject(new UiSelector().resourceId(\"com.antutu.ABenchMark:id/result_details_recyclerView\"))\n            .getChild(new UiSelector().index(4))\n            .getChild(new UiSelector().index(4));\n        memscores.click();\n        UiObject ramaccess =\n            mDevice.findObject(new UiSelector().text(\"RAM Access\").fromParent(new UiSelector().index(1)));\n        UiObject romapp =\n            mDevice.findObject(new UiSelector().text(\"ROM APP IO\").fromParent(new UiSelector().index(1)));\n        UiObject romread =\n            mDevice.findObject(new UiSelector().text(\"ROM Sequential Read\").fromParent(new UiSelector().index(1)));\n        UiObject romwrite =\n            mDevice.findObject(new UiSelector().text(\"ROM Sequential Write\").fromParent(new UiSelector().index(1)));\n        UiObject romaccess =\n            mDevice.findObject(new UiSelector().text(\"ROM Random Access\").fromParent(new UiSelector().index(1)));\n       if (!ramaccess.exists() && list.waitForExists(60)) {\n            list.scrollIntoView(ramaccess);\n        }\n        Log.d(TAG, \"RAM Access Score \" + 
ramaccess.getText());\n       if (!romapp.exists() && list.waitForExists(60)) {\n            list.scrollIntoView(romapp);\n        }\n        Log.d(TAG, \"ROM APP IO Score \" + romapp.getText());\n        if (!romread.exists() && list.waitForExists(60)) {\n            list.scrollIntoView(romread);\n        }\n        Log.d(TAG, \"ROM Sequential Read Score \" + romread.getText());\n        if (!romwrite.exists() && list.waitForExists(60)) {\n            list.scrollIntoView(romwrite);\n        }\n        Log.d(TAG, \"ROM Sequential Write Score \" + romwrite.getText());\n        if (!romaccess.exists() && list.waitForExists(60)) {\n            list.scrollIntoView(romaccess);\n        }\n        Log.d(TAG, \"ROM Random Access Score \" + romaccess.getText());\n        list.scrollToBeginning(10);        \n        memscores.click();\n    }\n\n    public void getScoresv9() throws Exception {\n        UiScrollable list = new UiScrollable(new UiSelector().scrollable(true));\n\n        //Expand, Extract and Close CPU sub scores\n        UiObject cpuscores =\n            mDevice.findObject(new UiSelector().resourceId(\"com.antutu.ABenchMark:id/result_details_recyclerView\"))\n            .getChild(new UiSelector().index(2))\n            .getChild(new UiSelector().index(4));\n        cpuscores.click();\n        UiObject cpumaths =\n            mDevice.findObject(new UiSelector().text(\"CPU Mathematical Operations\").fromParent(new UiSelector().index(1)));\n        UiObject cpucommon =\n            mDevice.findObject(new UiSelector().text(\"CPU Common Algorithms\").fromParent(new UiSelector().index(1)));\n        UiObject cpumulti =\n            mDevice.findObject(new UiSelector().text(\"CPU Multi-Core\").fromParent(new UiSelector().index(1)));\n        Log.d(TAG, \"CPU Mathematical Operations Score \" + cpumaths.getText());\n        Log.d(TAG, \"CPU Common Algorithms Score \" + cpucommon.getText());\n        Log.d(TAG, \"CPU Multi-Core Score \" + cpumulti.getText());\n        
cpuscores.click();\n\n        //Expand, Extract and Close GPU sub scores\n        UiObject gpuscores =\n            mDevice.findObject(new UiSelector().resourceId(\"com.antutu.ABenchMark:id/result_details_recyclerView\"))\n            .getChild(new UiSelector().index(3))\n            .getChild(new UiSelector().index(4));\n        gpuscores.click();\n        UiObject gputerracotta =\n            mDevice.findObject(new UiSelector().text(\"Terracotta - Vulkan\").fromParent(new UiSelector().index(1)));\n        UiObject gpuswordsman =\n            mDevice.findObject(new UiSelector().text(\"Swordsman - Vulkan\").fromParent(new UiSelector().index(1)));\n        UiObject gpurefinery =\n            mDevice.findObject(new UiSelector().text(\"Refinery - OpenGL ES3.1+AEP\").fromParent(new UiSelector().index(1)));\n        Log.d(TAG, \"GPU Terracotta Score \" + gputerracotta.getText());\n        Log.d(TAG, \"GPU Swordsman Score \" + gpuswordsman.getText());\n        Log.d(TAG, \"GPU Refinery Score \" + gpurefinery.getText());\n        gpuscores.click();\n\n        //Expand, Extract and Close UX sub scores\n        UiObject uxscores =\n            mDevice.findObject(new UiSelector().resourceId(\"com.antutu.ABenchMark:id/result_details_recyclerView\"))\n            .getChild(new UiSelector().index(5))\n            .getChild(new UiSelector().index(4));\n        uxscores.click();\n        UiObject security =\n            mDevice.findObject(new UiSelector().text(\"Data Security\").fromParent(new UiSelector().index(1)));\n        UiObject dataprocessing =\n            mDevice.findObject(new UiSelector().text(\"Data Processing\").fromParent(new UiSelector().index(1)));\n        UiObject imageprocessing =\n            mDevice.findObject(new UiSelector().text(\"Image Processing\").fromParent(new UiSelector().index(1)));\n        UiObject uxscore =\n            mDevice.findObject(new UiSelector().text(\"User Experience\").fromParent(new UiSelector().index(1)));\n        UiObject 
videocts =\n            mDevice.findObject(new UiSelector().text(\"Video CTS\").fromParent(new UiSelector().index(1)));\n        UiObject videodecode =\n            mDevice.findObject(new UiSelector().text(\"Video Decode\").fromParent(new UiSelector().index(1)));\n        if (!security.exists() && list.waitForExists(60)) {\n            list.scrollIntoView(security);\n        }\n        Log.d(TAG, \"Data Security Score \" + security.getText());\n        if (!dataprocessing.exists() && list.waitForExists(60)) {\n            list.scrollIntoView(dataprocessing);\n        }\n        Log.d(TAG, \"Data Processing Score \" + dataprocessing.getText());\n        if (!imageprocessing.exists() && list.waitForExists(60)) {\n            list.scrollIntoView(imageprocessing);\n        }\n        Log.d(TAG, \"Image Processing Score \" + imageprocessing.getText());\n        if (!uxscore.exists() && list.waitForExists(60)) {\n            list.scrollIntoView(uxscore);\n        }\n        Log.d(TAG, \"User Experience Score \" + uxscore.getText());\n        if (!videocts.exists() && list.waitForExists(60)) {\n            list.scrollIntoView(videocts);\n        }\n        Log.d(TAG, \"Video CTS Score \" + videocts.getText());\n        if (!videodecode.exists() && list.waitForExists(60)) {\n            list.scrollIntoView(videodecode);\n        }\n        Log.d(TAG, \"Video Decode Score \" + videodecode.getText());\n        list.scrollToBeginning(10);\n        uxscores.click();\n\n        //Expand, Extract and Close MEM sub scores\n        UiObject memscores =\n            mDevice.findObject(new UiSelector().resourceId(\"com.antutu.ABenchMark:id/result_details_recyclerView\"))\n            .getChild(new UiSelector().index(4))\n            .getChild(new UiSelector().index(4));\n        memscores.click();\n        UiObject ramaccess =\n            mDevice.findObject(new UiSelector().text(\"RAM Access\").fromParent(new UiSelector().index(1)));\n        UiObject romapp =\n            
mDevice.findObject(new UiSelector().text(\"ROM APP IO\").fromParent(new UiSelector().index(1)));\n        UiObject romread =\n            mDevice.findObject(new UiSelector().text(\"ROM Sequential Read\").fromParent(new UiSelector().index(1)));\n        UiObject romwrite =\n            mDevice.findObject(new UiSelector().text(\"ROM Sequential Write\").fromParent(new UiSelector().index(1)));\n        UiObject romaccess =\n            mDevice.findObject(new UiSelector().text(\"ROM Random Access\").fromParent(new UiSelector().index(1)));\n       if (!ramaccess.exists() && list.waitForExists(60)) {\n            list.scrollIntoView(ramaccess);\n        }\n        Log.d(TAG, \"RAM Access Score \" + ramaccess.getText());\n       if (!romapp.exists() && list.waitForExists(60)) {\n            list.scrollIntoView(romapp);\n        }\n        Log.d(TAG, \"ROM APP IO Score \" + romapp.getText());\n        if (!romread.exists() && list.waitForExists(60)) {\n            list.scrollIntoView(romread);\n        }\n        Log.d(TAG, \"ROM Sequential Read Score \" + romread.getText());\n        if (!romwrite.exists() && list.waitForExists(60)) {\n            list.scrollIntoView(romwrite);\n        }\n        Log.d(TAG, \"ROM Sequential Write Score \" + romwrite.getText());\n        if (!romaccess.exists() && list.waitForExists(60)) {\n            list.scrollIntoView(romaccess);\n        }\n        Log.d(TAG, \"ROM Random Access Score \" + romaccess.getText());\n        list.scrollToBeginning(10);        \n        memscores.click();\n    }\n\n}\n"
  },
  {
    "path": "wa/workloads/antutu/uiauto/build.gradle",
    "content": "// Top-level build file where you can add configuration options common to all sub-projects/modules.\n\nbuildscript {\n    repositories {\n        jcenter()\n        google()\n    }\n    dependencies {\n        classpath 'com.android.tools.build:gradle:7.2.1'\n\n        // NOTE: Do not place your application dependencies here; they belong\n        // in the individual module build.gradle files\n    }\n}\n\nallprojects {\n    repositories {\n        jcenter()\n        google()\n    }\n}\n\ntask clean(type: Delete) {\n    delete rootProject.buildDir\n}\n"
  },
  {
    "path": "wa/workloads/antutu/uiauto/build.sh",
    "content": "#!/bin/bash\n#    Copyright 2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\n\n# CD into build dir if possible - allows building from any directory\nscript_path='.'\nif `readlink -f $0 &>/dev/null`; then\n    script_path=`readlink -f $0 2>/dev/null`\nfi\nscript_dir=`dirname $script_path`\ncd $script_dir\n\n# Ensure gradlew exists before starting\nif [[ ! -f gradlew ]]; then\n    echo 'gradlew file not found! Check that you are in the right directory.'\n    exit 9\nfi\n\n# Copy base class library from wa dist\nlibs_dir=app/libs\nbase_class=`python3 -c \"import os, wa; print(os.path.join(os.path.dirname(wa.__file__), 'framework', 'uiauto', 'uiauto.aar'))\"`\nmkdir -p $libs_dir\ncp $base_class $libs_dir\n\n# Build and return appropriate exit code if failed\n# gradle build\n./gradlew clean :app:assembleDebug\nexit_code=$?\nif [[ $exit_code -ne 0 ]]; then\n    echo \"ERROR: 'gradle build' exited with code $exit_code\"\n    exit $exit_code\nfi\n\n# If successful move APK file to workload folder (overwrite previous)\npackage=com.arm.wa.uiauto.antutu\nrm -f ../$package\nif [[ -f app/build/outputs/apk/debug/$package.apk ]]; then\n    cp app/build/outputs/apk/debug/$package.apk ../$package.apk\nelse\n    echo 'ERROR: UiAutomator apk could not be found!'\n    exit 9\nfi\n"
  },
  {
    "path": "wa/workloads/antutu/uiauto/gradle/wrapper/gradle-wrapper.properties",
    "content": "#Thu Jun 08 14:26:39 BST 2017\ndistributionBase=GRADLE_USER_HOME\ndistributionPath=wrapper/dists\nzipStoreBase=GRADLE_USER_HOME\nzipStorePath=wrapper/dists\ndistributionUrl=https\\://services.gradle.org/distributions/gradle-7.3.3-all.zip\n"
  },
  {
    "path": "wa/workloads/antutu/uiauto/gradlew",
    "content": "#!/usr/bin/env bash\n\n##############################################################################\n##\n##  Gradle start up script for UN*X\n##\n##############################################################################\n\n# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.\nDEFAULT_JVM_OPTS=\"\"\n\nAPP_NAME=\"Gradle\"\nAPP_BASE_NAME=`basename \"$0\"`\n\n# Use the maximum available, or set MAX_FD != -1 to use that value.\nMAX_FD=\"maximum\"\n\nwarn ( ) {\n    echo \"$*\"\n}\n\ndie ( ) {\n    echo\n    echo \"$*\"\n    echo\n    exit 1\n}\n\n# OS specific support (must be 'true' or 'false').\ncygwin=false\nmsys=false\ndarwin=false\ncase \"`uname`\" in\n  CYGWIN* )\n    cygwin=true\n    ;;\n  Darwin* )\n    darwin=true\n    ;;\n  MINGW* )\n    msys=true\n    ;;\nesac\n\n# Attempt to set APP_HOME\n# Resolve links: $0 may be a link\nPRG=\"$0\"\n# Need this for relative symlinks.\nwhile [ -h \"$PRG\" ] ; do\n    ls=`ls -ld \"$PRG\"`\n    link=`expr \"$ls\" : '.*-> \\(.*\\)$'`\n    if expr \"$link\" : '/.*' > /dev/null; then\n        PRG=\"$link\"\n    else\n        PRG=`dirname \"$PRG\"`\"/$link\"\n    fi\ndone\nSAVED=\"`pwd`\"\ncd \"`dirname \\\"$PRG\\\"`/\" >/dev/null\nAPP_HOME=\"`pwd -P`\"\ncd \"$SAVED\" >/dev/null\n\nCLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar\n\n# Determine the Java command to use to start the JVM.\nif [ -n \"$JAVA_HOME\" ] ; then\n    if [ -x \"$JAVA_HOME/jre/sh/java\" ] ; then\n        # IBM's JDK on AIX uses strange locations for the executables\n        JAVACMD=\"$JAVA_HOME/jre/sh/java\"\n    else\n        JAVACMD=\"$JAVA_HOME/bin/java\"\n    fi\n    if [ ! 
-x \"$JAVACMD\" ] ; then\n        die \"ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME\n\nPlease set the JAVA_HOME variable in your environment to match the\nlocation of your Java installation.\"\n    fi\nelse\n    JAVACMD=\"java\"\n    which java >/dev/null 2>&1 || die \"ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.\n\nPlease set the JAVA_HOME variable in your environment to match the\nlocation of your Java installation.\"\nfi\n\n# Increase the maximum file descriptors if we can.\nif [ \"$cygwin\" = \"false\" -a \"$darwin\" = \"false\" ] ; then\n    MAX_FD_LIMIT=`ulimit -H -n`\n    if [ $? -eq 0 ] ; then\n        if [ \"$MAX_FD\" = \"maximum\" -o \"$MAX_FD\" = \"max\" ] ; then\n            MAX_FD=\"$MAX_FD_LIMIT\"\n        fi\n        ulimit -n $MAX_FD\n        if [ $? -ne 0 ] ; then\n            warn \"Could not set maximum file descriptor limit: $MAX_FD\"\n        fi\n    else\n        warn \"Could not query maximum file descriptor limit: $MAX_FD_LIMIT\"\n    fi\nfi\n\n# For Darwin, add options to specify how the application appears in the dock\nif $darwin; then\n    GRADLE_OPTS=\"$GRADLE_OPTS \\\"-Xdock:name=$APP_NAME\\\" \\\"-Xdock:icon=$APP_HOME/media/gradle.icns\\\"\"\nfi\n\n# For Cygwin, switch paths to Windows format before running java\nif $cygwin ; then\n    APP_HOME=`cygpath --path --mixed \"$APP_HOME\"`\n    CLASSPATH=`cygpath --path --mixed \"$CLASSPATH\"`\n    JAVACMD=`cygpath --unix \"$JAVACMD\"`\n\n    # We build the pattern for arguments to be converted via cygpath\n    ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`\n    SEP=\"\"\n    for dir in $ROOTDIRSRAW ; do\n        ROOTDIRS=\"$ROOTDIRS$SEP$dir\"\n        SEP=\"|\"\n    done\n    OURCYGPATTERN=\"(^($ROOTDIRS))\"\n    # Add a user-defined pattern to the cygpath arguments\n    if [ \"$GRADLE_CYGPATTERN\" != \"\" ] ; then\n        OURCYGPATTERN=\"$OURCYGPATTERN|($GRADLE_CYGPATTERN)\"\n    fi\n    # Now convert the arguments - 
kludge to limit ourselves to /bin/sh\n    i=0\n    for arg in \"$@\" ; do\n        CHECK=`echo \"$arg\"|egrep -c \"$OURCYGPATTERN\" -`\n        CHECK2=`echo \"$arg\"|egrep -c \"^-\"`                                 ### Determine if an option\n\n        if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then                    ### Added a condition\n            eval `echo args$i`=`cygpath --path --ignore --mixed \"$arg\"`\n        else\n            eval `echo args$i`=\"\\\"$arg\\\"\"\n        fi\n        i=$((i+1))\n    done\n    case $i in\n        (0) set -- ;;\n        (1) set -- \"$args0\" ;;\n        (2) set -- \"$args0\" \"$args1\" ;;\n        (3) set -- \"$args0\" \"$args1\" \"$args2\" ;;\n        (4) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" ;;\n        (5) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" ;;\n        (6) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" \"$args5\" ;;\n        (7) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" \"$args5\" \"$args6\" ;;\n        (8) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" \"$args5\" \"$args6\" \"$args7\" ;;\n        (9) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" \"$args5\" \"$args6\" \"$args7\" \"$args8\" ;;\n    esac\nfi\n\n# Split up the JVM_OPTS And GRADLE_OPTS values into an array, following the shell quoting and substitution rules\nfunction splitJvmOpts() {\n    JVM_OPTS=(\"$@\")\n}\neval splitJvmOpts $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS\nJVM_OPTS[${#JVM_OPTS[*]}]=\"-Dorg.gradle.appname=$APP_BASE_NAME\"\n\nexec \"$JAVACMD\" \"${JVM_OPTS[@]}\" -classpath \"$CLASSPATH\" org.gradle.wrapper.GradleWrapperMain \"$@\"\n"
  },
  {
    "path": "wa/workloads/antutu/uiauto/gradlew.bat",
    "content": "@if \"%DEBUG%\" == \"\" @echo off\r\n@rem ##########################################################################\r\n@rem\r\n@rem  Gradle startup script for Windows\r\n@rem\r\n@rem ##########################################################################\r\n\r\n@rem Set local scope for the variables with windows NT shell\r\nif \"%OS%\"==\"Windows_NT\" setlocal\r\n\r\n@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.\r\nset DEFAULT_JVM_OPTS=\r\n\r\nset DIRNAME=%~dp0\r\nif \"%DIRNAME%\" == \"\" set DIRNAME=.\r\nset APP_BASE_NAME=%~n0\r\nset APP_HOME=%DIRNAME%\r\n\r\n@rem Find java.exe\r\nif defined JAVA_HOME goto findJavaFromJavaHome\r\n\r\nset JAVA_EXE=java.exe\r\n%JAVA_EXE% -version >NUL 2>&1\r\nif \"%ERRORLEVEL%\" == \"0\" goto init\r\n\r\necho.\r\necho ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.\r\necho.\r\necho Please set the JAVA_HOME variable in your environment to match the\r\necho location of your Java installation.\r\n\r\ngoto fail\r\n\r\n:findJavaFromJavaHome\r\nset JAVA_HOME=%JAVA_HOME:\"=%\r\nset JAVA_EXE=%JAVA_HOME%/bin/java.exe\r\n\r\nif exist \"%JAVA_EXE%\" goto init\r\n\r\necho.\r\necho ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%\r\necho.\r\necho Please set the JAVA_HOME variable in your environment to match the\r\necho location of your Java installation.\r\n\r\ngoto fail\r\n\r\n:init\r\n@rem Get command-line arguments, handling Windowz variants\r\n\r\nif not \"%OS%\" == \"Windows_NT\" goto win9xME_args\r\nif \"%@eval[2+2]\" == \"4\" goto 4NT_args\r\n\r\n:win9xME_args\r\n@rem Slurp the command line arguments.\r\nset CMD_LINE_ARGS=\r\nset _SKIP=2\r\n\r\n:win9xME_args_slurp\r\nif \"x%~1\" == \"x\" goto execute\r\n\r\nset CMD_LINE_ARGS=%*\r\ngoto execute\r\n\r\n:4NT_args\r\n@rem Get arguments from the 4NT Shell from JP Software\r\nset CMD_LINE_ARGS=%$\r\n\r\n:execute\r\n@rem Setup the command line\r\n\r\nset 
CLASSPATH=%APP_HOME%\\gradle\\wrapper\\gradle-wrapper.jar\r\n\r\n@rem Execute Gradle\r\n\"%JAVA_EXE%\" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% \"-Dorg.gradle.appname=%APP_BASE_NAME%\" -classpath \"%CLASSPATH%\" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%\r\n\r\n:end\r\n@rem End local scope for the variables with windows NT shell\r\nif \"%ERRORLEVEL%\"==\"0\" goto mainEnd\r\n\r\n:fail\r\nrem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of\r\nrem the _cmd.exe /c_ return code!\r\nif  not \"\" == \"%GRADLE_EXIT_CONSOLE%\" exit 1\r\nexit /b 1\r\n\r\n:mainEnd\r\nif \"%OS%\"==\"Windows_NT\" endlocal\r\n\r\n:omega\r\n"
  },
  {
    "path": "wa/workloads/antutu/uiauto/settings.gradle",
    "content": "include ':app'\n"
  },
  {
    "path": "wa/workloads/apache.py",
    "content": "#    Copyright 2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\nfrom __future__ import division\nimport os\n# pylint: disable=wrong-import-order,wrong-import-position\nfrom future.standard_library import install_aliases\ninstall_aliases()\n\nfrom urllib.request import urlopen  # pylint: disable=import-error\n\nfrom wa import Workload, Parameter, Alias, WorkloadError\nfrom wa.utils.exec_control import once\nfrom wa.utils.misc import which, check_output\n\n\nclass ApacheBenchmark(Workload):\n\n    name = 'apache'\n    description = '''\n    Load-test an apache installation by issuing parallel requests with ab.\n\n    Run ab, the Apache benchmark on the host, directed at the target as the\n    server.\n\n    .. note:: It is assumed that Apache is already running on target.\n\n    .. 
note:: Current implementation only supports a very basic use of the\n              benchmark.\n\n    '''\n\n    parameters = [\n        Parameter('port', kind=int, default=80,\n                  description='''\n                  Port on which Apache is running.\n                  '''),\n        Parameter('path', default='/',\n                  description='''\n                  Path to request.\n                  '''),\n        Parameter('parallel_requests', kind=int, default=350,\n                  description='''\n                  The number of parallel requests at a time.\n                  '''),\n        Parameter('total_requests', kind=int, default=100000,\n                  description='''\n                  The total number of parallel requests.\n                  '''),\n    ]\n\n    aliases = [\n        Alias('ab'),\n    ]\n\n    supported_targets = ['linux']\n\n    @once\n    def initialize(self, context):\n        ab = which('ab')\n        if not ab:\n            msg = 'ab not found on host; make sure apache2-utils (or your distro equivalent) package is installed.'\n            raise WorkloadError(msg)\n\n        response = urlopen('http://{}:{}{}'.format(self.target.conn.host, self.port, self.path))\n        code = response.getcode()\n        if code != 200:\n            msg = 'HTTP request failed with status {}; is Apache running on target?'\n            raise WorkloadError(msg.format(code))\n\n    def setup(self, context):\n        template = 'ab -k -c {} -n {} {}:{}{}'\n        self.command = template.format(self.parallel_requests,\n                                       self.total_requests,\n                                       self.target.conn.host,\n                                       self.port,\n                                       self.path)\n        self.output = None\n\n    def run(self, context):\n        self.logger.debug(self.command)\n        self.output, _ = check_output(self.command, timeout=300, shell=True)\n\n    def 
extract_results(self, context):\n        outfile = os.path.join(context.output_directory, 'ab.output')\n        with open(outfile, 'w') as wfh:\n            wfh.write(self.output)\n            context.add_artifact('ab-output', outfile, kind='raw')\n\n    def update_output(self, context):  # pylint: disable=too-many-locals\n        with open(context.get_artifact_path('ab-output')) as fh:\n            server_software = get_line(fh, 'Server Software').split(':')[1].strip()\n            context.add_metadata('server-software', server_software)\n\n            doc_len_str = get_line(fh, 'Document Length').split(':')[1].strip()\n            doc_len = int(doc_len_str.split()[0])\n            context.add_metadata('document-length', doc_len)\n\n            completed = int(get_line(fh, 'Complete requests').split(':')[1].strip())\n            failed = int(get_line(fh, 'Failed requests').split(':')[1].strip())\n            fail_rate = failed / completed * 100\n            context.add_metric('failed_request', fail_rate, units='percent',\n                               lower_is_better=True)\n\n            rps_str = get_line(fh, 'Requests per second').split(':')[1].strip()\n            rps = float(rps_str.split('[')[0])\n            rps_units = rps_str.split('[')[1].split(']')[0]\n            context.add_metric('requests_per_second', rps, units=rps_units)\n\n            tpr_str = get_line(fh, 'Time per request').split(':')[1].strip()\n            tpr = float(tpr_str.split('[')[0])\n            tpr_units = tpr_str.split('[')[1].split(']')[0]\n            context.add_metric('time_per_request', tpr, units=tpr_units)\n\n            trate_str = get_line(fh, 'Transfer rate').split(':')[1].strip()\n            trate = float(trate_str.split('[')[0])\n            trate_units = trate_str.split('[')[1].split(']')[0]\n            context.add_metric('transfer_rate', trate, units=trate_units)\n\n            pc99 = int(get_line(fh, '99%').split()[1])\n            
context.add_metric('request_99percentile', pc99, 'ms')\n\n            pc100 = int(get_line(fh, '100%').split()[1])\n            context.add_metric('longest_request', pc100, 'ms')\n\n\ndef get_line(fh, text):\n    for line in fh:\n        if text in line:\n            return line\n"
  },
  {
    "path": "wa/workloads/applaunch/__init__.py",
    "content": "#    Copyright 2015-2017 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the 'License');\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an 'AS IS' BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n# pylint: disable=attribute-defined-outside-init\n\nfrom wa import ApkUiautoWorkload, Parameter\nfrom wa.framework import pluginloader\n\n\nclass Applaunch(ApkUiautoWorkload):\n\n    name = 'applaunch'\n    description = '''\n    This workload launches and measures the launch time of applications for supporting workloads.\n\n    Currently supported workloads are the ones that implement ``ApplaunchInterface``. For any\n    workload to support this workload, it should implement the ``ApplaunchInterface``.\n    The corresponding java file of the workload associated with the application being measured\n    is executed during the run. The application that needs to be\n    measured is passed as a parameter ``workload_name``. The parameters required for that workload\n    have to be passed as a dictionary which is captured by the parameter ``workload_params``.\n    This information can be obtained by inspecting the workload details of the specific workload.\n\n    The workload allows to run multiple iterations of an application\n    launch in two modes:\n\n    1. Launch from background\n    2. 
Launch from long-idle\n\n    These modes are captured as a parameter applaunch_type.\n\n    ``launch_from_background``\n        Launches an application after the application is sent to background by\n        pressing Home button.\n\n    ``launch_from_long-idle``\n        Launches an application after killing an application process and\n        clearing all the caches.\n\n    **Test Description:**\n\n    -   During the initialization and setup, the application being launched is launched\n        for the first time. The jar file of the workload of the application\n        is moved to device at the location ``workdir`` which further implements the methods\n        needed to measure the application launch time.\n\n    -   Run phase calls the UiAutomator of the applaunch which runs in two subphases.\n            A.  Applaunch Setup Run:\n                    During this phase, welcome screens and dialogues during the first launch\n                    of the instrumented application are cleared.\n            B.  
Applaunch Metric Run:\n                    During this phase, the application is launched multiple times determined by\n                    the iteration number specified by the parameter ``applaunch_iterations``.\n                    Each of these iterations are instrumented to capture the launch time taken\n                    and the values are recorded as UXPERF marker values in logfile.\n    '''\n    supported_platforms = ['android']\n\n    parameters = [\n        Parameter('workload_name', kind=str,\n                  description='Name of the uxperf workload to launch',\n                  default='gmail'),\n        Parameter('workload_params', kind=dict, default={},\n                  description=\"\"\"\n                  parameters of the uxperf workload whose application launch\n                  time is measured\n                  \"\"\"),\n        Parameter('applaunch_type', kind=str, default='launch_from_background',\n                  allowed_values=['launch_from_background', 'launch_from_long-idle'],\n                  description=\"\"\"\n                  Choose launch_from_long-idle for measuring launch time\n                  from long-idle. 
These two types are described in the workload\n                  description.\n                  \"\"\"),\n        Parameter('applaunch_iterations', kind=int, default=1,\n                  description=\"\"\"\n                  Number of iterations of the application launch\n                  \"\"\"),\n    ]\n\n    def init_resources(self, context):\n        super(Applaunch, self).init_resources(context)\n        self.workload_params['markers_enabled'] = True\n        # pylint: disable=no-member\n        self.workload = pluginloader.get_workload(self.workload_name, self.target,\n                                                  **self.workload_params)\n        self.workload.init_resources(context)\n        self.workload.initialize(context)\n        self.package_names = self.workload.package_names\n        self.pass_parameters()\n        # Deploy test workload uiauto apk\n        self.asset_files.append(self.workload.gui.uiauto_file)\n\n    def pass_parameters(self):\n        self.gui.uiauto_params['workload'] = self.workload.name\n        self.gui.uiauto_params['package_name'] = self.workload.package\n        self.gui.uiauto_params.update(self.workload.gui.uiauto_params)\n        if self.workload.apk.activity:\n            self.gui.uiauto_params['launch_activity'] = self.workload.apk.activity\n        else:\n            self.gui.uiauto_params['launch_activity'] = \"None\"\n        self.gui.uiauto_params['applaunch_type'] = self.applaunch_type\n        self.gui.uiauto_params['applaunch_iterations'] = self.applaunch_iterations\n\n    def setup(self, context):\n        self.workload.gui.uiauto_params['package_name'] = self.workload.apk.apk_info.package\n        self.workload.gui.init_commands()\n        self.workload.gui.deploy()\n        super(Applaunch, self).setup(context)\n\n    def finalize(self, context):\n        super(Applaunch, self).finalize(context)\n        self.workload.finalize(context)\n"
  },
  {
    "path": "wa/workloads/applaunch/uiauto/app/build.gradle",
    "content": "apply plugin: 'com.android.application'\n\ndef packageName = \"com.arm.wa.uiauto.applaunch\"\n\nandroid {\n    compileSdkVersion 28\n    buildToolsVersion \"28.0.3\"\n    defaultConfig {\n        applicationId \"${packageName}\"\n        minSdkVersion 18\n        targetSdkVersion 28\n        testInstrumentationRunner \"android.support.test.runner.AndroidJUnitRunner\"\n    }\n    buildTypes {\n        applicationVariants.all { variant ->\n            variant.outputs.each { output ->\n                output.outputFileName = \"${packageName}.apk\"\n            }\n        }\n    }\n}\n\ndependencies {\n    implementation fileTree(dir: 'libs', include: ['*.jar'])\n    implementation 'com.android.support.test:runner:0.5'\n    implementation 'com.android.support.test:rules:0.5'\n    implementation 'com.android.support.test.uiautomator:uiautomator-v18:2.1.2'\n    implementation(name: 'uiauto', ext:'aar')\n}\n\nrepositories {\n    flatDir {\n        dirs 'libs'\n    }\n}\n"
  },
  {
    "path": "wa/workloads/applaunch/uiauto/app/src/main/AndroidManifest.xml",
    "content": "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n<manifest xmlns:android=\"http://schemas.android.com/apk/res/android\"\n    package=\"com.arm.wa.uiauto.applaunch\"\n    android:versionCode=\"1\"\n    android:versionName=\"1.0\"\n    android:allowBackup=\"false\">\n\n    <uses-permission android:name=\"android.permission.READ_EXTERNAL_STORAGE\"/>\n    <uses-permission android:name=\"android.permission.WRITE_EXTERNAL_STORAGE\"/>\n    <uses-permission android:name=\"android.permission.KILL_BACKGROUND_PROCESSES\"/>\n\n    <instrumentation\n        android:name=\"android.support.test.runner.AndroidJUnitRunner\"\n        android:targetPackage=\"${applicationId}\"/>\n\n</manifest>\n\n"
  },
  {
    "path": "wa/workloads/applaunch/uiauto/app/src/main/java/com/arm/wa/uiauto/applaunch/UiAutomation.java",
    "content": "/*    Copyright 2014-2018 ARM Limited\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\npackage com.arm.wa.uiauto.applaunch;\n\nimport android.os.Bundle;\nimport android.support.test.runner.AndroidJUnit4;\nimport android.support.test.uiautomator.UiObject;\nimport android.util.Log;\n\nimport com.arm.wa.uiauto.ApplaunchInterface;\nimport com.arm.wa.uiauto.BaseUiAutomation;\nimport com.arm.wa.uiauto.UxPerfUiAutomation;\nimport com.arm.wa.uiauto.ActionLogger;\n\n\nimport org.junit.Before;\nimport org.junit.Test;\nimport org.junit.runner.RunWith;\n\nimport java.io.File;\n\nimport dalvik.system.DexClassLoader;\n\n\n@RunWith(AndroidJUnit4.class)\npublic class UiAutomation extends BaseUiAutomation {\n    /**\n     * Uiobject that marks the end of launch of an application, which is workload\n     * specific and added in the workload Java file by a method called getLaunchEndObject().\n     */\n    public UiObject launchEndObject;\n    /** Timeout to wait for application launch to finish. 
*/\n    private Integer launch_timeout = 10;\n    public String applaunchType;\n    public int applaunchIterations;\n    public String activityName;\n    public ApplaunchInterface launch_workload;\n\n    protected Bundle parameters;\n    protected String packageName;\n    protected String packageID;\n\n    @Before\n    public void initialize() throws Exception {\n        parameters = getParams();\n        packageID = getPackageID(parameters);\n\n        // Get workload apk file parameters\n        String packageName = parameters.getString(\"package_name\");\n        String workload = parameters.getString(\"workload\");\n        String workloadAPKPath = parameters.getString(\"workdir\");\n        String workloadName = String.format(\"com.arm.wa.uiauto.%1s.apk\", workload);\n        String workloadAPKFile = String.format(\"%1s/%2s\", workloadAPKPath, workloadName);\n\n        // Load the apk file\n        File apkFile = new File(workloadAPKFile);\n        File dexLocation = mContext.getDir(\"outdex\", 0);\n        if(!apkFile.exists()) {\n            throw new Exception(String.format(\"APK file not found: %s \", workloadAPKFile));\n        }\n        DexClassLoader classloader = new DexClassLoader(apkFile.toURI().toURL().toString(),\n                                                        dexLocation.getAbsolutePath(),\n                                                        null, mContext.getClassLoader());\n\n        Class uiautomation = null;\n        Object uiautomation_interface = null;\n        String workloadClass = String.format(\"com.arm.wa.uiauto.%1s.UiAutomation\", workload);\n        try {\n            uiautomation = classloader.loadClass(workloadClass);\n        } catch (ClassNotFoundException e) {\n            e.printStackTrace();\n        }\n\n        Log.d(\"Class loaded:\", uiautomation.getCanonicalName());\n        uiautomation_interface = uiautomation.newInstance();\n\n        // Create an Application Interface object from the workload\n        
launch_workload = ((ApplaunchInterface)uiautomation_interface);\n        launch_workload.initialize_instrumentation();\n        launch_workload.setWorkloadParameters(parameters);\n\n        // Get parameters for application launch\n        applaunchType = parameters.getString(\"applaunch_type\");\n        applaunchIterations = parameters.getInt(\"applaunch_iterations\");\n        activityName = parameters.getString(\"launch_activity\");\n    }\n\n    /**\n     * Setup run for applaunch workload that clears the initial\n     * run dialogues on launching an application package.\n     */\n    @Test\n    public void setup() throws Exception {\n        mDevice.setOrientationNatural();\n        launch_workload.runApplicationSetup();\n        unsetScreenOrientation();\n        closeApplication();\n    }\n\n    @Test\n    public void runWorkload() throws Exception {\n        launchEndObject = launch_workload.getLaunchEndObject();\n        for (int iteration = 0; iteration < applaunchIterations; iteration++) {\n            Log.d(\"Applaunch iteration number: \", String.valueOf(applaunchIterations));\n            sleep(20);//sleep for a while before next iteration\n            killBackground();\n            runApplaunchIteration(iteration);\n            closeApplication();\n        }\n    }\n\n    @Test\n    public void teardown() throws Exception {\n        mDevice.unfreezeRotation();\n    }\n\n    /**\n     * This method performs multiple iterations of application launch and\n     * records the time taken for each iteration.\n     */\n    public void runApplaunchIteration(Integer iteration_count) throws Exception{\n        String testTag = \"applaunch\" + iteration_count;\n        String launchCommand = launch_workload.getLaunchCommand();\n        AppLaunch applaunch = new AppLaunch(testTag, launchCommand);\n        applaunch.startLaunch();  // Launch the application and start timer\n        applaunch.endLaunch();  // marks the end of launch and stops timer\n    }\n\n    
/*\n     * AppLaunch class implements methods that facilitates launching applications\n     * from the uiautomator. It has methods that are used for one complete iteration of application\n     * launch instrumentation.\n     * ActionLogger class is instantiated within the class for measuring applaunch time.\n     * startLaunch(): Marks the beginning of the application launch, starts Timer\n     * endLaunch(): Marks the end of application, ends Timer\n     * launchMain(): Starts the application launch process and validates the finish of launch.\n    */\n    private class AppLaunch {\n\n        private String testTag;\n        private String launchCommand;\n        private ActionLogger logger;\n        Process launch_p;\n\n        public AppLaunch(String testTag, String launchCommand) {\n            this.testTag = testTag;\n            this.launchCommand = launchCommand;\n            this.logger = new ActionLogger(testTag, parameters);\n        }\n\n        // Beginning of application launch\n        public void startLaunch() throws Exception{\n            logger.start();\n            launchMain();\n        }\n\n        // Launches the application.\n        public void launchMain() throws Exception{\n            launch_p = Runtime.getRuntime().exec(launchCommand);\n            launchValidate(launch_p);\n        }\n\n        // Called by launchMain() to check if app launch is successful\n        public void launchValidate(Process launch_p) throws Exception {\n            launch_p.waitFor();\n            Integer exit_val = launch_p.exitValue();\n            if (exit_val != 0) {\n                throw new Exception(\"Application could not be launched\");\n            }\n        }\n\n        // Marks the end of application launch of the workload.\n        public void endLaunch() throws Exception{\n            waitObject(launchEndObject, launch_timeout);\n            logger.stop();\n            launch_p.destroy();\n        }\n    }\n\n    // Exits the application 
according to application launch type.\n    public void closeApplication() throws Exception{\n        if(applaunchType.equals(\"launch_from_background\")) {\n            pressHome();\n        }\n        else if(applaunchType.equals(\"launch_from_long-idle\")) {\n            killApplication();\n            dropCaches();\n        }\n    }\n\n    // Kills the application process\n    public void killApplication() throws Exception{\n        Process kill_p;\n        String command = String.format(\"am force-stop %s\", packageName);\n        kill_p = Runtime.getRuntime().exec(new String[] { \"su\", \"-c\", command});\n        kill_p.waitFor();\n        kill_p.destroy();\n    }\n\n    // Kills the background processes\n    public void killBackground() throws Exception{\n        Process kill_p;\n        kill_p = Runtime.getRuntime().exec(\"am kill-all\");\n        kill_p.waitFor();\n        kill_p.destroy();\n    }\n\n    // Drop the caches\n    public void dropCaches() throws Exception{\n        Process sync;\n        sync = Runtime.getRuntime().exec(new String[] { \"su\", \"-c\", \"sync\"});\n        sync.waitFor();\n        sync.destroy();\n\n        Process drop_cache;\n        String command = \"echo 3 > /proc/sys/vm/drop_caches\";\n        drop_cache = Runtime.getRuntime().exec(new String[] { \"su\", \"-c\", command});\n        drop_cache.waitFor();\n        drop_cache.destroy();\n    }\n}\n"
  },
  {
    "path": "wa/workloads/applaunch/uiauto/build.gradle",
    "content": "// Top-level build file where you can add configuration options common to all sub-projects/modules.\n\nbuildscript {\n    repositories {\n        jcenter()\n        google()\n    }\n    dependencies {\n        classpath 'com.android.tools.build:gradle:7.2.1'\n\n        // NOTE: Do not place your application dependencies here; they belong\n        // in the individual module build.gradle files\n    }\n}\n\nallprojects {\n    repositories {\n        jcenter()\n        google()\n    }\n}\n\ntask clean(type: Delete) {\n    delete rootProject.buildDir\n}\n"
  },
  {
    "path": "wa/workloads/applaunch/uiauto/build.sh",
    "content": "#!/bin/bash\n#    Copyright 2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\n\n# CD into build dir if possible - allows building from any directory\nscript_path='.'\nif `readlink -f $0 &>/dev/null`; then\n    script_path=`readlink -f $0 2>/dev/null`\nfi\nscript_dir=`dirname $script_path`\ncd $script_dir\n\n# Ensure gradlew exists before starting\nif [[ ! -f gradlew ]]; then\n    echo 'gradlew file not found! Check that you are in the right directory.'\n    exit 9\nfi\n\n# Copy base class library from wa dist\nlibs_dir=app/libs\nbase_class=`python3 -c \"import os, wa; print(os.path.join(os.path.dirname(wa.__file__), 'framework', 'uiauto', 'uiauto.aar'))\"`\nmkdir -p $libs_dir\ncp $base_class $libs_dir\n\n# Build and return appropriate exit code if failed gradle build\n./gradlew clean :app:assembleDebug\nexit_code=$?\nif [[ $exit_code -ne 0 ]]; then\n    echo \"ERROR: 'gradle build' exited with code $exit_code\"\n    exit $exit_code\nfi\n\n# If successful move APK file to workload folder (overwrite previous)\npackage=com.arm.wa.uiauto.applaunch\n\nrm -f ../$package\nif [[ -f app/build/outputs/apk/debug/$package.apk ]]; then\n    cp app/build/outputs/apk/debug/$package.apk ../$package.apk\nelse\n    echo 'ERROR: UiAutomator apk could not be found!'\n    exit 9\nfi\n"
  },
  {
    "path": "wa/workloads/applaunch/uiauto/gradle/wrapper/gradle-wrapper.properties",
    "content": "#Thu Jun 08 14:21:45 BST 2017\ndistributionBase=GRADLE_USER_HOME\ndistributionPath=wrapper/dists\nzipStoreBase=GRADLE_USER_HOME\nzipStorePath=wrapper/dists\ndistributionUrl=https\\://services.gradle.org/distributions/gradle-7.3.3-all.zip\n"
  },
  {
    "path": "wa/workloads/applaunch/uiauto/gradlew",
    "content": "#!/usr/bin/env bash\n\n##############################################################################\n##\n##  Gradle start up script for UN*X\n##\n##############################################################################\n\n# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.\nDEFAULT_JVM_OPTS=\"\"\n\nAPP_NAME=\"Gradle\"\nAPP_BASE_NAME=`basename \"$0\"`\n\n# Use the maximum available, or set MAX_FD != -1 to use that value.\nMAX_FD=\"maximum\"\n\nwarn ( ) {\n    echo \"$*\"\n}\n\ndie ( ) {\n    echo\n    echo \"$*\"\n    echo\n    exit 1\n}\n\n# OS specific support (must be 'true' or 'false').\ncygwin=false\nmsys=false\ndarwin=false\ncase \"`uname`\" in\n  CYGWIN* )\n    cygwin=true\n    ;;\n  Darwin* )\n    darwin=true\n    ;;\n  MINGW* )\n    msys=true\n    ;;\nesac\n\n# Attempt to set APP_HOME\n# Resolve links: $0 may be a link\nPRG=\"$0\"\n# Need this for relative symlinks.\nwhile [ -h \"$PRG\" ] ; do\n    ls=`ls -ld \"$PRG\"`\n    link=`expr \"$ls\" : '.*-> \\(.*\\)$'`\n    if expr \"$link\" : '/.*' > /dev/null; then\n        PRG=\"$link\"\n    else\n        PRG=`dirname \"$PRG\"`\"/$link\"\n    fi\ndone\nSAVED=\"`pwd`\"\ncd \"`dirname \\\"$PRG\\\"`/\" >/dev/null\nAPP_HOME=\"`pwd -P`\"\ncd \"$SAVED\" >/dev/null\n\nCLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar\n\n# Determine the Java command to use to start the JVM.\nif [ -n \"$JAVA_HOME\" ] ; then\n    if [ -x \"$JAVA_HOME/jre/sh/java\" ] ; then\n        # IBM's JDK on AIX uses strange locations for the executables\n        JAVACMD=\"$JAVA_HOME/jre/sh/java\"\n    else\n        JAVACMD=\"$JAVA_HOME/bin/java\"\n    fi\n    if [ ! 
-x \"$JAVACMD\" ] ; then\n        die \"ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME\n\nPlease set the JAVA_HOME variable in your environment to match the\nlocation of your Java installation.\"\n    fi\nelse\n    JAVACMD=\"java\"\n    which java >/dev/null 2>&1 || die \"ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.\n\nPlease set the JAVA_HOME variable in your environment to match the\nlocation of your Java installation.\"\nfi\n\n# Increase the maximum file descriptors if we can.\nif [ \"$cygwin\" = \"false\" -a \"$darwin\" = \"false\" ] ; then\n    MAX_FD_LIMIT=`ulimit -H -n`\n    if [ $? -eq 0 ] ; then\n        if [ \"$MAX_FD\" = \"maximum\" -o \"$MAX_FD\" = \"max\" ] ; then\n            MAX_FD=\"$MAX_FD_LIMIT\"\n        fi\n        ulimit -n $MAX_FD\n        if [ $? -ne 0 ] ; then\n            warn \"Could not set maximum file descriptor limit: $MAX_FD\"\n        fi\n    else\n        warn \"Could not query maximum file descriptor limit: $MAX_FD_LIMIT\"\n    fi\nfi\n\n# For Darwin, add options to specify how the application appears in the dock\nif $darwin; then\n    GRADLE_OPTS=\"$GRADLE_OPTS \\\"-Xdock:name=$APP_NAME\\\" \\\"-Xdock:icon=$APP_HOME/media/gradle.icns\\\"\"\nfi\n\n# For Cygwin, switch paths to Windows format before running java\nif $cygwin ; then\n    APP_HOME=`cygpath --path --mixed \"$APP_HOME\"`\n    CLASSPATH=`cygpath --path --mixed \"$CLASSPATH\"`\n    JAVACMD=`cygpath --unix \"$JAVACMD\"`\n\n    # We build the pattern for arguments to be converted via cygpath\n    ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`\n    SEP=\"\"\n    for dir in $ROOTDIRSRAW ; do\n        ROOTDIRS=\"$ROOTDIRS$SEP$dir\"\n        SEP=\"|\"\n    done\n    OURCYGPATTERN=\"(^($ROOTDIRS))\"\n    # Add a user-defined pattern to the cygpath arguments\n    if [ \"$GRADLE_CYGPATTERN\" != \"\" ] ; then\n        OURCYGPATTERN=\"$OURCYGPATTERN|($GRADLE_CYGPATTERN)\"\n    fi\n    # Now convert the arguments - 
kludge to limit ourselves to /bin/sh\n    i=0\n    for arg in \"$@\" ; do\n        CHECK=`echo \"$arg\"|egrep -c \"$OURCYGPATTERN\" -`\n        CHECK2=`echo \"$arg\"|egrep -c \"^-\"`                                 ### Determine if an option\n\n        if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then                    ### Added a condition\n            eval `echo args$i`=`cygpath --path --ignore --mixed \"$arg\"`\n        else\n            eval `echo args$i`=\"\\\"$arg\\\"\"\n        fi\n        i=$((i+1))\n    done\n    case $i in\n        (0) set -- ;;\n        (1) set -- \"$args0\" ;;\n        (2) set -- \"$args0\" \"$args1\" ;;\n        (3) set -- \"$args0\" \"$args1\" \"$args2\" ;;\n        (4) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" ;;\n        (5) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" ;;\n        (6) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" \"$args5\" ;;\n        (7) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" \"$args5\" \"$args6\" ;;\n        (8) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" \"$args5\" \"$args6\" \"$args7\" ;;\n        (9) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" \"$args5\" \"$args6\" \"$args7\" \"$args8\" ;;\n    esac\nfi\n\n# Split up the JVM_OPTS And GRADLE_OPTS values into an array, following the shell quoting and substitution rules\nfunction splitJvmOpts() {\n    JVM_OPTS=(\"$@\")\n}\neval splitJvmOpts $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS\nJVM_OPTS[${#JVM_OPTS[*]}]=\"-Dorg.gradle.appname=$APP_BASE_NAME\"\n\nexec \"$JAVACMD\" \"${JVM_OPTS[@]}\" -classpath \"$CLASSPATH\" org.gradle.wrapper.GradleWrapperMain \"$@\"\n"
  },
  {
    "path": "wa/workloads/applaunch/uiauto/gradlew.bat",
    "content": "@if \"%DEBUG%\" == \"\" @echo off\r\n@rem ##########################################################################\r\n@rem\r\n@rem  Gradle startup script for Windows\r\n@rem\r\n@rem ##########################################################################\r\n\r\n@rem Set local scope for the variables with windows NT shell\r\nif \"%OS%\"==\"Windows_NT\" setlocal\r\n\r\n@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.\r\nset DEFAULT_JVM_OPTS=\r\n\r\nset DIRNAME=%~dp0\r\nif \"%DIRNAME%\" == \"\" set DIRNAME=.\r\nset APP_BASE_NAME=%~n0\r\nset APP_HOME=%DIRNAME%\r\n\r\n@rem Find java.exe\r\nif defined JAVA_HOME goto findJavaFromJavaHome\r\n\r\nset JAVA_EXE=java.exe\r\n%JAVA_EXE% -version >NUL 2>&1\r\nif \"%ERRORLEVEL%\" == \"0\" goto init\r\n\r\necho.\r\necho ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.\r\necho.\r\necho Please set the JAVA_HOME variable in your environment to match the\r\necho location of your Java installation.\r\n\r\ngoto fail\r\n\r\n:findJavaFromJavaHome\r\nset JAVA_HOME=%JAVA_HOME:\"=%\r\nset JAVA_EXE=%JAVA_HOME%/bin/java.exe\r\n\r\nif exist \"%JAVA_EXE%\" goto init\r\n\r\necho.\r\necho ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%\r\necho.\r\necho Please set the JAVA_HOME variable in your environment to match the\r\necho location of your Java installation.\r\n\r\ngoto fail\r\n\r\n:init\r\n@rem Get command-line arguments, handling Windowz variants\r\n\r\nif not \"%OS%\" == \"Windows_NT\" goto win9xME_args\r\nif \"%@eval[2+2]\" == \"4\" goto 4NT_args\r\n\r\n:win9xME_args\r\n@rem Slurp the command line arguments.\r\nset CMD_LINE_ARGS=\r\nset _SKIP=2\r\n\r\n:win9xME_args_slurp\r\nif \"x%~1\" == \"x\" goto execute\r\n\r\nset CMD_LINE_ARGS=%*\r\ngoto execute\r\n\r\n:4NT_args\r\n@rem Get arguments from the 4NT Shell from JP Software\r\nset CMD_LINE_ARGS=%$\r\n\r\n:execute\r\n@rem Setup the command line\r\n\r\nset 
CLASSPATH=%APP_HOME%\\gradle\\wrapper\\gradle-wrapper.jar\r\n\r\n@rem Execute Gradle\r\n\"%JAVA_EXE%\" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% \"-Dorg.gradle.appname=%APP_BASE_NAME%\" -classpath \"%CLASSPATH%\" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%\r\n\r\n:end\r\n@rem End local scope for the variables with windows NT shell\r\nif \"%ERRORLEVEL%\"==\"0\" goto mainEnd\r\n\r\n:fail\r\nrem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of\r\nrem the _cmd.exe /c_ return code!\r\nif  not \"\" == \"%GRADLE_EXIT_CONSOLE%\" exit 1\r\nexit /b 1\r\n\r\n:mainEnd\r\nif \"%OS%\"==\"Windows_NT\" endlocal\r\n\r\n:omega\r\n"
  },
  {
    "path": "wa/workloads/applaunch/uiauto/settings.gradle",
    "content": "include ':app'\n"
  },
  {
    "path": "wa/workloads/benchmarkpi/__init__.py",
    "content": "#    Copyright 2013-2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\n\nimport re\n\nfrom wa import ApkUiautoWorkload\n\n\nclass BenchmarkPi(ApkUiautoWorkload):\n\n    name = 'benchmarkpi'\n    description = \"\"\"\n    Measures the time the target device takes to run and complete the Pi\n    calculation algorithm.\n\n    http://androidbenchmark.com/howitworks.php\n\n    from the website:\n\n    The whole idea behind this application is to use the same Pi calculation\n    algorithm on every Android Device and check how fast that process is.\n    Better calculation times, conclude to faster Android devices. This way you\n    can also check how lightweight your custom made Android build is. 
Or not.\n\n    As Pi is an irrational number, Benchmark Pi does not calculate the actual Pi\n    number, but an approximation near the first digits of Pi over the same\n    calculation circles the algorithms needs.\n\n    So, the number you are getting in milliseconds is the time your mobile device\n    takes to run and complete the Pi calculation algorithm resulting in an\n    approximation of the first Pi digits.\n    \"\"\"\n    package_names = ['gr.androiddev.BenchmarkPi']\n    activity = '.BenchmarkPi'\n\n    regex = re.compile('You calculated Pi in ([0-9]+)')\n\n    def update_output(self, context):\n        super(BenchmarkPi, self).update_output(context)\n        logcat_file = context.get_artifact_path('logcat')\n        with open(logcat_file, errors='replace') as fh:\n            for line in fh:\n                match = self.regex.search(line)\n                if match:\n                    result = int(match.group(1))\n\n        if result is not None:\n            context.add_metric('pi calculation', result,\n                               'milliseconds', lower_is_better=True)\n"
  },
  {
    "path": "wa/workloads/benchmarkpi/uiauto/app/build.gradle",
    "content": "apply plugin: 'com.android.application'\n\ndef packageName = \"com.arm.wa.uiauto.benchmarkpi\"\n\nandroid {\n    compileSdkVersion 28\n    buildToolsVersion \"28.0.3\"\n    defaultConfig {\n        applicationId \"${packageName}\"\n        minSdkVersion 18\n        targetSdkVersion 28\n        testInstrumentationRunner \"android.support.test.runner.AndroidJUnitRunner\"\n    }\n    buildTypes {\n        applicationVariants.all { variant ->\n            variant.outputs.each { output ->\n                output.outputFileName = \"${packageName}.apk\"\n            }\n        }\n    }\n}\n\ndependencies {\n    implementation fileTree(dir: 'libs', include: ['*.jar'])\n    implementation 'com.android.support.test:runner:0.5'\n    implementation 'com.android.support.test:rules:0.5'\n    implementation 'com.android.support.test.uiautomator:uiautomator-v18:2.1.2'\n    implementation(name: 'uiauto', ext:'aar')\n}\n\nrepositories {\n    flatDir {\n        dirs 'libs'\n    }\n}\n"
  },
  {
    "path": "wa/workloads/benchmarkpi/uiauto/app/src/main/AndroidManifest.xml",
    "content": "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n<manifest xmlns:android=\"http://schemas.android.com/apk/res/android\"\n    package=\"com.arm.wa.uiauto.benchmarkpi\"\n    android:versionCode=\"1\"\n    android:versionName=\"1.0\">\n\n\n    <instrumentation\n        android:name=\"android.support.test.runner.AndroidJUnitRunner\"\n        android:targetPackage=\"${applicationId}\"/>\n\n</manifest>\n\n"
  },
  {
    "path": "wa/workloads/benchmarkpi/uiauto/app/src/main/java/com/arm/wa/uiauto/benchmarkpi/UiAutomation.java",
    "content": "/*    Copyright 2013-2017 ARM Limited\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n*/\n\n\npackage com.arm.wa.uiauto.benchmarkpi;\n\nimport android.app.Activity;\nimport android.os.Bundle;\nimport android.support.test.runner.AndroidJUnit4;\nimport android.support.test.uiautomator.UiObject;\nimport android.support.test.uiautomator.UiSelector;\nimport android.util.Log;\n\nimport com.arm.wa.uiauto.BaseUiAutomation;\n\nimport org.junit.Test;\nimport org.junit.runner.RunWith;\n\n@RunWith(AndroidJUnit4.class)\npublic class UiAutomation extends BaseUiAutomation {\n\n    public static String TAG = \"benchmarkpi\";\n\n    public Bundle parameters;\n    public String packageID;\n\n    @Test\n    public void setup() throws Exception {\n        dismissAndroidVersionPopup();\n    }\n\n    @Test\n    public void runWorkload() throws Exception {\n        startTest();\n        waitForResults();\n    }\n\n    @Test\n    public void extractResults() throws Exception {\n        UiSelector selector = new UiSelector();\n        UiObject resultsText = mDevice.findObject(selector.textContains(\"You calculated Pi in\")\n                                                          .className(\"android.widget.TextView\"));\n        Log.v(TAG, resultsText.getText());\n    }\n\n    public void startTest() throws Exception{\n        UiSelector selector = new UiSelector();\n        UiObject benchButton = mDevice.findObject(selector.text(\"Benchmark my 
Android!\")\n                                                          .className(\"android.widget.Button\"));\n        benchButton.click();\n    }\n\n    public void waitForResults() throws Exception{\n        UiSelector selector = new UiSelector();\n        UiObject submitButton = mDevice.findObject(selector.text(\"Submit\")\n                                                           .className(\"android.widget.Button\"));\n        submitButton.waitForExists(10 * 1000);\n    }\n}\n"
  },
  {
    "path": "wa/workloads/benchmarkpi/uiauto/build.gradle",
    "content": "// Top-level build file where you can add configuration options common to all sub-projects/modules.\n\nbuildscript {\n    repositories {\n        jcenter()\n        google()\n    }\n    dependencies {\n        classpath 'com.android.tools.build:gradle:7.2.1'\n\n        // NOTE: Do not place your application dependencies here; they belong\n        // in the individual module build.gradle files\n    }\n}\n\nallprojects {\n    repositories {\n        jcenter()\n        google()\n    }\n}\n\ntask clean(type: Delete) {\n    delete rootProject.buildDir\n}\n"
  },
  {
    "path": "wa/workloads/benchmarkpi/uiauto/build.sh",
    "content": "#!/bin/bash\n#    Copyright 2013-2017 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\nset -e\n\n# CD into build dir if possible - allows building from any directory\nscript_path='.'\nif `readlink -f $0 &>/dev/null`; then\n    script_path=`readlink -f $0 2>/dev/null`\nfi\nscript_dir=`dirname $script_path`\ncd $script_dir\n\n# Ensure gradlew exists before starting\nif [[ ! -f gradlew ]]; then\n    echo 'gradlew file not found! Check that you are in the right directory.'\n    exit 9\nfi\n\n# Copy base class library from wa dist\nlibs_dir=app/libs\nbase_class=`python3 -c \"import os, wa; print(os.path.join(os.path.dirname(wa.__file__), 'framework', 'uiauto', 'uiauto.aar'))\"`\nmkdir -p $libs_dir\ncp $base_class $libs_dir\n\n# Build and return appropriate exit code if failed\n# gradle build\n./gradlew clean :app:assembleDebug\nexit_code=$?\nif [[ $exit_code -ne 0 ]]; then\n    echo \"ERROR: 'gradle build' exited with code $exit_code\"\n    exit $exit_code\nfi\n\n# If successful move APK file to workload folder (overwrite previous)\npackage=com.arm.wa.uiauto.benchmarkpi\nrm -f ../$package\nif [[ -f app/build/outputs/apk/debug/$package.apk ]]; then\n    cp app/build/outputs/apk/debug/$package.apk ../$package.apk\nelse\n    echo 'ERROR: UiAutomator apk could not be found!'\n    exit 9\nfi\n"
  },
  {
    "path": "wa/workloads/benchmarkpi/uiauto/gradle/wrapper/gradle-wrapper.properties",
    "content": "#Wed May 03 15:42:44 BST 2017\ndistributionBase=GRADLE_USER_HOME\ndistributionPath=wrapper/dists\nzipStoreBase=GRADLE_USER_HOME\nzipStorePath=wrapper/dists\ndistributionUrl=https\\://services.gradle.org/distributions/gradle-7.3.3-all.zip\n\n"
  },
  {
    "path": "wa/workloads/benchmarkpi/uiauto/gradlew",
    "content": "#!/usr/bin/env bash\n\n##############################################################################\n##\n##  Gradle start up script for UN*X\n##\n##############################################################################\n\n# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.\nDEFAULT_JVM_OPTS=\"\"\n\nAPP_NAME=\"Gradle\"\nAPP_BASE_NAME=`basename \"$0\"`\n\n# Use the maximum available, or set MAX_FD != -1 to use that value.\nMAX_FD=\"maximum\"\n\nwarn ( ) {\n    echo \"$*\"\n}\n\ndie ( ) {\n    echo\n    echo \"$*\"\n    echo\n    exit 1\n}\n\n# OS specific support (must be 'true' or 'false').\ncygwin=false\nmsys=false\ndarwin=false\ncase \"`uname`\" in\n  CYGWIN* )\n    cygwin=true\n    ;;\n  Darwin* )\n    darwin=true\n    ;;\n  MINGW* )\n    msys=true\n    ;;\nesac\n\n# Attempt to set APP_HOME\n# Resolve links: $0 may be a link\nPRG=\"$0\"\n# Need this for relative symlinks.\nwhile [ -h \"$PRG\" ] ; do\n    ls=`ls -ld \"$PRG\"`\n    link=`expr \"$ls\" : '.*-> \\(.*\\)$'`\n    if expr \"$link\" : '/.*' > /dev/null; then\n        PRG=\"$link\"\n    else\n        PRG=`dirname \"$PRG\"`\"/$link\"\n    fi\ndone\nSAVED=\"`pwd`\"\ncd \"`dirname \\\"$PRG\\\"`/\" >/dev/null\nAPP_HOME=\"`pwd -P`\"\ncd \"$SAVED\" >/dev/null\n\nCLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar\n\n# Determine the Java command to use to start the JVM.\nif [ -n \"$JAVA_HOME\" ] ; then\n    if [ -x \"$JAVA_HOME/jre/sh/java\" ] ; then\n        # IBM's JDK on AIX uses strange locations for the executables\n        JAVACMD=\"$JAVA_HOME/jre/sh/java\"\n    else\n        JAVACMD=\"$JAVA_HOME/bin/java\"\n    fi\n    if [ ! 
-x \"$JAVACMD\" ] ; then\n        die \"ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME\n\nPlease set the JAVA_HOME variable in your environment to match the\nlocation of your Java installation.\"\n    fi\nelse\n    JAVACMD=\"java\"\n    which java >/dev/null 2>&1 || die \"ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.\n\nPlease set the JAVA_HOME variable in your environment to match the\nlocation of your Java installation.\"\nfi\n\n# Increase the maximum file descriptors if we can.\nif [ \"$cygwin\" = \"false\" -a \"$darwin\" = \"false\" ] ; then\n    MAX_FD_LIMIT=`ulimit -H -n`\n    if [ $? -eq 0 ] ; then\n        if [ \"$MAX_FD\" = \"maximum\" -o \"$MAX_FD\" = \"max\" ] ; then\n            MAX_FD=\"$MAX_FD_LIMIT\"\n        fi\n        ulimit -n $MAX_FD\n        if [ $? -ne 0 ] ; then\n            warn \"Could not set maximum file descriptor limit: $MAX_FD\"\n        fi\n    else\n        warn \"Could not query maximum file descriptor limit: $MAX_FD_LIMIT\"\n    fi\nfi\n\n# For Darwin, add options to specify how the application appears in the dock\nif $darwin; then\n    GRADLE_OPTS=\"$GRADLE_OPTS \\\"-Xdock:name=$APP_NAME\\\" \\\"-Xdock:icon=$APP_HOME/media/gradle.icns\\\"\"\nfi\n\n# For Cygwin, switch paths to Windows format before running java\nif $cygwin ; then\n    APP_HOME=`cygpath --path --mixed \"$APP_HOME\"`\n    CLASSPATH=`cygpath --path --mixed \"$CLASSPATH\"`\n    JAVACMD=`cygpath --unix \"$JAVACMD\"`\n\n    # We build the pattern for arguments to be converted via cygpath\n    ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`\n    SEP=\"\"\n    for dir in $ROOTDIRSRAW ; do\n        ROOTDIRS=\"$ROOTDIRS$SEP$dir\"\n        SEP=\"|\"\n    done\n    OURCYGPATTERN=\"(^($ROOTDIRS))\"\n    # Add a user-defined pattern to the cygpath arguments\n    if [ \"$GRADLE_CYGPATTERN\" != \"\" ] ; then\n        OURCYGPATTERN=\"$OURCYGPATTERN|($GRADLE_CYGPATTERN)\"\n    fi\n    # Now convert the arguments - 
kludge to limit ourselves to /bin/sh\n    i=0\n    for arg in \"$@\" ; do\n        CHECK=`echo \"$arg\"|egrep -c \"$OURCYGPATTERN\" -`\n        CHECK2=`echo \"$arg\"|egrep -c \"^-\"`                                 ### Determine if an option\n\n        if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then                    ### Added a condition\n            eval `echo args$i`=`cygpath --path --ignore --mixed \"$arg\"`\n        else\n            eval `echo args$i`=\"\\\"$arg\\\"\"\n        fi\n        i=$((i+1))\n    done\n    case $i in\n        (0) set -- ;;\n        (1) set -- \"$args0\" ;;\n        (2) set -- \"$args0\" \"$args1\" ;;\n        (3) set -- \"$args0\" \"$args1\" \"$args2\" ;;\n        (4) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" ;;\n        (5) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" ;;\n        (6) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" \"$args5\" ;;\n        (7) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" \"$args5\" \"$args6\" ;;\n        (8) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" \"$args5\" \"$args6\" \"$args7\" ;;\n        (9) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" \"$args5\" \"$args6\" \"$args7\" \"$args8\" ;;\n    esac\nfi\n\n# Split up the JVM_OPTS And GRADLE_OPTS values into an array, following the shell quoting and substitution rules\nfunction splitJvmOpts() {\n    JVM_OPTS=(\"$@\")\n}\neval splitJvmOpts $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS\nJVM_OPTS[${#JVM_OPTS[*]}]=\"-Dorg.gradle.appname=$APP_BASE_NAME\"\n\nexec \"$JAVACMD\" \"${JVM_OPTS[@]}\" -classpath \"$CLASSPATH\" org.gradle.wrapper.GradleWrapperMain \"$@\"\n"
  },
  {
    "path": "wa/workloads/benchmarkpi/uiauto/gradlew.bat",
    "content": "@if \"%DEBUG%\" == \"\" @echo off\r\n@rem ##########################################################################\r\n@rem\r\n@rem  Gradle startup script for Windows\r\n@rem\r\n@rem ##########################################################################\r\n\r\n@rem Set local scope for the variables with windows NT shell\r\nif \"%OS%\"==\"Windows_NT\" setlocal\r\n\r\n@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.\r\nset DEFAULT_JVM_OPTS=\r\n\r\nset DIRNAME=%~dp0\r\nif \"%DIRNAME%\" == \"\" set DIRNAME=.\r\nset APP_BASE_NAME=%~n0\r\nset APP_HOME=%DIRNAME%\r\n\r\n@rem Find java.exe\r\nif defined JAVA_HOME goto findJavaFromJavaHome\r\n\r\nset JAVA_EXE=java.exe\r\n%JAVA_EXE% -version >NUL 2>&1\r\nif \"%ERRORLEVEL%\" == \"0\" goto init\r\n\r\necho.\r\necho ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.\r\necho.\r\necho Please set the JAVA_HOME variable in your environment to match the\r\necho location of your Java installation.\r\n\r\ngoto fail\r\n\r\n:findJavaFromJavaHome\r\nset JAVA_HOME=%JAVA_HOME:\"=%\r\nset JAVA_EXE=%JAVA_HOME%/bin/java.exe\r\n\r\nif exist \"%JAVA_EXE%\" goto init\r\n\r\necho.\r\necho ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%\r\necho.\r\necho Please set the JAVA_HOME variable in your environment to match the\r\necho location of your Java installation.\r\n\r\ngoto fail\r\n\r\n:init\r\n@rem Get command-line arguments, handling Windowz variants\r\n\r\nif not \"%OS%\" == \"Windows_NT\" goto win9xME_args\r\nif \"%@eval[2+2]\" == \"4\" goto 4NT_args\r\n\r\n:win9xME_args\r\n@rem Slurp the command line arguments.\r\nset CMD_LINE_ARGS=\r\nset _SKIP=2\r\n\r\n:win9xME_args_slurp\r\nif \"x%~1\" == \"x\" goto execute\r\n\r\nset CMD_LINE_ARGS=%*\r\ngoto execute\r\n\r\n:4NT_args\r\n@rem Get arguments from the 4NT Shell from JP Software\r\nset CMD_LINE_ARGS=%$\r\n\r\n:execute\r\n@rem Setup the command line\r\n\r\nset 
CLASSPATH=%APP_HOME%\\gradle\\wrapper\\gradle-wrapper.jar\r\n\r\n@rem Execute Gradle\r\n\"%JAVA_EXE%\" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% \"-Dorg.gradle.appname=%APP_BASE_NAME%\" -classpath \"%CLASSPATH%\" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%\r\n\r\n:end\r\n@rem End local scope for the variables with windows NT shell\r\nif \"%ERRORLEVEL%\"==\"0\" goto mainEnd\r\n\r\n:fail\r\nrem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of\r\nrem the _cmd.exe /c_ return code!\r\nif  not \"\" == \"%GRADLE_EXIT_CONSOLE%\" exit 1\r\nexit /b 1\r\n\r\n:mainEnd\r\nif \"%OS%\"==\"Windows_NT\" endlocal\r\n\r\n:omega\r\n"
  },
  {
    "path": "wa/workloads/benchmarkpi/uiauto/settings.gradle",
    "content": "include ':app'\n"
  },
  {
    "path": "wa/workloads/chrome/__init__.py",
    "content": "#    Copyright 2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\nfrom wa import Parameter, ApkUiautoWorkload\nfrom wa.framework.exception import WorkloadError\n\n\nclass Chrome(ApkUiautoWorkload):\n\n    name = 'chrome'\n    description = '''\n    A workload to perform standard Web browsing tasks with Google Chrome. The\n    workload carries out a number of typical Web-based tasks, navigating through\n    a handful of Wikipedia pages in multiple browser tabs.\n\n    To run the workload in offline mode, a ``pages.tar`` archive and an\n    ``OfflinePages.db`` file are required. For users wishing to generate these\n    files themselves, Chrome should first be operated from an Internet-connected\n    environment and the following Wikipedia pages should be downloaded for\n    offline use within Chrome:\n\n    - https://en.m.wikipedia.org/wiki/Main_Page\n    - https://en.m.wikipedia.org/wiki/United_States\n    - https://en.m.wikipedia.org/wiki/California\n\n    Following this, the files of interest for viewing these pages offline can be\n    found in the ``/data/data/com.android.chrome/app_chrome/Default/Offline\n    Pages`` directory. The ``OfflinePages.db`` file can be copied from the\n    'metadata' subdirectory, while the ``*.mhtml`` files that should make up the\n    ``pages.tar`` file can be found in the 'archives' subdirectory. 
 These page\n    files can then be archived to produce a tarball using a command such as\n    ``tar -cvf pages.tar -C /path/to/archives .``.  Both this and\n    ``OfflinePages.db`` should then be placed in the\n    ``~/.workload_automation/dependencies/chrome/`` directory on your local\n    machine, creating this if it does not already exist.\n\n    Known working APK version: 65.0.3325.109\n    '''\n    package_names = ['com.android.chrome']\n\n    parameters = [\n        Parameter('offline_mode', kind=bool, default=False, description='''\n                  If set to ``True``, the workload will execute in offline mode.\n                  This mode requires root and makes use of a tarball of \\*.mhtml\n                  files 'pages.tar' and a metadata database 'OfflinePages.db'.\n                  The tarball is extracted directly to the application's offline\n                  pages 'archives' directory, while the database is copied to\n                  the offline pages 'metadata' directory.\n                  '''),\n    ]\n\n    @property\n    def requires_network(self):\n        return not self.offline_mode\n\n    @property\n    def requires_rerun(self):\n        # In offline mode we need to restart the application after modifying its data directory\n        return self.offline_mode\n\n    def __init__(self, target, **kwargs):\n        super(Chrome, self).__init__(target, **kwargs)\n        if self.offline_mode:\n            self.deployable_assets = ['pages.tar', 'OfflinePages.db']\n            self.cleanup_assets = True\n\n    def initialize(self, context):\n        super(Chrome, self).initialize(context)\n        if self.offline_mode and not self.target.is_rooted:\n            raise WorkloadError('This workload requires root to set up Chrome for offline usage.')\n\n    def setup_rerun(self):\n        super(Chrome, self).setup_rerun()\n        offline_pages = self.target.path.join(self.target.package_data_directory, self.package, 'app_chrome', 'Default', 
'Offline\\ Pages')\n        metadata_src = self.target.path.join(self.target.working_directory, 'OfflinePages.db')\n        metadata_dst = self.target.path.join(offline_pages, 'metadata')\n        archives_src = self.target.path.join(self.target.working_directory, 'pages.tar')\n        archives_dst = self.target.path.join(offline_pages, 'archives')\n        owner = self.target.execute(\"{} stat -c '%u' {}\".format(self.target.busybox, offline_pages), as_root=True).strip()\n        self.target.execute('{} tar -xvf {} -C {}'.format(self.target.busybox, archives_src, archives_dst), as_root=True)\n        self.target.execute('{} cp {} {}'.format(self.target.busybox, metadata_src, metadata_dst), as_root=True)\n        self.target.execute('{0} chown -R {1}:{1} {2}'.format(self.target.busybox, owner, offline_pages), as_root=True)\n"
  },
  {
    "path": "wa/workloads/chrome/uiauto/app/build.gradle",
    "content": "apply plugin: 'com.android.application'\n\nandroid {\n    compileSdkVersion 18\n    buildToolsVersion '25.0.0'\n    defaultConfig {\n        applicationId \"com.arm.wa.uiauto.chrome\"\n        minSdkVersion 18\n        targetSdkVersion 28\n        testInstrumentationRunner \"android.support.test.runner.AndroidJUnitRunner\"\n    }\n    buildTypes {\n        applicationVariants.all { variant ->\n            variant.outputs.each { output ->\n                output.outputFileName = \"com.arm.wa.uiauto.chrome.apk\"\n            }\n        }\n    }\n}\n\ndependencies {\n    implementation fileTree(include: ['*.jar'], dir: 'libs')\n    implementation 'com.android.support.test:runner:0.5'\n    implementation 'com.android.support.test:rules:0.5'\n    implementation 'com.android.support.test.uiautomator:uiautomator-v18:2.1.2'\n    implementation(name: 'uiauto', ext: 'aar')\n}\n\nrepositories {\n    flatDir {\n        dirs 'libs'\n    }\n}\n"
  },
  {
    "path": "wa/workloads/chrome/uiauto/app/src/main/AndroidManifest.xml",
    "content": "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n<manifest xmlns:android=\"http://schemas.android.com/apk/res/android\"\n    package=\"com.arm.wa.uiauto.chrome\"\n    android:versionCode=\"1\"\n    android:versionName=\"1.0\">\n\n\n    <instrumentation\n        android:name=\"android.support.test.runner.AndroidJUnitRunner\"\n        android:targetPackage=\"com.arm.wa.uiauto.chrome\"/>\n\n</manifest>\n"
  },
  {
    "path": "wa/workloads/chrome/uiauto/app/src/main/java/com/arm/wa/uiauto/UiAutomation.java",
    "content": "/*    Copyright 2018 ARM Limited\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n*/\n\npackage com.arm.wa.uiauto.chrome;\n\nimport android.app.Activity;\nimport android.os.Bundle;\nimport org.junit.Test;\nimport org.junit.runner.RunWith;\nimport android.support.test.runner.AndroidJUnit4;\n\nimport android.util.Log;\nimport android.view.KeyEvent;\n\nimport android.support.test.uiautomator.UiObject;\nimport android.support.test.uiautomator.UiObjectNotFoundException;\nimport android.support.test.uiautomator.UiScrollable;\nimport android.support.test.uiautomator.UiSelector;\n\nimport org.junit.Before;\nimport org.junit.Test;\nimport org.junit.runner.RunWith;\n\nimport com.arm.wa.uiauto.ApplaunchInterface;\nimport com.arm.wa.uiauto.BaseUiAutomation;\nimport com.arm.wa.uiauto.UiAutoUtils;\n\n@RunWith(AndroidJUnit4.class)\npublic class UiAutomation extends BaseUiAutomation implements ApplaunchInterface {\n\n    protected Bundle parameters;\n    protected String packageID;\n\n    public static String TAG = \"chrome\";\n\n    @Before\n    public void initialize() throws Exception {\n        parameters = getParams();\n        packageID = getPackageID(parameters);\n    }\n\n    @Test\n    public void setup() throws Exception {\n        setScreenOrientation(ScreenOrientation.NATURAL);\n        runApplicationSetup();\n    }\n\n    public void navigateToPage(String url, boolean from_new_tab) throws Exception {\n        UiObject searchBar, 
urlBar;\n\n        if (from_new_tab) {\n                // On the new tab page, click on the search box to turn it into a url bar\n                searchBar = mDevice.findObject(new UiSelector().resourceId(packageID + \"search_box_text\")\n                                                               .className(\"android.widget.EditText\"));\n                searchBar.click();\n        }\n\n        // Navigate to the specified URL\n        urlBar = mDevice.findObject(new UiSelector().resourceId(packageID + \"url_bar\")\n                                                    .className(\"android.widget.EditText\"));\n        urlBar.click();\n        urlBar.setText(url);\n        pressEnter();\n    }\n\n    public void newTab() throws Exception {\n        UiObject tabSwitcher, newTab;\n\n        // Activate the tab switcher\n        tabSwitcher = mDevice.findObject(new UiSelector().resourceId(packageID + \"tab_switcher_button\")\n                                                         .className(\"android.widget.ImageButton\"));\n        if (tabSwitcher.exists()){\n            tabSwitcher.clickAndWaitForNewWindow(uiAutoTimeout);\n            // Click the New Tab button\n            newTab = mDevice.findObject(new UiSelector().resourceId(packageID + \"new_tab_button\")\n                                                        .className(\"android.widget.Button\"));\n            newTab.clickAndWaitForNewWindow(uiAutoTimeout);\n        }\n        // Support Tablet devices which do not have tab switcher\n        else {\n            UiObject menu_button = mDevice.findObject(new UiSelector().resourceId(packageID + \"menu_button\")\n                                                              .className(\"android.widget.ImageButton\"));\n            menu_button.click();\n            newTab = mDevice.findObject(new UiSelector().resourceId(packageID + \"menu_item_text\")\n                                                        .textContains(\"New tab\"));\n            
newTab.click();\n        }\n    }\n\n    public void followTextLink(String text) throws Exception {\n        UiObject link = mDevice.findObject(new UiSelector().text(text).clickable(true));\n        link.waitForExists(uiAutoTimeout);\n        link.clickAndWaitForNewWindow();\n    }\n\n    @Test\n    public void runWorkload() throws Exception {\n        // Initial browsing within a single tab\n        navigateToPage(\"https://en.m.wikipedia.org/wiki/Main_Page\", true);\n        uiDeviceSwipeUp(100);\n        sleep(1);\n        uiDeviceSwipeUp(100);\n        sleep(1);\n        uiDeviceSwipeUp(250);\n        sleep(1);\n        uiDeviceSwipeDown(100);\n        navigateToPage(\"https://en.m.wikipedia.org/wiki/United_States\", false);\n        uiDeviceSwipeUp(100);\n        sleep(1);\n        uiDeviceSwipeUp(250);\n        sleep(1);\n        uiDeviceSwipeDown(100);\n\n        // URL entry and link navigation within a new tab\n        newTab();\n        navigateToPage(\"https://en.m.wikipedia.org/wiki/California\", true);\n        sleep(2);\n        followTextLink(\"United States\");\n        uiDeviceSwipeDown(50);\n        sleep(1);\n        uiDeviceSwipeUp(10);\n        sleep(3);\n\n        // Pinch to zoom, scroll around\n        UiObject webView = mDevice.findObject(new UiSelector().className(\"android.webkit.WebView\"));\n        uiObjectVertPinchOut(webView, 100, 50);\n        uiDeviceSwipeUp(300);\n        sleep(1);\n        uiObjectVertPinchIn(webView, 100, 50);\n        uiDeviceSwipeUp(100);\n        sleep(1);\n        uiDeviceSwipeUp(100);\n        sleep(3);\n\n        // Go back a page\n        pressBack();\n    }\n\n    @Test\n    public void teardown() throws Exception {\n        unsetScreenOrientation();\n    }\n\n    public void runApplicationSetup() throws Exception {\n        UiObject sendReportBox;\n        UiObject acceptButton, noThanksButton;\n\n        sendReportBox = mDevice.findObject(new UiSelector().resourceId(packageID + 
\"send_report_checkbox\")\n                                                           .className(\"android.widget.CheckBox\"));\n        sendReportBox.click();\n\n        acceptButton = mDevice.findObject(new UiSelector().resourceId(packageID + \"terms_accept\")\n                                                          .className(\"android.widget.Button\"));\n        acceptButton.clickAndWaitForNewWindow(uiAutoTimeout);\n\n        noThanksButton = mDevice.findObject(new UiSelector().resourceId(packageID + \"negative_button\")\n                                                            .className(\"android.widget.Button\"));\n        noThanksButton.clickAndWaitForNewWindow(uiAutoTimeout);\n    }\n\n    public UiObject getLaunchEndObject() {\n        UiObject launchEndObject = mDevice.findObject(new UiSelector().className(\"android.widget.EditText\"));\n        return launchEndObject;\n    }\n\n    public String getLaunchCommand() {\n        String launch_command = UiAutoUtils.createLaunchCommand(parameters);\n        return launch_command;\n    }\n\n    public void setWorkloadParameters(Bundle workload_parameters) {\n        parameters = workload_parameters;\n        packageID = getPackageID(parameters);\n    }\n}\n"
  },
  {
    "path": "wa/workloads/chrome/uiauto/build.gradle",
    "content": "// Top-level build file where you can add configuration options common to all sub-projects/modules.\n\nbuildscript {\n    repositories {\n        jcenter()\n        google()\n    }\n    dependencies {\n        classpath 'com.android.tools.build:gradle:7.2.1'\n\n        // NOTE: Do not place your application dependencies here; they belong\n        // in the individual module build.gradle files\n    }\n}\n\nallprojects {\n    repositories {\n        jcenter()\n        google()\n    }\n}\n\ntask clean(type: Delete) {\n    delete rootProject.buildDir\n}\n"
  },
  {
    "path": "wa/workloads/chrome/uiauto/build.sh",
    "content": "#!/bin/bash\n#    Copyright 2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\n\n# CD into build dir if possible - allows building from any directory\nscript_path='.'\nif `readlink -f $0 &>/dev/null`; then\n    script_path=`readlink -f $0 2>/dev/null`\nfi\nscript_dir=`dirname $script_path`\ncd $script_dir\n\n# Ensure gradlew exists before starting\nif [[ ! -f gradlew ]]; then\n    echo 'gradlew file not found! Check that you are in the right directory.'\n    exit 9\nfi\n\n# Copy base class library from wlauto dist\nlibs_dir=app/libs\nbase_class=`python3 -c \"import os, wa; print(os.path.join(os.path.dirname(wa.__file__), 'framework', 'uiauto', 'uiauto.aar'))\"`\nmkdir -p $libs_dir\ncp $base_class $libs_dir\n\n# Build and return appropriate exit code if failed\n# gradle build\n./gradlew clean :app:assembleDebug\nexit_code=$?\nif [[ $exit_code -ne 0 ]]; then\n    echo \"ERROR: 'gradle build' exited with code $exit_code\"\n    exit $exit_code\nfi\n\n# If successful move APK file to workload folder (overwrite previous)\nrm -f ../com.arm.wa.uiauto.chrome\nif [[ -f app/build/outputs/apk/debug/com.arm.wa.uiauto.chrome.apk ]]; then\n    cp app/build/outputs/apk/debug/com.arm.wa.uiauto.chrome.apk ../com.arm.wa.uiauto.chrome.apk\nelse\n    echo 'ERROR: UiAutomator apk could not be found!'\n    exit 9\nfi\n"
  },
  {
    "path": "wa/workloads/chrome/uiauto/gradle/wrapper/gradle-wrapper.properties",
    "content": "#Wed May 03 15:42:44 BST 2017\ndistributionBase=GRADLE_USER_HOME\ndistributionPath=wrapper/dists\nzipStoreBase=GRADLE_USER_HOME\nzipStorePath=wrapper/dists\ndistributionUrl=https\\://services.gradle.org/distributions/gradle-7.3.3-all.zip\n"
  },
  {
    "path": "wa/workloads/chrome/uiauto/gradlew",
    "content": "#!/usr/bin/env bash\n\n##############################################################################\n##\n##  Gradle start up script for UN*X\n##\n##############################################################################\n\n# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.\nDEFAULT_JVM_OPTS=\"\"\n\nAPP_NAME=\"Gradle\"\nAPP_BASE_NAME=`basename \"$0\"`\n\n# Use the maximum available, or set MAX_FD != -1 to use that value.\nMAX_FD=\"maximum\"\n\nwarn ( ) {\n    echo \"$*\"\n}\n\ndie ( ) {\n    echo\n    echo \"$*\"\n    echo\n    exit 1\n}\n\n# OS specific support (must be 'true' or 'false').\ncygwin=false\nmsys=false\ndarwin=false\ncase \"`uname`\" in\n  CYGWIN* )\n    cygwin=true\n    ;;\n  Darwin* )\n    darwin=true\n    ;;\n  MINGW* )\n    msys=true\n    ;;\nesac\n\n# Attempt to set APP_HOME\n# Resolve links: $0 may be a link\nPRG=\"$0\"\n# Need this for relative symlinks.\nwhile [ -h \"$PRG\" ] ; do\n    ls=`ls -ld \"$PRG\"`\n    link=`expr \"$ls\" : '.*-> \\(.*\\)$'`\n    if expr \"$link\" : '/.*' > /dev/null; then\n        PRG=\"$link\"\n    else\n        PRG=`dirname \"$PRG\"`\"/$link\"\n    fi\ndone\nSAVED=\"`pwd`\"\ncd \"`dirname \\\"$PRG\\\"`/\" >/dev/null\nAPP_HOME=\"`pwd -P`\"\ncd \"$SAVED\" >/dev/null\n\nCLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar\n\n# Determine the Java command to use to start the JVM.\nif [ -n \"$JAVA_HOME\" ] ; then\n    if [ -x \"$JAVA_HOME/jre/sh/java\" ] ; then\n        # IBM's JDK on AIX uses strange locations for the executables\n        JAVACMD=\"$JAVA_HOME/jre/sh/java\"\n    else\n        JAVACMD=\"$JAVA_HOME/bin/java\"\n    fi\n    if [ ! 
-x \"$JAVACMD\" ] ; then\n        die \"ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME\n\nPlease set the JAVA_HOME variable in your environment to match the\nlocation of your Java installation.\"\n    fi\nelse\n    JAVACMD=\"java\"\n    which java >/dev/null 2>&1 || die \"ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.\n\nPlease set the JAVA_HOME variable in your environment to match the\nlocation of your Java installation.\"\nfi\n\n# Increase the maximum file descriptors if we can.\nif [ \"$cygwin\" = \"false\" -a \"$darwin\" = \"false\" ] ; then\n    MAX_FD_LIMIT=`ulimit -H -n`\n    if [ $? -eq 0 ] ; then\n        if [ \"$MAX_FD\" = \"maximum\" -o \"$MAX_FD\" = \"max\" ] ; then\n            MAX_FD=\"$MAX_FD_LIMIT\"\n        fi\n        ulimit -n $MAX_FD\n        if [ $? -ne 0 ] ; then\n            warn \"Could not set maximum file descriptor limit: $MAX_FD\"\n        fi\n    else\n        warn \"Could not query maximum file descriptor limit: $MAX_FD_LIMIT\"\n    fi\nfi\n\n# For Darwin, add options to specify how the application appears in the dock\nif $darwin; then\n    GRADLE_OPTS=\"$GRADLE_OPTS \\\"-Xdock:name=$APP_NAME\\\" \\\"-Xdock:icon=$APP_HOME/media/gradle.icns\\\"\"\nfi\n\n# For Cygwin, switch paths to Windows format before running java\nif $cygwin ; then\n    APP_HOME=`cygpath --path --mixed \"$APP_HOME\"`\n    CLASSPATH=`cygpath --path --mixed \"$CLASSPATH\"`\n    JAVACMD=`cygpath --unix \"$JAVACMD\"`\n\n    # We build the pattern for arguments to be converted via cygpath\n    ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`\n    SEP=\"\"\n    for dir in $ROOTDIRSRAW ; do\n        ROOTDIRS=\"$ROOTDIRS$SEP$dir\"\n        SEP=\"|\"\n    done\n    OURCYGPATTERN=\"(^($ROOTDIRS))\"\n    # Add a user-defined pattern to the cygpath arguments\n    if [ \"$GRADLE_CYGPATTERN\" != \"\" ] ; then\n        OURCYGPATTERN=\"$OURCYGPATTERN|($GRADLE_CYGPATTERN)\"\n    fi\n    # Now convert the arguments - 
kludge to limit ourselves to /bin/sh\n    i=0\n    for arg in \"$@\" ; do\n        CHECK=`echo \"$arg\"|egrep -c \"$OURCYGPATTERN\" -`\n        CHECK2=`echo \"$arg\"|egrep -c \"^-\"`                                 ### Determine if an option\n\n        if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then                    ### Added a condition\n            eval `echo args$i`=`cygpath --path --ignore --mixed \"$arg\"`\n        else\n            eval `echo args$i`=\"\\\"$arg\\\"\"\n        fi\n        i=$((i+1))\n    done\n    case $i in\n        (0) set -- ;;\n        (1) set -- \"$args0\" ;;\n        (2) set -- \"$args0\" \"$args1\" ;;\n        (3) set -- \"$args0\" \"$args1\" \"$args2\" ;;\n        (4) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" ;;\n        (5) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" ;;\n        (6) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" \"$args5\" ;;\n        (7) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" \"$args5\" \"$args6\" ;;\n        (8) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" \"$args5\" \"$args6\" \"$args7\" ;;\n        (9) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" \"$args5\" \"$args6\" \"$args7\" \"$args8\" ;;\n    esac\nfi\n\n# Split up the JVM_OPTS And GRADLE_OPTS values into an array, following the shell quoting and substitution rules\nfunction splitJvmOpts() {\n    JVM_OPTS=(\"$@\")\n}\neval splitJvmOpts $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS\nJVM_OPTS[${#JVM_OPTS[*]}]=\"-Dorg.gradle.appname=$APP_BASE_NAME\"\n\nexec \"$JAVACMD\" \"${JVM_OPTS[@]}\" -classpath \"$CLASSPATH\" org.gradle.wrapper.GradleWrapperMain \"$@\"\n"
  },
  {
    "path": "wa/workloads/chrome/uiauto/gradlew.bat",
    "content": "@if \"%DEBUG%\" == \"\" @echo off\r\n@rem ##########################################################################\r\n@rem\r\n@rem  Gradle startup script for Windows\r\n@rem\r\n@rem ##########################################################################\r\n\r\n@rem Set local scope for the variables with windows NT shell\r\nif \"%OS%\"==\"Windows_NT\" setlocal\r\n\r\n@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.\r\nset DEFAULT_JVM_OPTS=\r\n\r\nset DIRNAME=%~dp0\r\nif \"%DIRNAME%\" == \"\" set DIRNAME=.\r\nset APP_BASE_NAME=%~n0\r\nset APP_HOME=%DIRNAME%\r\n\r\n@rem Find java.exe\r\nif defined JAVA_HOME goto findJavaFromJavaHome\r\n\r\nset JAVA_EXE=java.exe\r\n%JAVA_EXE% -version >NUL 2>&1\r\nif \"%ERRORLEVEL%\" == \"0\" goto init\r\n\r\necho.\r\necho ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.\r\necho.\r\necho Please set the JAVA_HOME variable in your environment to match the\r\necho location of your Java installation.\r\n\r\ngoto fail\r\n\r\n:findJavaFromJavaHome\r\nset JAVA_HOME=%JAVA_HOME:\"=%\r\nset JAVA_EXE=%JAVA_HOME%/bin/java.exe\r\n\r\nif exist \"%JAVA_EXE%\" goto init\r\n\r\necho.\r\necho ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%\r\necho.\r\necho Please set the JAVA_HOME variable in your environment to match the\r\necho location of your Java installation.\r\n\r\ngoto fail\r\n\r\n:init\r\n@rem Get command-line arguments, handling Windowz variants\r\n\r\nif not \"%OS%\" == \"Windows_NT\" goto win9xME_args\r\nif \"%@eval[2+2]\" == \"4\" goto 4NT_args\r\n\r\n:win9xME_args\r\n@rem Slurp the command line arguments.\r\nset CMD_LINE_ARGS=\r\nset _SKIP=2\r\n\r\n:win9xME_args_slurp\r\nif \"x%~1\" == \"x\" goto execute\r\n\r\nset CMD_LINE_ARGS=%*\r\ngoto execute\r\n\r\n:4NT_args\r\n@rem Get arguments from the 4NT Shell from JP Software\r\nset CMD_LINE_ARGS=%$\r\n\r\n:execute\r\n@rem Setup the command line\r\n\r\nset 
CLASSPATH=%APP_HOME%\\gradle\\wrapper\\gradle-wrapper.jar\r\n\r\n@rem Execute Gradle\r\n\"%JAVA_EXE%\" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% \"-Dorg.gradle.appname=%APP_BASE_NAME%\" -classpath \"%CLASSPATH%\" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%\r\n\r\n:end\r\n@rem End local scope for the variables with windows NT shell\r\nif \"%ERRORLEVEL%\"==\"0\" goto mainEnd\r\n\r\n:fail\r\nrem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of\r\nrem the _cmd.exe /c_ return code!\r\nif  not \"\" == \"%GRADLE_EXIT_CONSOLE%\" exit 1\r\nexit /b 1\r\n\r\n:mainEnd\r\nif \"%OS%\"==\"Windows_NT\" endlocal\r\n\r\n:omega\r\n"
  },
  {
    "path": "wa/workloads/chrome/uiauto/settings.gradle",
    "content": "include ':app'\n"
  },
  {
    "path": "wa/workloads/deepbench/__init__.py",
    "content": "#    Copyright 2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\n# pylint: disable=E1101,W0201\n\nimport os\nimport re\n\nimport pandas as pd\n\nfrom wa import Workload, Parameter, Alias, Executable\nfrom wa.utils.types import numeric\n\n\nclass Deepbench(Workload):\n\n    name = 'deepbench'\n    description = \"\"\"\n    Benchmarks operations that are important to deep learning. Including GEMM\n    and convolution.\n\n    The benchmark and its documentation are available here:\n\n        https://github.com/baidu-research/DeepBench\n\n\n    .. note:: parameters of matrices used in each sub-test are added as\n              classifiers to the metrics. See the benchmark documentation\n              for the explanation of the various parameters\n\n    .. 
note:: at the moment only the \"Arm Benchmarks\" subset of DeepBench\n              is supported.\n\n    \"\"\"\n\n    parameters = [\n        Parameter('test', default='gemm',\n                  allowed_values=['gemm', 'conv', 'sparse'],\n                  description='''\n                  Specifies which of the available benchmarks will be run.\n\n                  gemm\n                    Performs GEneral Matrix Multiplication of dense matrices\n                    of varying sizes.\n\n                  conv\n                    Performs convolutions on inputs in NCHW format.\n\n                  sparse\n                    Performs GEneral Matrix Multiplication of sparse matrices\n                    of varying sizes, and compares them to corresponding dense\n                    operations.\n\n                  '''),\n    ]\n\n    aliases = [\n        Alias('deep-gemm', test='gemm'),\n        Alias('deep-conv', test='conv'),\n        Alias('deep-sparse', test='sparse'),\n    ]\n\n    test_metrics = {\n        'gemm': ['time (msec)', 'GOPS'],\n        'conv': ['fwd_time (usec)'],\n        'sparse': ['sparse time (usec)', 'dense time (usec)', 'speedup'],\n    }\n\n    lower_is_better = {\n        'time (msec)': True,\n        'GOPS': False,\n        'fwd_time (usec)': True,\n        'sparse time (usec)': True,\n        'dense time (usec)': True,\n        'speedup': False,\n    }\n\n    installed = {}\n\n    def initialize(self, context):\n        self.exe_name = '{}_bench'.format(self.test)\n        if self.exe_name not in self.installed:\n            resource = Executable(self, self.target.abi, self.exe_name)\n            host_exe = context.get_resource(resource)\n            self.target.killall(self.exe_name)\n            self.installed[self.exe_name] = self.target.install(host_exe)\n        self.target_exe = self.installed[self.exe_name]\n\n    def setup(self, context):\n        self.target.killall(self.exe_name)\n\n    def run(self, context):\n        
self.output = None\n        try:\n            timeout = 10800\n            self.output = self.target.execute(self.target_exe, timeout=timeout)\n        except KeyboardInterrupt:\n            self.target.killall(self.exe_name)\n            raise\n\n    def extract_results(self, context):\n        if self.output:\n            outfile = os.path.join(context.output_directory, '{}.output'.format(self.test))\n            with open(outfile, 'w') as wfh:\n                wfh.write(self.output)\n            context.add_artifact('deepbench-output', outfile, 'raw', \"deepbench's stdout\")\n\n    def update_output(self, context):\n        raw_file = context.get_artifact_path('deepbench-output')\n        if not raw_file:\n            return\n        table = read_result_table(raw_file)\n        for _, row in table.iterrows():\n            items = dict(row)\n\n            metrics = []\n            for metric_name in self.test_metrics[self.test]:\n                metrics.append((metric_name, items.pop(metric_name)))\n\n            for name, value in metrics:\n                context.add_metric(name, value,\n                                   lower_is_better=self.lower_is_better[name],\n                                   classifiers=items)\n\n    def finalize(self, context):\n        if self.cleanup_assets:\n            if self.exe_name in self.installed:\n                self.target.uninstall(self.exe_name)\n                del self.installed[self.exe_name]\n\n\ndef numeric_best_effort(value):\n    try:\n        return numeric(value)\n    except ValueError:\n        return value\n\n\ndef read_result_table(filepath):\n    columns = []\n    entries = []\n    with open(filepath) as fh:\n        try:\n            # fast-forward to the header\n            line = next(fh)\n            while not line.startswith('----'):\n                line = next(fh)\n            header_line = next(fh)\n            haader_sep = re.compile(r'(?<=[) ]) ')\n            # Since headers can contain spaces, 
use two spaces as column separator\n            parts = [p.strip() for p in haader_sep.split(header_line)]\n            columns = [p for p in parts if p]\n\n            line = next(fh)\n            while line.strip():\n                if line.startswith('----'):\n                    line = next(fh)\n                row = [numeric_best_effort(i) for i in line.strip().split()]\n                entries.append(row)\n                line = next(fh)\n        except StopIteration:\n            pass\n\n    return pd.DataFrame(entries, columns=columns)\n"
  },
  {
    "path": "wa/workloads/dhrystone/__init__.py",
    "content": "#    Copyright 2013-2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\n# pylint: disable=E1101,W0201\n\nimport os\nimport re\n\nfrom wa import Workload, Parameter, ConfigError, Executable\nfrom wa.utils.exec_control import once\nfrom wa.utils.types import cpu_mask\n\n\nclass Dhrystone(Workload):\n\n    name = 'dhrystone'\n    description = \"\"\"\n    Runs the Dhrystone benchmark.\n\n    Original source from::\n\n        http://classes.soe.ucsc.edu/cmpe202/benchmarks/standard/dhrystone.c\n\n    This version has been modified to configure duration and the number of\n    threads used.\n\n    \"\"\"\n\n    bm_regex = re.compile(r'This machine benchmarks at (?P<score>\\d+)')\n    dmips_regex = re.compile(r'(?P<score>\\d+) DMIPS')\n    time_regex = re.compile(r'Total dhrystone run time: (?P<time>[0-9.]+)')\n\n    default_mloops = 100\n\n    parameters = [\n        Parameter('duration', kind=int, default=0,\n                  description='''\n                  The duration, in seconds, for which dhrystone will be\n                  executed. Either this or ``mloops`` should be specified but\n                  not both.\n                  '''),\n        Parameter('mloops', kind=int, default=0,\n                  description='''\n                  Millions of loops to run. Either this or ``duration`` should\n                  be specified, but not both. 
If neither is specified, this\n                  will default to ``{}``\n                  '''.format(default_mloops)),\n        Parameter('threads', kind=int, default=4,\n                  description='''\n                  The number of separate dhrystone \"threads\" that will be forked.\n                  '''),\n        Parameter('delay', kind=int, default=0,\n                  description=('''\n                  The delay, in seconds, between kicking off of dhrystone\n                  threads (if ``threads`` > 1).\n                  ''')),\n        Parameter('cpus', kind=cpu_mask, default=0, aliases=['taskset_mask'],\n                  description=''' The processes spawned by dhrystone will be\n                  pinned to cores as specified by this parameter. The mask can\n                  be specified directly as a mask, as a list of cpus or a sysfs-\n                  style string '''),\n    ]\n\n    @once\n    def initialize(self, context):\n        resource = Executable(self, self.target.abi, 'dhrystone')\n        host_exe = context.get_resource(resource)\n        Dhrystone.target_exe = self.target.install(host_exe)\n\n    def setup(self, context):\n        if self.mloops:\n            execution_mode = '-l {}'.format(self.mloops)\n        else:\n            execution_mode = '-r {}'.format(self.duration)\n        if self.cpus:\n            taskset_string = '{} taskset {} '.format(self.target.busybox,\n                                                     self.cpus.mask())\n        else:\n            taskset_string = ''\n        self.command = '{}{} {} -t {} -d {}'.format(taskset_string,\n                                                    self.target_exe,\n                                                    execution_mode,\n                                                    self.threads, self.delay)\n        if self.duration:\n            self.timeout = self.duration + self.delay * self.threads + 10\n        else:\n            self.timeout = 300\n\n      
  self.target.killall('dhrystone')\n\n    def run(self, context):\n        self.output = None\n        try:\n            self.output = self.target.execute(self.command,\n                                              timeout=self.timeout,\n                                              check_exit_code=False)\n        except KeyboardInterrupt:\n            self.target.killall('dhrystone')\n            raise\n\n    def extract_results(self, context):\n        if self.output:\n            outfile = os.path.join(context.output_directory, 'dhrystone.output')\n            with open(outfile, 'w') as wfh:\n                wfh.write(self.output)\n            context.add_artifact('dhrystone-output', outfile, 'raw', \"dhrystone's stdout\")\n\n    def update_output(self, context):\n        if not self.output:\n            return\n\n        score_count = 0\n        dmips_count = 0\n        total_score = 0\n        total_dmips = 0\n\n        for line in self.output.split('\\n'):\n            match = self.time_regex.search(line)\n            if match:\n                context.add_metric('time', float(match.group('time')), 'seconds',\n                                   lower_is_better=True)\n            else:\n                match = self.bm_regex.search(line)\n                if match:\n                    metric = 'thread {} score'.format(score_count)\n                    value = int(match.group('score'))\n                    context.add_metric(metric, value)\n                    score_count += 1\n                    total_score += value\n                else:\n                    match = self.dmips_regex.search(line)\n                    if match:\n                        metric = 'thread {} DMIPS'.format(dmips_count)\n                        value = int(match.group('score'))\n                        context.add_metric(metric, value)\n                        dmips_count += 1\n                        total_dmips += value\n\n        context.add_metric('total DMIPS', total_dmips)\n  
      context.add_metric('total score', total_score)\n\n    @once\n    def finalize(self, context):\n        if self.uninstall:\n            self.target.uninstall('dhrystone')\n\n    def validate(self):\n        if self.mloops and self.duration:  # pylint: disable=E0203\n            msg = 'mloops and duration cannot be both specified at the '\\\n                  'same time for dhrystone.'\n            raise ConfigError(msg)\n        if not self.mloops and not self.duration:  # pylint: disable=E0203\n            self.mloops = self.default_mloops\n"
  },
  {
    "path": "wa/workloads/dhrystone/src/Makefile",
    "content": "#    Copyright 2013-2015 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\ndhrystone: dhrystone.c\n\t$(CROSS_COMPILE)gcc -O3 -static dhrystone.c -o dhrystone\n"
  },
  {
    "path": "wa/workloads/dhrystone/src/dhrystone.c",
    "content": "/* ARM modifications to the original Dhrystone are */\n/*    Copyright 2013-2017 ARM Limited\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n*/\n\n\n/***** hpda:net.sources / homxb!gemini /  1:58 am  Apr  1, 1986*/\n/*\tEVERBODY:\tPlease read \"APOLOGY\" below. -rick 01/06/85\n *\t\t\tSee introduction in net.arch, or net.micro\n *\n *\t\"DHRYSTONE\" Benchmark Program\n *\n *\tVersion:\tC/1.1, 12/01/84\n *\n *\tDate:\t\tPROGRAM updated 01/06/86, RESULTS updated 03/31/86\n *\n *\tAuthor:\t\tReinhold P. Weicker,  CACM Vol 27, No 10, 10/84 pg. 1013\n *\t\t\tTranslated from ADA by Rick Richardson\n *\t\t\tEvery method to preserve ADA-likeness has been used,\n *\t\t\tat the expense of C-ness.\n *\n *\tCompile:\tcc -O dry.c -o drynr\t\t\t: No registers\n *\t\t\tcc -O -DREG=register dry.c -o dryr\t: Registers\n *\n *\tDefines:\tDefines are provided for old C compiler's\n *\t\t\twhich don't have enums, and can't assign structures.\n *\t\t\tThe time(2) function is library dependant; Most\n *\t\t\treturn the time in seconds, but beware of some, like\n *\t\t\tAztec C, which return other units.\n *\t\t\tThe LOOPS define is initially set for 50000 loops.\n *\t\t\tIf you have a machine with large integers and is\n *\t\t\tvery fast, please change this number to 500000 to\n *\t\t\tget better accuracy.  Please select the way to\n *\t\t\tmeasure the execution time using the TIME define.\n *\t\t\tFor single user machines, time(2) is adequate. 
For\n *\t\t\tmulti-user machines where you cannot get single-user\n *\t\t\taccess, use the times(2) function.  If you have\n *\t\t\tneither, use a stopwatch in the dead of night.\n *\t\t\tUse a \"printf\" at the point marked \"start timer\"\n *\t\t\tto begin your timings. DO NOT use the UNIX \"time(1)\"\n *\t\t\tcommand, as this will measure the total time to\n *\t\t\trun this program, which will (erroneously) include\n *\t\t\tthe time to malloc(3) storage and to compute the\n *\t\t\ttime it takes to do nothing.\n *\n *\tRun:\t\tdrynr; dryr\n *\n *\tResults:\tIf you get any new machine/OS results, please send to:\n *\n *\t\t\t\tihnp4!castor!pcrat!rick\n *\n *\t\t\tand thanks to all that do.  Space prevents listing\n *\t\t\tthe names of those who have provided some of these\n *\t\t\tresults.  I'll be forwarding these results to\n *\t\t\tRheinhold Weicker.\n *\n *\tNote:\t\tI order the list in increasing performance of the\n *\t\t\t\"with registers\" benchmark.  If the compiler doesn't\n *\t\t\tprovide register variables, then the benchmark\n *\t\t\tis the same for both REG and NOREG.\n *\n *\tPLEASE:\t\tSend complete information about the machine type,\n *\t\t\tclock speed, OS and C manufacturer/version.  If\n *\t\t\tthe machine is modified, tell me what was done.\n *\t\t\tOn UNIX, execute uname -a and cc -V to get this info.\n *\n *\t80x8x NOTE:\t80x8x benchers: please try to do all memory models\n *\t\t\tfor a particular compiler.\n *\n *\tAPOLOGY (1/30/86):\n *\t\tWell, I goofed things up!  As pointed out by Haakon Bugge,\n *\t\tthe line of code marked \"GOOF\" below was missing from the\n *\t\tDhrystone distribution for the last several months.  It\n *\t\t*WAS* in a backup copy I made last winter, so no doubt it\n *\t\twas victimized by sleepy fingers operating vi!\n *\n *\t\tThe effect of the line missing is that the reported benchmarks\n *\t\tare 15% too fast (at least on a 80286).  
Now, this creates\n *\t\ta dilema - do I throw out ALL the data so far collected\n *\t\tand use only results from this (corrected) version, or\n *\t\tdo I just keep collecting data for the old version?\n *\n *\t\tSince the data collected so far *is* valid as long as it\n *\t\tis compared with like data, I have decided to keep\n *\t\tTWO lists- one for the old benchmark, and one for the\n *\t\tnew.  This also gives me an opportunity to correct one\n *\t\tother error I made in the instructions for this benchmark.\n *\t\tMy experience with C compilers has been mostly with\n *\t\tUNIX 'pcc' derived compilers, where the 'optimizer' simply\n *\t\tfixes sloppy code generation (peephole optimization).\n *\t\tBut today, there exist C compiler optimizers that will actually\n *\t\tperform optimization in the Computer Science sense of the word,\n *\t\tby removing, for example, assignments to a variable whose\n *\t\tvalue is never used.  Dhrystone, unfortunately, provides\n *\t\tlots of opportunities for this sort of optimization.\n *\n *\t\tI request that benchmarkers re-run this new, corrected\n *\t\tversion of Dhrystone, turning off or bypassing optimizers\n *\t\twhich perform more than peephole optimization.  
Please\n *\t\tindicate the version of Dhrystone used when reporting the\n *\t\tresults to me.\n *\t\t\n * RESULTS BEGIN HERE\n *\n *----------------DHRYSTONE VERSION 1.1 RESULTS BEGIN--------------------------\n *\n * MACHINE\tMICROPROCESSOR\tOPERATING\tCOMPILER\tDHRYSTONES/SEC.\n * TYPE\t\t\t\tSYSTEM\t\t\t\tNO REG\tREGS\n * --------------------------\t------------\t-----------\t---------------\n * Apple IIe\t65C02-1.02Mhz\tDOS 3.3\t\tAztec CII v1.05i  37\t  37\n * -\t\tZ80-2.5Mhz\tCPM-80 v2.2\tAztec CII v1.05g  91\t  91\n * -\t\t8086-8Mhz\tRMX86 V6\tIntel C-86 V2.0\t 197\t 203LM??\n * IBM PC/XT\t8088-4.77Mhz\tCOHERENT 2.3.43\tMark Wiiliams\t 259\t 275\n * -\t\t8086-8Mhz\tRMX86 V6\tIntel C-86 V2.0\t 287\t 304 ??\n * Fortune 32:16 68000-6Mhz\tV7+sys3+4.1BSD  cc\t\t 360\t 346\n * PDP-11/34A\tw/FP-11C\tUNIX V7m\tcc\t\t 406\t 449\n * Macintosh512\t68000-7.7Mhz\tMac ROM O/S\tDeSmet(C ware)\t 625\t 625\n * VAX-11/750\tw/FPA\t\tUNIX 4.2BSD\tcc\t\t 831\t 852\n * DataMedia 932 68000-10Mhz\tUNIX sysV\tcc\t\t 837\t 888\n * Plexus P35\t68000-12.5Mhz\tUNIX sysIII\tcc\t\t 835\t 894\n * ATT PC7300\t68010-10Mhz\tUNIX 5.0.3\tcc\t\t 973\t1034\n * Compaq II\t80286-8Mhz\tMSDOS 3.1\tMS C 3.0 \t1086\t1140 LM\n * IBM PC/AT    80286-7.5Mhz    Venix/286 SVR2  cc              1159    1254 *15\n * Compaq II\t80286-8Mhz\tMSDOS 3.1\tMS C 3.0 \t1190\t1282 MM\n * MicroVAX II\t-\t\tMach/4.3\tcc\t\t1361\t1385\n * DEC uVAX II\t-\t\tUltrix-32m v1.1\tcc\t\t1385\t1399\n * Compaq II\t80286-8Mhz\tMSDOS 3.1\tMS C 3.0 \t1351\t1428\n * VAX 11/780\t-\t\tUNIX 4.2BSD\tcc\t\t1417\t1441\n * VAX-780/MA780\t\tMach/4.3\tcc\t\t1428\t1470\n * VAX 11/780\t-\t\tUNIX 5.0.1\tcc 4.1.1.31\t1650\t1640\n * Ridge 32C V1\t-\t\tROS 3.3\t\tRidge C (older)\t1628\t1695\n * Gould PN6005\t-\t\tUTX 1.1c+ (4.2)\tcc\t\t1732\t1884\n * Gould PN9080\tcustom ECL\tUTX-32 1.1C\tcc\t\t4745\t4992\n * VAX-784\t-\t\tMach/4.3\tcc\t\t5263\t5555 &4\n * VAX 8600\t-\t\t4.3 BSD\t\tcc\t\t6329\t6423\n * Amdahl 5860\t-\t\tUTS sysV\tcc 1.22\t       28735 
  28846\n * IBM3090/200\t-\t\t?\t\t?\t       31250   31250\n *\n *\n *----------------DHRYSTONE VERSION 1.0 RESULTS BEGIN--------------------------\n *\n * MACHINE\tMICROPROCESSOR\tOPERATING\tCOMPILER\tDHRYSTONES/SEC.\n * TYPE\t\t\t\tSYSTEM\t\t\t\tNO REG\tREGS\n * --------------------------\t------------\t-----------\t---------------\n * Commodore 64\t6510-1MHz\tC64 ROM\t\tC Power 2.8\t  36\t  36\n * HP-110\t8086-5.33Mhz\tMSDOS 2.11\tLattice 2.14\t 284\t 284\n * IBM PC/XT\t8088-4.77Mhz\tPC/IX\t\tcc\t\t 271\t 294\n * CCC 3205\t-\t\tXelos(SVR2) \tcc\t\t 558\t 592\n * Perq-II\t2901 bitslice\tAccent S5c \tcc (CMU)\t 301\t 301\n * IBM PC/XT\t8088-4.77Mhz\tCOHERENT 2.3.43\tMarkWilliams cc  296\t 317\n * Cosmos\t68000-8Mhz\tUniSoft\t\tcc\t\t 305\t 322\n * IBM PC/XT\t8088-4.77Mhz\tVenix/86 2.0\tcc\t\t 297\t 324\n * DEC PRO 350  11/23           Venix/PRO SVR2  cc               299     325\n * IBM PC\t8088-4.77Mhz\tMSDOS 2.0\tb16cc 2.0\t 310\t 340\n * PDP11/23\t11/23           Venix (V7)      cc               320     358\n * Commodore Amiga\t\t?\t\tLattice 3.02\t 368\t 371\n * PC/XT        8088-4.77Mhz    Venix/86 SYS V  cc               339     377\n * IBM PC\t8088-4.77Mhz\tMSDOS 2.0\tCI-C86 2.20M\t 390\t 390\n * IBM PC/XT\t8088-4.77Mhz\tPCDOS 2.1\tWizard 2.1\t 367\t 403\n * IBM PC/XT\t8088-4.77Mhz\tPCDOS 3.1\tLattice 2.15\t 403\t 403 @\n * Colex DM-6\t68010-8Mhz\tUnisoft SYSV\tcc\t\t 378\t 410\n * IBM PC\t8088-4.77Mhz\tPCDOS 3.1\tDatalight 1.10\t 416\t 416\n * IBM PC\tNEC V20-4.77Mhz\tMSDOS 3.1\tMS 3.1 \t\t 387\t 420\n * IBM PC/XT\t8088-4.77Mhz\tPCDOS 2.1\tMicrosoft 3.0\t 390\t 427\n * IBM PC\tNEC V20-4.77Mhz\tMSDOS 3.1\tMS 3.1 (186) \t 393\t 427\n * PDP-11/34\t-\t\tUNIX V7M\tcc\t\t 387\t 438\n * IBM PC\t8088, 4.77mhz\tPC-DOS 2.1\tAztec C v3.2d\t 423\t 454\n * Tandy 1000\tV20, 4.77mhz\tMS-DOS 2.11\tAztec C v3.2d\t 423\t 458\n * Tandy TRS-16B 68000-6Mhz\tXenix 1.3.5\tcc\t\t 438\t 458\n * PDP-11/34\t-\t\tRSTS/E\t\tdecus c\t\t 438\t 495\n * Onyx C8002\tZ8000-4Mhz\tIS/1 1.1 
(V7)\tcc\t\t 476\t 511\n * Tandy TRS-16B 68000-6Mhz\tXenix 1.3.5\tGreen Hills\t 609\t 617\n * DEC PRO 380  11/73           Venix/PRO SVR2  cc               577     628\n * FHL QT+\t68000-10Mhz\tOs9/68000\tversion 1.3\t 603\t 649 FH\n * Apollo DN550\t68010-?Mhz\tAegisSR9/IX\tcc 3.12\t\t 666\t 666\n * HP-110\t8086-5.33Mhz\tMSDOS 2.11\tAztec-C\t\t 641\t 676 \n * ATT PC6300\t8086-8Mhz\tMSDOS 2.11\tb16cc 2.0\t 632\t 684\n * IBM PC/AT\t80286-6Mhz\tPCDOS 3.0\tCI-C86 2.1\t 666\t 684\n * Tandy 6000\t68000-8Mhz\tXenix 3.0\tcc\t\t 694\t 694\n * IBM PC/AT\t80286-6Mhz\tXenix 3.0\tcc\t\t 684\t 704 MM\n * Macintosh\t68000-7.8Mhz 2M\tMac Rom\t\tMac C 32 bit int 694\t 704\n * Macintosh\t68000-7.7Mhz\t-\t\tMegaMax C 2.0\t 661\t 709\n * Macintosh512\t68000-7.7Mhz\tMac ROM O/S\tDeSmet(C ware)\t 714\t 714\n * IBM PC/AT\t80286-6Mhz\tXenix 3.0\tcc\t\t 704\t 714 LM\n * Codata 3300\t68000-8Mhz\tUniPlus+ (v7)\tcc\t\t 678\t 725\n * WICAT MB\t68000-8Mhz\tSystem V\tWICAT C 4.1\t 585\t 731 ~\n * Cadmus 9000\t68010-10Mhz\tUNIX\t\tcc\t\t 714\t 735\n * AT&T 6300    8086-8Mhz       Venix/86 SVR2   cc               668     743\n * Cadmus 9790\t68010-10Mhz 1MB\tSVR0,Cadmus3.7\tcc\t\t 720\t 747\n * NEC PC9801F\t8086-8Mhz\tPCDOS 2.11\tLattice 2.15\t 768\t  -  @\n * ATT PC6300\t8086-8Mhz\tMSDOS 2.11\tCI-C86 2.20M\t 769\t 769\n * Burroughs XE550 68010-10Mhz\tCentix 2.10\tcc\t\t 769\t 769 CT1\n * EAGLE/TURBO  8086-8Mhz       Venix/86 SVR2   cc               696     779\n * ALTOS 586\t8086-10Mhz\tXenix 3.0b\tcc \t\t 724\t 793\n * DEC 11/73\tJ-11 micro\tUltrix-11 V3.0\tcc\t\t 735\t 793\n * ATT 3B2/300\tWE32000-?Mhz\tUNIX 5.0.2\tcc\t\t 735\t 806\n * Apollo DN320\t68010-?Mhz\tAegisSR9/IX\tcc 3.12\t\t 806\t 806\n * IRIS-2400\t68010-10Mhz\tUNIX System V\tcc\t\t 772\t 829\n * Atari 520ST  68000-8Mhz      TOS             DigResearch      839     846\n * IBM PC/AT\t80286-6Mhz\tPCDOS 3.0\tMS 3.0(large)\t 833\t 847 LM\n * WICAT MB\t68000-8Mhz\tSystem V\tWICAT C 4.1\t 675\t 853 S~\n * VAX 11/750\t-\t\tUltrix 
1.1\t4.2BSD cc\t 781\t 862\n * CCC  7350A\t68000-8MHz\tUniSoft V.2\tcc\t\t 821\t 875\n * VAX 11/750\t-\t\tUNIX 4.2bsd\tcc\t\t 862\t 877\n * Fast Mac\t68000-7.7Mhz\t-\t\tMegaMax C 2.0\t 839\t 904 +\n * IBM PC/XT\t8086-9.54Mhz\tPCDOS 3.1\tMicrosoft 3.0\t 833\t 909 C1\n * DEC 11/44\t\t\tUltrix-11 V3.0\tcc\t\t 862\t 909\n * Macintosh\t68000-7.8Mhz 2M\tMac Rom\t\tMac C 16 bit int 877\t 909 S\n * CCC 3210\t-\t\tXelos R01(SVR2)\tcc\t\t 849\t 924\n * CCC 3220\t-               Ed. 7 v2.3      cc\t\t 892\t 925\n * IBM PC/AT\t80286-6Mhz\tXenix 3.0\tcc -i\t\t 909\t 925\n * AT&T 6300\t8086, 8mhz\tMS-DOS 2.11\tAztec C v3.2d\t 862\t 943\n * IBM PC/AT\t80286-6Mhz\tXenix 3.0\tcc\t\t 892\t 961\n * VAX 11/750\tw/FPA\t\tEunice 3.2\tcc\t\t 914\t 976\n * IBM PC/XT\t8086-9.54Mhz\tPCDOS 3.1\tWizard 2.1\t 892\t 980 C1\n * IBM PC/XT\t8086-9.54Mhz\tPCDOS 3.1\tLattice 2.15\t 980\t 980 C1\n * Plexus P35\t68000-10Mhz\tUNIX System III cc\t\t 984\t 980\n * PDP-11/73\tKDJ11-AA 15Mhz\tUNIX V7M 2.1\tcc\t\t 862     981\n * VAX 11/750\tw/FPA\t\tUNIX 4.3bsd\tcc\t\t 994\t 997\n * IRIS-1400\t68010-10Mhz\tUNIX System V\tcc\t\t 909\t1000\n * IBM PC/AT\t80286-6Mhz\tVenix/86 2.1\tcc\t\t 961\t1000\n * IBM PC/AT\t80286-6Mhz\tPCDOS 3.0\tb16cc 2.0\t 943\t1063\n * Zilog S8000/11 Z8001-5.5Mhz\tZeus 3.2\tcc\t\t1011\t1084\n * NSC ICM-3216 NSC 32016-10Mhz\tUNIX SVR2\tcc\t\t1041\t1084\n * IBM PC/AT\t80286-6Mhz\tPCDOS 3.0\tMS 3.0(small)\t1063\t1086\n * VAX 11/750\tw/FPA\t\tVMS\t\tVAX-11 C 2.0\t 958\t1091\n * Stride\t68000-10Mhz\tSystem-V/68\tcc\t\t1041\t1111\n * Plexus P/60  MC68000-12.5Mhz\tUNIX SYSIII\tPlexus\t\t1111\t1111\n * ATT PC7300\t68010-10Mhz\tUNIX 5.0.2\tcc\t\t1041\t1111\n * CCC 3230\t-\t\tXelos R01(SVR2)\tcc\t\t1040\t1126\n * Stride\t68000-12Mhz\tSystem-V/68\tcc\t\t1063\t1136\n * IBM PC/AT    80286-6Mhz      Venix/286 SVR2  cc              1056    1149\n * Plexus P/60  MC68000-12.5Mhz\tUNIX SYSIII\tPlexus\t\t1111\t1163 T\n * IBM PC/AT\t80286-6Mhz\tPCDOS 3.0\tDatalight 1.10\t1190\t1190\n * ATT 
PC6300+\t80286-6Mhz\tMSDOS 3.1\tb16cc 2.0\t1111\t1219\n * IBM PC/AT\t80286-6Mhz\tPCDOS 3.1\tWizard 2.1\t1136\t1219\n * Sun2/120\t68010-10Mhz\tSun 4.2BSD\tcc\t\t1136\t1219\n * IBM PC/AT\t80286-6Mhz\tPCDOS 3.0\tCI-C86 2.20M\t1219\t1219\n * WICAT PB\t68000-8Mhz\tSystem V\tWICAT C 4.1\t 998\t1226 ~\n * MASSCOMP 500\t68010-10MHz\tRTU V3.0\tcc (V3.2)\t1156\t1238\n * Alliant FX/8 IP (68012-12Mhz) Concentrix\tcc -ip;exec -i \t1170\t1243 FX\n * Cyb DataMate\t68010-12.5Mhz\tUniplus 5.0\tUnisoft cc\t1162\t1250\n * PDP 11/70\t-\t\tUNIX 5.2\tcc\t\t1162\t1250\n * IBM PC/AT\t80286-6Mhz\tPCDOS 3.1\tLattice 2.15\t1250\t1250\n * IBM PC/AT\t80286-7.5Mhz\tVenix/86 2.1\tcc\t\t1190\t1315 *15\n * Sun2/120\t68010-10Mhz\tStandalone\tcc\t\t1219\t1315\n * Intel 380\t80286-8Mhz\tXenix R3.0up1\tcc\t\t1250\t1315 *16\n * Sequent Balance 8000\tNS32032-10MHz\tDynix 2.0\tcc\t1250\t1315 N12\n * IBM PC/DSI-32 32032-10Mhz\tMSDOS 3.1\tGreenHills 2.14\t1282\t1315 C3\n * ATT 3B2/400\tWE32100-?Mhz\tUNIX 5.2\tcc\t\t1315\t1315\n * CCC 3250XP\t-\t\tXelos R01(SVR2)\tcc\t\t1215\t1318\n * IBM PC/RT 032 RISC(801?)?Mhz BSD 4.2         cc              1248    1333 RT\n * DG MV4000\t-\t\tAOS/VS 5.00\tcc\t\t1333\t1333\n * IBM PC/AT\t80286-8Mhz\tVenix/86 2.1\tcc\t\t1275\t1380 *16\n * IBM PC/AT\t80286-6Mhz\tMSDOS 3.0\tMicrosoft 3.0\t1250\t1388\n * ATT PC6300+\t80286-6Mhz\tMSDOS 3.1\tCI-C86 2.20M\t1428\t1428\n * COMPAQ/286   80286-8Mhz      Venix/286 SVR2  cc              1326    1443\n * IBM PC/AT    80286-7.5Mhz    Venix/286 SVR2  cc              1333    1449 *15\n * WICAT PB\t68000-8Mhz\tSystem V\tWICAT C 4.1\t1169\t1464 S~\n * Tandy II/6000 68000-8Mhz\tXenix 3.0\tcc      \t1384\t1477\n * MicroVAX II\t-\t\tMach/4.3\tcc\t\t1513\t1536\n * WICAT MB\t68000-12.5Mhz\tSystem V\tWICAT C 4.1\t1246\t1537 ~\n * IBM PC/AT    80286-9Mhz      SCO Xenix V     cc              1540    1556 *18\n * Cyb DataMate\t68010-12.5Mhz\tUniplus 5.0\tUnisoft cc\t1470\t1562 S\n * VAX 11/780\t-\t\tUNIX 5.2\tcc\t\t1515\t1562\n * 
MicroVAX-II\t-\t\t-\t\t-\t\t1562\t1612\n * VAX-780/MA780\t\tMach/4.3\tcc\t\t1587\t1612\n * VAX 11/780\t-\t\tUNIX 4.3bsd\tcc\t\t1646\t1662\n * Apollo DN660\t-\t\tAegisSR9/IX\tcc 3.12\t\t1666\t1666\n * ATT 3B20\t-\t\tUNIX 5.2\tcc\t\t1515\t1724\n * NEC PC-98XA\t80286-8Mhz\tPCDOS 3.1\tLattice 2.15\t1724\t1724 @\n * HP9000-500\tB series CPU\tHP-UX 4.02\tcc\t\t1724\t-\n * Ridge 32C V1\t-\t\tROS 3.3\t\tRidge C (older)\t1776\t-\n * IBM PC/STD\t80286-8Mhz\tMSDOS 3.0 \tMicrosoft 3.0\t1724\t1785 C2\n * WICAT MB\t68000-12.5Mhz\tSystem V\tWICAT C 4.1\t1450\t1814 S~\n * WICAT PB\t68000-12.5Mhz\tSystem V\tWICAT C 4.1\t1530\t1898 ~\n * DEC-2065\tKL10-Model B\tTOPS-20 6.1FT5\tPort. C Comp.\t1937\t1946\n * Gould PN6005\t-\t\tUTX 1.1(4.2BSD)\tcc\t\t1675\t1964\n * DEC2060\tKL-10\t\tTOPS-20\t\tcc\t\t2000\t2000 NM\n * Intel 310AP\t80286-8Mhz\tXenix 3.0\tcc\t\t1893\t2009\n * VAX 11/785\t-\t\tUNIX 5.2\tcc\t\t2083\t2083\n * VAX 11/785\t-\t\tVMS\t\tVAX-11 C 2.0\t2083\t2083\n * VAX 11/785\t-\t\tUNIX SVR2\tcc\t\t2123\t2083\n * VAX 11/785   -               ULTRIX-32 1.1   cc\t\t2083    2091 \n * VAX 11/785\t-\t\tUNIX 4.3bsd\tcc\t\t2135\t2136\n * WICAT PB\t68000-12.5Mhz\tSystem V\tWICAT C 4.1\t1780\t2233 S~\n * Pyramid 90x\t-\t\tOSx 2.3\t\tcc\t\t2272\t2272\n * Pyramid 90x\tFPA,cache,4Mb\tOSx 2.5\t\tcc no -O\t2777\t2777\n * Pyramid 90x\tw/cache\t\tOSx 2.5\t\tcc w/-O\t\t3333\t3333\n * IBM-4341-II\t-\t\tVM/SP3\t\tWaterloo C 1.2  3333\t3333\n * IRIS-2400T\t68020-16.67Mhz\tUNIX System V\tcc\t\t3105\t3401\n * Celerity C-1200 ?\t\tUNIX 4.2BSD\tcc\t\t3485\t3468\n * SUN 3/75\t68020-16.67Mhz\tSUN 4.2 V3\tcc\t\t3333\t3571\n * IBM-4341\tModel 12\tUTS 5.0\t\t?\t\t3685\t3685\n * SUN-3/160    68020-16.67Mhz  Sun 4.2 V3.0A   cc\t\t3381    3764\n * Sun 3/180\t68020-16.67Mhz\tSun 4.2\t\tcc\t\t3333\t3846\n * IBM-4341\tModel 12\tUTS 5.0\t\t?\t\t3910\t3910 MN\n * MC 5400\t68020-16.67MHz\tRTU V3.0\tcc (V4.0)\t3952\t4054\n * Intel 386/20\t80386-12.5Mhz\tPMON debugger\tIntel C386v0.2\t4149\t4386\n * NCR Tower32  
68020-16.67Mhz  SYS 5.0 Rel 2.0 cc              3846\t4545\n * MC 5600/5700\t68020-16.67MHz\tRTU V3.0\tcc (V4.0)\t4504\t4746 %\n * Intel 386/20\t80386-12.5Mhz\tPMON debugger\tIntel C386v0.2\t4534\t4794 i1\n * Intel 386/20\t80386-16Mhz\tPMON debugger\tIntel C386v0.2\t5304\t5607\n * Gould PN9080\tcustom ECL\tUTX-32 1.1C\tcc\t\t5369\t5676\n * Gould 1460-342 ECL proc      UTX/32 1.1/c    cc              5342    5677 G1\n * VAX-784\t-\t\tMach/4.3\tcc\t\t5882\t5882 &4\n * Intel 386/20\t80386-16Mhz\tPMON debugger\tIntel C386v0.2\t5801\t6133 i1\n * VAX 8600\t-\t\tUNIX 4.3bsd\tcc\t\t7024\t7088\n * VAX 8600\t-\t\tVMS\t\tVAX-11 C 2.0\t7142\t7142\n * Alliant FX/8 CE\t\tConcentrix\tcc -ce;exec -c \t6952\t7655 FX\n * CCI POWER 6/32\t\tCOS(SV+4.2)\tcc\t\t7500\t7800\n * CCI POWER 6/32\t\tPOWER 6 UNIX/V\tcc\t\t8236\t8498\n * CCI POWER 6/32\t\t4.2 Rel. 1.2b\tcc\t\t8963\t9544\n * Sperry (CCI Power 6)\t\t4.2BSD\t\tcc\t\t9345   10000\n * CRAY-X-MP/12\t   105Mhz\tCOS 1.14\tCray C         10204   10204\n * IBM-3083\t-\t\tUTS 5.0 Rel 1\tcc\t       16666   12500\n * CRAY-1A\t    80Mhz\tCTSS\t\tCray C 2.0     12100   13888\n * IBM-3083\t-\t\tVM/CMS HPO 3.4\tWaterloo C 1.2 13889   13889\n * Amdahl 470 V/8 \t\tUTS/V 5.2       cc v1.23       15560   15560\n * CRAY-X-MP/48\t   105Mhz\tCTSS\t\tCray C 2.0     15625   17857\n * Amdahl 580\t-\t\tUTS 5.0 Rel 1.2\tcc v1.5        23076   23076\n * Amdahl 5860\t \t\tUTS/V 5.2       cc v1.23       28970   28970\n *\n * NOTE\n *   *   Crystal changed from 'stock' to listed value.\n *   +   This Macintosh was upgraded from 128K to 512K in such a way that\n *       the new 384K of memory is not slowed down by video generator accesses.\n *   %   Single processor; MC == MASSCOMP\n *   NM  A version 7 C compiler written at New Mexico Tech.\n *   @   vanilla Lattice compiler used with MicroPro standard library\n *   S   Shorts used instead of ints\n *   T\t with Chris Torek's patches (whatever they are).\n *   ~   For WICAT Systems: MB=MultiBus, PB=Proprietary 
Bus\n *   LM  Large Memory Model. (Otherwise, all 80x8x results are small model)\n *   MM  Medium Memory Model. (Otherwise, all 80x8x results are small model)\n *   C1  Univation PC TURBO Co-processor; 9.54Mhz 8086, 640K RAM\n *   C2  Seattle Telecom STD-286 board\n *   C3  Definicon DSI-32 coprocessor\n *   C?  Unknown co-processor board?\n *   CT1 Convergent Technologies MegaFrame, 1 processor.\n *   MN  Using Mike Newtons 'optimizer' (see net.sources).\n *   G1  This Gould machine has 2 processors and was able to run 2 dhrystone\n *       Benchmarks in parallel with no slowdown.\n *   FH  FHC == Frank Hogg Labs (Hazelwood Uniquad 2 in an FHL box).\n *   FX  The Alliant FX/8 is a system consisting of 1-8 CEs (computation\n *\t engines) and 1-12 IPs (interactive processors). Note N8 applies.\n *   RT  This is one of the RT's that CMU has been using for awhile.  I'm\n *\t not sure that this is identical to the machine that IBM is selling\n *\t to the public.\n *   i1  Normally, the 386/20 starter kit has a 16k direct mapped cache\n *\t which inserts 2 or 3 wait states on a write thru.  These results\n *\t were obtained by disabling the write-thru, or essentially turning\n *\t the cache into 0 wait state memory.\n *   Nnn This machine has multiple processors, allowing \"nn\" copies of the\n *\t benchmark to run in the same time as 1 copy.\n *   &nn This machine has \"nn\" processors, and the benchmark results were\n *\t obtained by having all \"nn\" processors working on 1 copy of dhrystone.\n *\t (Note, this is different than Nnn. Salesmen like this measure).\n *   ?   I don't trust results marked with '?'.  These were sent to me with\n *       either incomplete info, or with times that just don't make sense.\n *\t ?? means I think the performance is too poor, ?! means too good.\n *       If anybody can confirm these figures, please respond.\n *\n *  ABBREVIATIONS\n *\tCCC\tConcurrent Computer Corp. 
(was Perkin-Elmer)\n *\tMC\tMasscomp\n *\n *--------------------------------RESULTS END----------------------------------\n *\n *\tThe following program contains statements of a high-level programming\n *\tlanguage (C) in a distribution considered representative:\n *\n *\tassignments\t\t\t53%\n *\tcontrol statements\t\t32%\n *\tprocedure, function calls\t15%\n *\n *\t100 statements are dynamically executed.  The program is balanced with\n *\trespect to the three aspects:\n *\t\t- statement type\n *\t\t- operand type (for simple data types)\n *\t\t- operand access\n *\t\t\toperand global, local, parameter, or constant.\n *\n *\tThe combination of these three aspects is balanced only approximately.\n *\n *\tThe program does not compute anything meaningfull, but it is\n *\tsyntactically and semantically correct.\n *\n */\n\n/* Accuracy of timings and human fatigue controlled by next two lines */\n/*#define LOOPS\t5000\t\t/* Use this for slow or 16 bit machines */\n/*#define LOOPS\t50000\t\t/* Use this for slow or 16 bit machines */\n#define LOOPS\t500000\t\t/* Use this for faster machines */\n\n/* Compiler dependent options */\n#undef\tNOENUM\t\t\t/* Define if compiler has no enum's */\n#undef\tNOSTRUCTASSIGN\t\t/* Define if compiler can't assign structures */\n\n/* define only one of the next three defines */\n#define GETRUSAGE\t\t/* Use getrusage(2) time function */\n/*#define TIMES\t\t\t/* Use times(2) time function */\n/*#define TIME\t\t\t/* Use time(2) time function */\n\n/* define the granularity of your times(2) function (when used) */\n/*#define HZ\t60\t\t/* times(2) returns 1/60 second (most) */\n/*#define HZ\t100\t\t/* times(2) returns 1/100 second (WECo) */\n\n/* for compatibility with goofed up version */\n/*#define GOOF\t\t\t/* Define if you want the goofed up version */\n\n/* default number of threads that will be spawned */\n#define DEFAULT_THREADS 1\n\n/* Dhrystones per second obtained on VAX11/780 -- a notional 1MIPS machine. 
*/\n/* Used in DMIPS calculation. */\n#define ONE_MIPS 1757\n\n#ifdef GOOF\nchar\tVersion[] = \"1.0\";\n#else\nchar\tVersion[] = \"1.1\";\n#endif\n\n#ifdef\tNOSTRUCTASSIGN\n#define\tstructassign(d, s)\tmemcpy(&(d), &(s), sizeof(d))\n#else\n#define\tstructassign(d, s)\td = s\n#endif\n\n#ifdef\tNOENUM\n#define\tIdent1\t1\n#define\tIdent2\t2\n#define\tIdent3\t3\n#define\tIdent4\t4\n#define\tIdent5\t5\ntypedef int\tEnumeration;\n#else\ntypedef enum\t{Ident1, Ident2, Ident3, Ident4, Ident5} Enumeration;\n#endif\n\ntypedef int\tOneToThirty;\ntypedef int\tOneToFifty;\ntypedef char\tCapitalLetter;\ntypedef char\tString30[31];\ntypedef int\tArray1Dim[51];\ntypedef int\tArray2Dim[51][51];\n\nstruct\tRecord\n{\n\tstruct Record\t\t*PtrComp;\n\tEnumeration\t\tDiscr;\n\tEnumeration\t\tEnumComp;\n\tOneToFifty\t\tIntComp;\n\tString30\t\tStringComp;\n};\n\ntypedef struct Record \tRecordType;\ntypedef RecordType *\tRecordPtr;\ntypedef int\t\tboolean;\n\n//#define\tNULL\t\t0\n#define\tTRUE\t\t1\n#define\tFALSE\t\t0\n\n#ifndef REG\n#define\tREG\n#endif\n\nextern Enumeration\tFunc1();\nextern boolean\t\tFunc2();\n\n#ifdef TIMES\n#include <sys/param.h>\n#include <sys/types.h>\n#endif\n#ifdef GETRUSAGE\n#include <sys/resource.h>\n#endif\n#include <time.h>\n#include <unistd.h>\n#include <sys/wait.h>\n#include <stdlib.h>\n#include <stdio.h>\n#include <string.h>\n#include <sys/time.h>\n\n\nmain(int argc, char** argv)\n{\n\tint num_threads = DEFAULT_THREADS;\n\tint runtime = 0;\n\tint delay = 0;\n\tlong mloops = 0;\n\n\tint opt;\n\twhile ((opt = getopt(argc, argv, \"ht:r:d:l:\")) != -1) {\n\t\tswitch (opt) {\n\t\t\tcase 'h':\n\t\t\t\tprinthelp();\n\t\t\t\texit(0);\n\t\t\t\tbreak;\n\t\t\tcase 't':\n\t\t\t\tnum_threads = atoi(optarg);\n\t\t\t\tbreak;\n\t\t\tcase 'r':\n\t\t\t\truntime = atoi(optarg);\n\t\t\t\tbreak;\n\t\t\tcase 'd':\n\t\t\t\tdelay = atoi(optarg);\n\t\t\t\tbreak;\n\t\t\tcase 'l':\n\t\t\t\tmloops = atoll(optarg);\n\t\t\t\tbreak;\n\t\t}\n\t}\n\n\tif (runtime && mloops) 
{\n\t\tfprintf(stderr, \"-r and -l options cannot be specified at the same time.\\n\");\n\t\texit(1);\n\t} else if (!runtime && !mloops) {\n\t\tfprintf(stderr, \"Must specify either -r or -l option; use -h to see help.\\n\");\n\t\texit(1);\n\t}\n\n\tlong num_loops = mloops ? mloops * 1000000L : LOOPS * num_threads;\n\trun_dhrystone(runtime, num_threads, num_loops, delay);\n}\n\nrun_dhrystone(int duration, int num_threads, long num_loops, int delay) {\n\tprintf(\"duration: %d seconds\\n\", duration);\n\tprintf(\"number of threads: %d\\n\", num_threads);\n\tprintf(\"number of loops: %ld\\n\", num_loops);\n\tprintf(\"delay between starting threads: %d seconds\\n\", delay);\n\tprintf(\"\\n\");\n\n\tpid_t *children = malloc(num_threads* sizeof(pid_t));\n\tint loops_per_thread = num_loops / num_threads;\n\n\tclock_t run_start = clock();\n\n\tlong i;\n\tint actual_duration;\n\tfor (i = 0; i < (num_threads - 1); i++) {\n\t\tpid_t c = fork();\n\t\tif (c == 0) {\n\t\t\t// child\n\t\t\tactual_duration = duration - i * delay;\n\t\t\tif (actual_duration < 0)\n\t\t\t\tactual_duration = 0;\n\t\t\trun_for_duration(actual_duration, loops_per_thread);\n\t\t\texit(0);\n\t\t}\n\t\t\n\t\tchildren[i] = c;\n\t\tsleep(delay);\n\t}\n\n\tactual_duration = duration - delay * (num_threads - 1);\n\tif (actual_duration < 0)\n\t\tactual_duration = 0;\n\trun_for_duration(actual_duration, loops_per_thread);\n\n\tfor (i = 0; i < num_threads; i++) {\n\t\tint status, w;\n\t\tdo {\n\t\t\tw = wait(&status);\n\t\t} while (w != -1 && (!WIFEXITED(status) && !WIFSIGNALED(status)));\n\t}\n\n\tclock_t run_end = clock();\n\tprintf(\"\\nTotal dhrystone run time: %f seconds.\\n\", (double)(run_end - run_start) / CLOCKS_PER_SEC);\n\n\texit(0);\n}\n\nrun_for_duration(int duration, long num_loops) {\n\tclock_t end = clock() + duration * CLOCKS_PER_SEC;\n\n\tdo {\n\t\tProc0(num_loops, duration == 0);\n\t} while (clock() < end);\n}\n\nprinthelp() {\n\tprintf(\"Usage: dhrystone (-h | -l MLOOPS | -r DURATION) [-t 
THREADS [-d DELAY]]\\n\");\n\tprintf(\"\\n\");\n\tprintf(\"Runs dhrystone benchmark either for a specfied duration or for a specified\\n\");\n\tprintf(\"number of iterations.\\n\");\n\tprintf(\"\\n\");\n\tprintf(\"Options:\\n\");\n\tprintf(\"    -h          Print this message and exit.\\n\");\n\tprintf(\"    -l MLOOPS   Run dhrystone for the specified number of millions\\n\");\n\tprintf(\"                of iterations (i.e. the actual number of iterations is\\n\");\n\tprintf(\"                MLOOPS * 1e6).\\n\");\n\tprintf(\"    -r DURATION Run dhhrystone for the specified duration (in seconds). \\n\");\n\tprintf(\"                dhrystone will be run 500000 iterations, looping until\\n\");\n\tprintf(\"                the specified time period has passed.\\n\");\n\tprintf(\"\\n\");\n\tprintf(\"    Note: -r and -l options may not be specified at the same time.\\n\");\n\tprintf(\"\\n\");\n\tprintf(\"    -t THREADS  Specified the number of concurrent threads (processes,\\n\");\n\tprintf(\"                actually) that will be spawned. 
Defaults to 1.\\n\");\n\tprintf(\"    -d DELAY    if THREADS is > 1, this specifies the delay between\\n\");\n\tprintf(\"                spawning the threads.\\n\");\n\tprintf(\"\\n\");\n}\n\n\n/*\n * Package 1\n */\nint\t\tIntGlob;\nboolean\t\tBoolGlob;\nchar\t\tChar1Glob;\nchar\t\tChar2Glob;\nArray1Dim\tArray1Glob;\nArray2Dim\tArray2Glob;\nRecordPtr\tPtrGlb;\nRecordPtr\tPtrGlbNext;\n\nProc0(long numloops, boolean print_result)\n{\n\tOneToFifty\t\tIntLoc1;\n\tREG OneToFifty\t\tIntLoc2;\n\tOneToFifty\t\tIntLoc3;\n\tREG char\t\tCharLoc;\n\tREG char\t\tCharIndex;\n\tEnumeration\t \tEnumLoc;\n\tString30\t\tString1Loc;\n\tString30\t\tString2Loc;\n\t//\textern char\t\t*malloc();\n\n\tregister unsigned int\ti;\n#ifdef TIME\n\tlong\t\t\ttime();\n\tlong\t\t\tstarttime;\n\tlong\t\t\tbenchtime;\n\tlong\t\t\tnulltime;\n\n\tstarttime = time( (long *) 0);\n\tfor (i = 0; i < numloops; ++i);\n\tnulltime = time( (long *) 0) - starttime; /* Computes o'head of loop */\n#endif\n#ifdef TIMES\n\ttime_t\t\t\tstarttime;\n\ttime_t\t\t\tbenchtime;\n\ttime_t\t\t\tnulltime;\n\tstruct tms\t\ttms;\n\n\ttimes(&tms); starttime = tms.tms_utime;\n\tfor (i = 0; i < numloops; ++i);\n\ttimes(&tms);\n\tnulltime = tms.tms_utime - starttime; /* Computes overhead of looping */\n#endif\n#ifdef GETRUSAGE\n\tstruct rusage starttime;\n\tstruct rusage endtime;\n\tstruct timeval nulltime;\n\n\tgetrusage(RUSAGE_SELF, &starttime);\n\tfor (i = 0; i < numloops; ++i);\n\tgetrusage(RUSAGE_SELF, &endtime);\n\tnulltime.tv_sec  = endtime.ru_utime.tv_sec  - starttime.ru_utime.tv_sec;\n\tnulltime.tv_usec = endtime.ru_utime.tv_usec - starttime.ru_utime.tv_usec;\n#endif\n\n\tPtrGlbNext = (RecordPtr) malloc(sizeof(RecordType));\n\tPtrGlb = (RecordPtr) malloc(sizeof(RecordType));\n\tPtrGlb->PtrComp = PtrGlbNext;\n\tPtrGlb->Discr = Ident1;\n\tPtrGlb->EnumComp = Ident3;\n\tPtrGlb->IntComp = 40;\n\tstrcpy(PtrGlb->StringComp, \"DHRYSTONE PROGRAM, SOME STRING\");\n#ifndef\tGOOF\n\tstrcpy(String1Loc, \"DHRYSTONE PROGRAM, 1'ST 
STRING\");\t/*GOOF*/\n#endif\n\tArray2Glob[8][7] = 10;\t/* Was missing in published program */\n\n/*****************\n-- Start Timer --\n*****************/\n#ifdef TIME\n\tstarttime = time( (long *) 0);\n#endif\n#ifdef TIMES\n\ttimes(&tms); starttime = tms.tms_utime;\n#endif\n#ifdef GETRUSAGE\n\tgetrusage (RUSAGE_SELF, &starttime);\n#endif\n\tfor (i = 0; i < numloops; ++i)\n\t{\n\n\t\tProc5();\n\t\tProc4();\n\t\tIntLoc1 = 2;\n\t\tIntLoc2 = 3;\n\t\tstrcpy(String2Loc, \"DHRYSTONE PROGRAM, 2'ND STRING\");\n\t\tEnumLoc = Ident2;\n\t\tBoolGlob = ! Func2(String1Loc, String2Loc);\n\t\twhile (IntLoc1 < IntLoc2)\n\t\t{\n\t\t\tIntLoc3 = 5 * IntLoc1 - IntLoc2;\n\t\t\tProc7(IntLoc1, IntLoc2, &IntLoc3);\n\t\t\t++IntLoc1;\n\t\t}\n\t\tProc8(Array1Glob, Array2Glob, IntLoc1, IntLoc3);\n\t\tProc1(PtrGlb);\n\t\tfor (CharIndex = 'A'; CharIndex <= Char2Glob; ++CharIndex)\n\t\t\tif (EnumLoc == Func1(CharIndex, 'C'))\n\t\t\t\tProc6(Ident1, &EnumLoc);\n\t\tIntLoc3 = IntLoc2 * IntLoc1;\n\t\tIntLoc2 = IntLoc3 / IntLoc1;\n\t\tIntLoc2 = 7 * (IntLoc3 - IntLoc2) - IntLoc1;\n\t\tProc2(&IntLoc1);\n\t}\n\n/*****************\n-- Stop Timer --\n*****************/\n\n\tif (print_result) {\n#ifdef TIME\n\t\tbenchtime = time( (long *) 0) - starttime - nulltime;\n\t\tprintf(\"Dhrystone(%s) time for %ld passes = %ld\\n\",\n\t\t\tVersion,\n\t\t\t(long) numloops, benchtime);\n\t\tprintf(\"This machine benchmarks at %ld dhrystones/second\\n\",\n\t\t\t((long) numloops) / benchtime);\n\t\tprintf(\"                           %ld DMIPS\\n\",\n\t\t\t((long) numloops) / benchtime / ONE_MIPS);\n#endif\n#ifdef TIMES\n\t\ttimes(&tms);\n\t\tbenchtime = tms.tms_utime - starttime - nulltime;\n\t\tprintf(\"Dhrystone(%s) time for %ld passes = %ld\\n\",\n\t\t\tVersion,\n\t\t\t(long) numloops, benchtime/HZ);\n\t\tprintf(\"This machine benchmarks at %ld dhrystones/second\\n\",\n\t\t\t((long) numloops) * HZ / benchtime);\n\t\tprintf(\"                           %ld DMIPS\\n\",\n\t\t\t((long) numloops) * HZ / benchtime / 
ONE_MIPS);\n#endif\n#ifdef GETRUSAGE\n\t\tgetrusage(RUSAGE_SELF, &endtime);\n\t\t{\n\t\t    double t = (double)(endtime.ru_utime.tv_sec\n\t\t\t\t\t- starttime.ru_utime.tv_sec\n\t\t\t\t\t- nulltime.tv_sec)\n\t\t\t     + (double)(endtime.ru_utime.tv_usec\n\t\t\t\t\t- starttime.ru_utime.tv_usec\n\t\t\t\t\t- nulltime.tv_usec) * 1e-6;\n\t\t    printf(\"Dhrystone(%s) time for %ld passes = %.1f\\n\",\n\t\t\t   Version,\n\t\t\t   (long)numloops,\n\t\t\t   t);\n\t\t    printf(\"This machine benchmarks at %.0f dhrystones/second\\n\",\n\t\t\t   (double)numloops / t);\n\t\t    printf(\"                           %.0f DMIPS\\n\",\n\t\t\t   (double)numloops / t / ONE_MIPS);\n\t\t}\n#endif\n\t}\n\n}\n\nProc1(PtrParIn)\nREG RecordPtr\tPtrParIn;\n{\n#define\tNextRecord\t(*(PtrParIn->PtrComp))\n\n\tstructassign(NextRecord, *PtrGlb);\n\tPtrParIn->IntComp = 5;\n\tNextRecord.IntComp = PtrParIn->IntComp;\n\tNextRecord.PtrComp = PtrParIn->PtrComp;\n\tProc3(NextRecord.PtrComp);\n\tif (NextRecord.Discr == Ident1)\n\t{\n\t\tNextRecord.IntComp = 6;\n\t\tProc6(PtrParIn->EnumComp, &NextRecord.EnumComp);\n\t\tNextRecord.PtrComp = PtrGlb->PtrComp;\n\t\tProc7(NextRecord.IntComp, 10, &NextRecord.IntComp);\n\t}\n\telse\n\t\tstructassign(*PtrParIn, NextRecord);\n\n#undef\tNextRecord\n}\n\nProc2(IntParIO)\nOneToFifty\t*IntParIO;\n{\n\tREG OneToFifty\t\tIntLoc;\n\tREG Enumeration\t\tEnumLoc;\n\n\tIntLoc = *IntParIO + 10;\n\tfor(;;)\n\t{\n\t\tif (Char1Glob == 'A')\n\t\t{\n\t\t\t--IntLoc;\n\t\t\t*IntParIO = IntLoc - IntGlob;\n\t\t\tEnumLoc = Ident1;\n\t\t}\n\t\tif (EnumLoc == Ident1)\n\t\t\tbreak;\n\t}\n}\n\nProc3(PtrParOut)\nRecordPtr\t*PtrParOut;\n{\n\tif (PtrGlb != NULL)\n\t\t*PtrParOut = PtrGlb->PtrComp;\n\telse\n\t\tIntGlob = 100;\n\tProc7(10, IntGlob, &PtrGlb->IntComp);\n}\n\nProc4()\n{\n\tREG boolean\tBoolLoc;\n\n\tBoolLoc = Char1Glob == 'A';\n\tBoolLoc |= BoolGlob;\n\tChar2Glob = 'B';\n}\n\nProc5()\n{\n\tChar1Glob = 'A';\n\tBoolGlob = FALSE;\n}\n\nextern boolean Func3();\n\nProc6(EnumParIn, 
EnumParOut)\nREG Enumeration\tEnumParIn;\nREG Enumeration\t*EnumParOut;\n{\n\t*EnumParOut = EnumParIn;\n\tif (! Func3(EnumParIn) )\n\t\t*EnumParOut = Ident4;\n\tswitch (EnumParIn)\n\t{\n\tcase Ident1:\t*EnumParOut = Ident1; break;\n\tcase Ident2:\tif (IntGlob > 100) *EnumParOut = Ident1;\n\t\t\telse *EnumParOut = Ident4;\n\t\t\tbreak;\n\tcase Ident3:\t*EnumParOut = Ident2; break;\n\tcase Ident4:\tbreak;\n\tcase Ident5:\t*EnumParOut = Ident3;\n\t}\n}\n\nProc7(IntParI1, IntParI2, IntParOut)\nOneToFifty\tIntParI1;\nOneToFifty\tIntParI2;\nOneToFifty\t*IntParOut;\n{\n\tREG OneToFifty\tIntLoc;\n\n\tIntLoc = IntParI1 + 2;\n\t*IntParOut = IntParI2 + IntLoc;\n}\n\nProc8(Array1Par, Array2Par, IntParI1, IntParI2)\nArray1Dim\tArray1Par;\nArray2Dim\tArray2Par;\nOneToFifty\tIntParI1;\nOneToFifty\tIntParI2;\n{\n\tREG OneToFifty\tIntLoc;\n\tREG OneToFifty\tIntIndex;\n\n\tIntLoc = IntParI1 + 5;\n\tArray1Par[IntLoc] = IntParI2;\n\tArray1Par[IntLoc+1] = Array1Par[IntLoc];\n\tArray1Par[IntLoc+30] = IntLoc;\n\tfor (IntIndex = IntLoc; IntIndex <= (IntLoc+1); ++IntIndex)\n\t\tArray2Par[IntLoc][IntIndex] = IntLoc;\n\t++Array2Par[IntLoc][IntLoc-1];\n\tArray2Par[IntLoc+20][IntLoc] = Array1Par[IntLoc];\n\tIntGlob = 5;\n}\n\nEnumeration Func1(CharPar1, CharPar2)\nCapitalLetter\tCharPar1;\nCapitalLetter\tCharPar2;\n{\n\tREG CapitalLetter\tCharLoc1;\n\tREG CapitalLetter\tCharLoc2;\n\n\tCharLoc1 = CharPar1;\n\tCharLoc2 = CharLoc1;\n\tif (CharLoc2 != CharPar2)\n\t\treturn (Ident1);\n\telse\n\t\treturn (Ident2);\n}\n\nboolean Func2(StrParI1, StrParI2)\nString30\tStrParI1;\nString30\tStrParI2;\n{\n\tREG OneToThirty\t\tIntLoc;\n\tREG CapitalLetter\tCharLoc;\n\n\tIntLoc = 1;\n\twhile (IntLoc <= 1)\n\t\tif (Func1(StrParI1[IntLoc], StrParI2[IntLoc+1]) == Ident1)\n\t\t{\n\t\t\tCharLoc = 'A';\n\t\t\t++IntLoc;\n\t\t}\n\tif (CharLoc >= 'W' && CharLoc <= 'Z')\n\t\tIntLoc = 7;\n\tif (CharLoc == 'X')\n\t\treturn(TRUE);\n\telse\n\t{\n\t\tif (strcmp(StrParI1, StrParI2) > 0)\n\t\t{\n\t\t\tIntLoc += 
7;\n\t\t\treturn (TRUE);\n\t\t}\n\t\telse\n\t\t\treturn (FALSE);\n\t}\n}\n\nboolean Func3(EnumParIn)\nREG Enumeration\tEnumParIn;\n{\n\tREG Enumeration\tEnumLoc;\n\n\tEnumLoc = EnumParIn;\n\tif (EnumLoc == Ident3) return (TRUE);\n\treturn (FALSE);\n}\n\n#ifdef\tNOSTRUCTASSIGN\nmemcpy(d, s, l)\nregister char\t*d;\nregister char\t*s;\nregister int\tl;\n{\n\twhile (l--) *d++ = *s++;\n}\n#endif\n/* ---------- */\n"
  },
  {
    "path": "wa/workloads/drarm/__init__.py",
    "content": "#    Copyright 2023 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\n\nimport os\nimport pandas as pd\nfrom wa import ApkWorkload, Parameter\nfrom devlib.exception import TargetStableCalledProcessError\n\n\nclass DrArm(ApkWorkload):\n\n    name = 'drarm'\n    package_names = ['com.Arm.DrArm']\n    activity = 'com.unity3d.player.UnityPlayerActivity'\n    view = \"SurfaceView[com.Arm.DrArm/com.unity3d.player.UnityPlayerActivity](BLAST)\"\n    install_timeout = 200\n    description = \"\"\"\n    Dr. 
Arm’s Amazing Adventures is a “Souls-Like” Mobile Action Role Playing Game developed at Arm.\n    \"\"\"\n\n    parameters = [\n        Parameter('timeout', kind=int, default=126,\n                  description='The amount of time the game should run for'),\n        Parameter('auto_demo', kind=bool, default=False,\n                  description='Start the demo automatically'),\n        Parameter('show_fps', kind=bool, default=False,\n                  description='Show the FPS count window in-game'),\n        Parameter('adpf', kind=bool, default=True,\n                  description='Enable ADPF'),\n        Parameter('adpf_auto', kind=bool, default=True,\n                  description='Enable automatic ADPF mode'),\n        Parameter('adpf_logging', kind=bool, default=False,\n                  description='Enable ADPF logging'),\n        Parameter('verbose_log', kind=bool, default=False,\n                  description='Emit reported stats as debug logs'),\n        Parameter('adpf_interventions', kind=bool, default=True,\n                  description='Enable ADPF interventions'),\n        Parameter('target_vsyncs', kind=int, default=1,\n                  description='the number of vsyncs to target to a frame (1 = current display rate)'),\n        Parameter('target_framerate', kind=int, default=None,\n                  description='Target framerate for the application'),\n        Parameter('fps_report_file', kind=str, default=None,\n                  description='File name that the ADPF FPS report should use.'),\n        Parameter('fixed_time_step', kind=float, default=None,\n                  description='Time, in seconds, that should be used in advancing the simulation'),\n    ]\n\n    @property\n    def apk_arguments(self):\n        args = {\n            'showFPS': int(self.show_fps),\n            'doAdpf': int(self.adpf),\n            'adpfMode': int(self.adpf_auto),\n            'adpfLogging': int(self.adpf_logging),\n            'verboseLog': 
int(self.verbose_log),\n            'adpfInterventions': int(self.adpf_interventions),\n            'targetVsyncs': self.target_vsyncs,\n            'autoDemo': int(self.auto_demo),\n        }\n\n        if self.target_framerate is not None:\n            args['targetFramerate'] = self.target_framerate\n\n        if self.fixed_time_step is not None:\n            args['fixedTimeStep'] = self.fixed_time_step\n\n        if self.fps_report_file is not None:\n            args['fpsReportFileName'] = self.fps_report_file\n\n        return args\n\n    def run(self, context):\n        self.target.sleep(self.timeout)\n\n    def update_output(self, context):\n        super(DrArm, self).update_output(context)\n        outfile_glob = self.target.path.join(\n            self.target.external_storage_app_dir, self.apk.package, 'files', '*.csv'\n        )\n\n        try:\n            ls_output = self.target.execute('ls {}'.format(outfile_glob))\n        except TargetStableCalledProcessError:\n            self.logger.warning('Failed to find the ADPF report file.')\n            return\n\n        on_target_output_files = [f.strip() for f in ls_output.split('\\n') if f]\n\n        self.logger.info('Extracting the ADPF FPS report from target...')\n        for file in on_target_output_files:\n            host_output_file = os.path.join(context.output_directory, os.path.basename(file))\n            self.target.pull(file, host_output_file)\n            context.add_artifact('adpf', host_output_file, kind='data',\n                                 description='ADPF report log in CSV format.')\n\n            adpf_df = pd.read_csv(host_output_file)\n            if not adpf_df.empty:\n                context.add_metric('Average FPS', round(adpf_df['average fps'].mean(), 2))\n                context.add_metric('Frame count', int(adpf_df['# frame count'].iloc[-1]))\n"
  },
  {
    "path": "wa/workloads/exoplayer/__init__.py",
    "content": "# SPDX-License-Identifier: Apache-2.0\n#\n# Copyright (C) 2017, Arm Limited and contributors.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\"); you may\n# not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\nimport re\nimport os\nimport time\n\n# pylint: disable=wrong-import-position\nfrom future.standard_library import install_aliases\ninstall_aliases()\n\n# pylint: disable=import-error, wrong-import-order\nimport urllib.request\nimport urllib.parse\nimport urllib.error\n\nfrom wa import ApkWorkload, Parameter, ConfigError, WorkloadError\nfrom wa.framework.configuration.core import settings\nfrom wa.utils.types import boolean\nfrom wa.utils.misc import ensure_directory_exists\nfrom devlib.utils.android import grant_app_permissions\n\n# Regexps for benchmark synchronization\nREGEXPS = {\n    'start': '.*(Displayed|START).*com.google.android.exoplayer2.demo/.PlayerActivity',\n    'duration': '.*period \\[(?P<duration>[0-9]+.*)\\]',\n    'end': '.*state \\[.+, .+, E\\]',\n    'dropped_frames': '.*droppedFrames \\[(?P<session_time>[0-9]+\\.[0-9]+), (?P<count>[0-9]+)\\]'\n}\n\n\nDOWNLOAD_URLS = {\n    'mp4_1080p': 'http://distribution.bbb3d.renderfarming.net/video/mp4/bbb_sunflower_1080p_30fps_normal.mp4',\n    'mov_720p': 'http://download.blender.org/peach/bigbuckbunny_movies/big_buck_bunny_720p_h264.mov',\n    'mov_480p': 'http://download.blender.org/peach/bigbuckbunny_movies/big_buck_bunny_480p_h264.mov',\n    'ogg_128kbps': 
'http://upload.wikimedia.org/wikipedia/commons/c/ca/Tchaikovsky_-_Romeo_and_Juliet_Ouverture_-_Antal_Dorati_(1959).ogg',\n}\n\n\nclass ExoPlayer(ApkWorkload):\n    \"\"\"\n    Android ExoPlayer\n\n    ExoPlayer is the basic video player library that is used by the YouTube\n    android app. The aim of this workload is to test a proxy for YouTube\n    performance on targets where running the real YouTube app is not possible\n    due its dependencies.\n\n    ExoPlayer sources: https://github.com/google/ExoPlayer\n\n    The 'demo' application is used by this workload.  It can easily be built by\n    loading the ExoPlayer sources into Android Studio.\n\n    Version r2.4.0 built from commit d979469 is known to work\n\n    Produces a metric 'exoplayer_dropped_frames' - this is the count of frames\n    that Exoplayer itself reports as dropped. This is not the same thing as the\n    dropped frames reported by gfxinfo.\n    \"\"\"\n\n    name = 'exoplayer'\n\n    video_directory = os.path.join(settings.dependencies_directory, name)\n\n    package_names = ['com.google.android.exoplayer2.demo']\n    supported_versions = ['2.4', '2.5', '2.6']\n    action = 'com.google.android.exoplayer.demo.action.VIEW'\n    default_format = 'mov_720p'\n    view = 'SurfaceView - com.google.android.exoplayer2.demo/com.google.android.exoplayer2.demo.PlayerActivity'\n\n    parameters = [\n        Parameter('version', allowed_values=supported_versions, override=True),\n        Parameter('duration', kind=int, default=20,\n                  description=\"\"\"\n                  Playback duration of the video file. This becomes the duration of the workload.\n                  If provided must be shorter than the length of the media.\n                  \"\"\"),\n        Parameter('format', allowed_values=list(DOWNLOAD_URLS.keys()),\n                  description=\"\"\"\n                  Specifies which format video file to play. 
Default is {}\n                  \"\"\".format(default_format)),\n        Parameter('filename',\n                  description=\"\"\"\n                   The name of the video file to play. This can be either a path\n                   to the file anywhere on your file system, or it could be just a\n                   name, in which case, the workload will look for it in\n                   ``{}``\n                   *Note*: either format or filename should be specified, but not both!\n                  \"\"\".format(video_directory)),\n        Parameter('force_dependency_push', kind=boolean, default=False,\n                  description=\"\"\"\n                  If true, video will always be pushed to device, regardless\n                  of whether the file is already on the device.  Default is ``False``.\n                  \"\"\"),\n        Parameter('landscape', kind=boolean, default=False,\n                  description=\"\"\"\n                  Configure the screen in landscape mode, otherwise ensure\n                  portrait orientation by default. 
Default is ``False``.\n                  \"\"\"),\n    ]\n\n    # pylint: disable=access-member-before-definition\n    def validate(self):\n        if self.format and self.filename:\n            raise ConfigError('Either format *or* filename must be specified; but not both.')\n\n        if not self.format and not self.filename:\n            self.format = self.default_format\n\n    def _find_host_video_file(self):\n        \"\"\"Pick the video file we're going to use, download it if necessary\"\"\"\n        if self.filename:\n            if self.filename[0] in './' or len(self.filename) > 1 and self.filename[1] == ':':\n                filepath = os.path.abspath(self.filename)\n            else:\n                filepath = os.path.join(self.video_directory, self.filename)\n            if not os.path.isfile(filepath):\n                raise WorkloadError('{} does not exist.'.format(filepath))\n            return filepath\n        else:\n            # Search for files we've already downloaded\n            files = []\n            format_ext, format_resolution = self.format.split('_')\n            for filename in os.listdir(self.video_directory):\n                _, file_ext = os.path.splitext(filename)\n                if file_ext == '.' + format_ext and format_resolution in filename:\n                    files.append(os.path.join(self.video_directory, filename))\n\n            if not files:\n                # Download a file with the requested format\n                url = DOWNLOAD_URLS[self.format]\n                filename = '{}_{}'.format(format_resolution, os.path.basename(url))\n                filepath = os.path.join(self.video_directory, filename)\n                self.logger.info('Downloading {} to {}...'.format(url, filepath))\n                urllib.request.urlretrieve(url, filepath)\n                return filepath\n            else:\n                if len(files) > 1:\n                    self.logger.warning('Multiple files found for {} format. 
Using {}.'\n                                        .format(self.format, files[0]))\n                    self.logger.warning('Use \"filename\"parameter instead of '\n                                        '\"format\" to specify a different file.')\n                return files[0]\n\n    def init_resources(self, context):  # pylint: disable=unused-argument\n        # Needs to happen first, as it sets self.format, which is required by\n        # _find_host_video_file\n        self.validate()\n\n        ensure_directory_exists(self.video_directory)\n        self.host_video_file = self._find_host_video_file()\n\n    def setup(self, context):\n        super(ExoPlayer, self).setup(context)\n\n        grant_app_permissions(self.target, self.package)\n\n        self.device_video_file = self.target.path.join(self.target.working_directory,\n                                                       os.path.basename(self.host_video_file))\n        if self.force_dependency_push or not self.target.file_exists(self.device_video_file):\n            self.logger.info('Copying {} to device.'.format(self.host_video_file))\n            self.target.push(self.host_video_file, self.device_video_file)\n\n        self._original_orientation = self.target.get_rotation()\n        self.target.set_rotation(1 if self.landscape else 0)\n\n        self.play_cmd = 'am start -a {} -d \"file://{}\"'.format(self.action,\n                                                               self.device_video_file)\n\n        self.monitor = self.target.get_logcat_monitor(list(REGEXPS.values()))\n        self.monitor.start()\n\n    def run(self, context):\n        self.target.execute(self.play_cmd)\n\n        self.monitor.wait_for(REGEXPS['start'])\n        self.logger.info('Playing media file')\n\n        line = self.monitor.wait_for(REGEXPS['duration'])[0]\n        media_duration_s = int(round(float(re.search(REGEXPS['duration'], line)\n                                           .group('duration'))))\n\n        
self.logger.info('Media duration is {} seconds'.format(media_duration_s))\n\n        if self.duration > media_duration_s:\n            raise ConfigError(\n                \"'duration' param ({}) longer than media duration ({})\".format(\n                    self.duration, media_duration_s))\n\n        if self.duration:\n            self.logger.info('Waiting {} seconds before ending playback'\n                             .format(self.duration))\n            time.sleep(self.duration)\n        else:\n            self.logger.info('Waiting for playback completion ({} seconds)'\n                             .format(media_duration_s))\n            self.monitor.wait_for(REGEXPS['end'], timeout=media_duration_s + 30)\n\n    def update_output(self, context):\n        regex = re.compile(REGEXPS['dropped_frames'])\n\n        dropped_frames = 0\n        for line in self.monitor.get_log():\n            match = regex.match(line)\n            if match:\n                dropped_frames += int(match.group('count'))\n\n        context.add_metric('exoplayer_dropped_frames', dropped_frames,\n                           lower_is_better=True)\n\n    def teardown(self, context):\n        super(ExoPlayer, self).teardown(context)\n        self.monitor.stop()\n        if self._original_orientation is not None:\n            self.target.set_rotation(self._original_orientation)\n"
  },
  {
    "path": "wa/workloads/geekbench/__init__.py",
    "content": "#    Copyright 2013-2025 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\n# pylint: disable=E1101\nimport os\nimport re\nimport tempfile\nimport json\nfrom collections import defaultdict\n\nfrom wa import Workload, ApkUiautoWorkload, Parameter\nfrom wa.framework.exception import ConfigError, WorkloadError\nfrom wa.utils.misc import capitalize\nfrom wa.utils.types import version_tuple, list_or_integer\nfrom wa.utils.exec_control import once\n\n\nclass Geekbench(ApkUiautoWorkload):\n\n    name = 'geekbench'\n    description = \"\"\"\n    Geekbench provides a comprehensive set of benchmarks engineered to quickly\n    and accurately measure processor and memory performance.\n\n    http://www.primatelabs.com/geekbench/\n    From the website:\n    Designed to make benchmarks easy to run and easy to understand, Geekbench\n    takes the guesswork out of producing robust and reliable benchmark results.\n    Geekbench scores are calibrated against a baseline score of 1,000 (which is\n    the score of a single-processor Power Mac G5 @ 1.6GHz). 
Higher scores are\n    better, with double the score indicating double the performance.\n\n    The benchmarks fall into one of four categories:\n        - integer performance.\n        - floating point performance.\n        - memory performance.\n        - stream performance.\n\n    Geekbench benchmarks: http://www.primatelabs.com/geekbench/doc/benchmarks.html\n    Geekbench scoring methodology:\n    http://support.primatelabs.com/kb/geekbench/interpreting-geekbench-scores\n    \"\"\"\n    summary_metrics = ['score', 'multicore_score']\n\n    supported_versions = ['6', '5', '4.4.2', '4.4.0', '4.3.4', '4.3.2', '4.3.1', '4.2.0', '4.0.1', '3.4.1', '3.0.0', '2']\n    package_names = ['com.primatelabs.geekbench6', 'com.primatelabs.geekbench5', 'com.primatelabs.geekbench', 'com.primatelabs.geekbench3', 'ca.primatelabs.geekbench2']\n\n    begin_regex = re.compile(r'^\\s*D/WebViewClassic.loadDataWithBaseURL\\(\\s*\\d+\\s*\\)'\n                             r'\\s*:\\s*(?P<content>\\<.*)\\s*$')\n    replace_regex = re.compile(r'<[^>]*>')\n\n    parameters = [\n        Parameter('version', allowed_values=supported_versions,\n                  description='Specifies which version of the workload should be run.',\n                  override=True),\n        Parameter('loops', kind=int, default=1, aliases=['times'],\n                  description=('Specifies the number of times the benchmark will be run in a \"tight '\n                               'loop\", i.e. without performing setup/teardown in between.')),\n        Parameter('timeout', kind=int, default=3600,\n                  description=('Timeout for a single iteration of the benchmark. This value is '\n                               'multiplied by ``times`` to calculate the overall run timeout. 
')),\n        Parameter('disable_update_result', kind=bool, default=False,\n                  description=('If ``True`` the results file will not be pulled from the targets '\n                               '``/data/data/com.primatelabs.geekbench`` folder.  This allows the '\n                               'workload to be run on unrooted targets and the results extracted '\n                               'manually later.')),\n    ]\n\n    is_corporate = False\n\n    phones_home = True\n\n    requires_network = True\n\n    def initialize(self, context):\n        super(Geekbench, self).initialize(context)\n        self.gui.uiauto_params['version'] = self.version\n        self.gui.uiauto_params['loops'] = self.loops\n        self.gui.uiauto_params['is_corporate'] = self.is_corporate\n        self.gui.timeout = self.timeout\n        if not self.disable_update_result and not self.target.is_rooted:\n            raise WorkloadError(\n                'Geekbench workload requires root to collect results. 
'\n                'You can set disable_update_result=True in the workload params '\n                'to run without collecting results.')\n\n    def setup(self, context):\n        super(Geekbench, self).setup(context)\n        self.run_timeout = self.timeout * self.loops\n\n    def update_output(self, context):\n        super(Geekbench, self).update_output(context)\n        if not self.disable_update_result:\n            major_version = version_tuple(self.version)[0]\n            update_method = getattr(self, 'update_result_{}'.format(major_version))\n            update_method(context)\n\n    def validate(self):\n        if (self.loops > 1) and (self.version == '2'):\n            raise ConfigError('loops parameter is not supported for version 2 of Geekbench.')\n\n    def update_result_2(self, context):\n        score_calculator = GBScoreCalculator()\n        score_calculator.parse(self.logcat_log)\n        score_calculator.update_results(context)\n\n    def update_result_3(self, context):\n        outfile_glob = self.target.path.join(self.target.package_data_directory, self.apk.package, 'files', '*gb3')\n        on_target_output_files = [f.strip() for f in self.target.execute('ls {}'.format(outfile_glob),\n                                                                         as_root=True).split('\\n') if f]\n        for i, on_target_output_file in enumerate(on_target_output_files):\n            host_temp_file = tempfile.mktemp()\n            self.target.pull(on_target_output_file, host_temp_file, as_root=True)\n            host_output_file = os.path.join(context.output_directory, os.path.basename(on_target_output_file))\n            with open(host_temp_file) as fh:\n                data = json.load(fh)\n            os.remove(host_temp_file)\n            with open(host_output_file, 'w') as wfh:\n                json.dump(data, wfh, indent=4)\n            context.add_artifact('geekout', host_output_file, kind='data',\n                                 
description='Geekbench 3 output from target.')\n            context.add_metric(namemify('score', i), data['score'])\n            context.add_metric(namemify('multicore_score', i), data['multicore_score'])\n            for section in data['sections']:\n                context.add_metric(namemify(section['name'] + '_score', i), section['score'])\n                context.add_metric(namemify(section['name'] + '_multicore_score', i),\n                                   section['multicore_score'])\n\n    def update_result(self, context):\n        outfile_glob = self.target.path.join(self.target.package_data_directory, self.apk.package, 'files', '*gb*')\n        on_target_output_files = [f.strip() for f in self.target.execute('ls {}'.format(outfile_glob),\n                                                                         as_root=True).split('\\n') if f]\n        for i, on_target_output_file in enumerate(on_target_output_files):\n            host_temp_file = tempfile.mktemp()\n            self.target.pull(on_target_output_file, host_temp_file, as_root=True)\n            host_output_file = os.path.join(context.output_directory, os.path.basename(on_target_output_file))\n            with open(host_temp_file) as fh:\n                data = json.load(fh)\n            os.remove(host_temp_file)\n            with open(host_output_file, 'w') as wfh:\n                json.dump(data, wfh, indent=4)\n            context.add_artifact('geekout', host_output_file, kind='data',\n                                 description='Geekbench output from target.')\n            context.add_metric(namemify('score', i), data['score'])\n            context.add_metric(namemify('multicore_score', i), data['multicore_score'])\n            for section in data['sections']:\n                context.add_metric(namemify(section['name'] + '_score', i), section['score'])\n                for workloads in section['workloads']:\n                    workload_name = workloads['name'].replace(\" \", \"-\")\n  
                  context.add_metric(namemify(section['name'] + '_' + workload_name + '_score', i),\n                                       workloads['score'])\n\n    update_result_4 = update_result\n    update_result_5 = update_result\n    update_result_6 = update_result\n\n\nclass GBWorkload(object):\n    \"\"\"\n    Geekbench workload (not to be confused with WA's workloads). This is a single test run by\n    Geekbench, such as performing compression or generating Mandelbrot.\n    \"\"\"\n\n    # Index maps onto the hundreds digit of the ID.\n    categories = [None, 'integer', 'float', 'memory', 'stream']\n\n    # 2003 entry-level Power Mac G5 is considered to have a baseline score of\n    # 1000 for every category.\n    pmac_g5_base_score = 1000\n\n    units_conversion_map = {\n        'K': 1,\n        'M': 1000,\n        'G': 1000000,\n    }\n\n    def __init__(self, wlid, name, pmac_g5_st_score, pmac_g5_mt_score):\n        \"\"\"\n        :param wlid: A three-digit workload ID. Uniquely identifies a workload and also\n                     determines the category a workload belongs to.\n        :param name: The name of the workload.\n        :param pmac_g5_st_score: Score achieved for this workload on 2003 entry-level\n                                 Power Mac G5 running in a single thread.\n        :param pmac_g5_mt_score: Score achieved for this workload on 2003 entry-level\n                                 Power Mac G5 running in multiple threads.\n        \"\"\"\n        self.wlid = wlid\n        self.name = name\n        self.pmac_g5_st_score = pmac_g5_st_score\n        self.pmac_g5_mt_score = pmac_g5_mt_score\n        self.category = self.categories[int(wlid) // 100]\n        self.collected_results = []\n\n    def add_result(self, value, units):\n        self.collected_results.append(self.convert_to_kilo(value, units))\n\n    def convert_to_kilo(self, value, units):\n        return value * self.units_conversion_map[units[0]]\n\n    def clear(self):\n    
    self.collected_results = []\n\n    def get_scores(self):\n        \"\"\"\n        Returns a tuple (single-threaded score, multi-threaded score) for this workload.\n        Some workloads only have a single-threaded score, in which case multi-threaded\n        score will be ``None``.\n        Geekbench will perform four iterations of each workload in single-threaded and,\n        for some workloads, multi-threaded configurations. Thus there should always be\n        either four or eight scores collected for each workload. Single-threaded iterations\n        are always done before multi-threaded, so the ordering of the scores can be used\n        to determine which configuration they belong to.\n        This method should not be called before score collection has finished.\n        \"\"\"\n        no_of_results = len(self.collected_results)\n        if no_of_results == 4:\n            return (self._calculate(self.collected_results[:4], self.pmac_g5_st_score), None)\n        if no_of_results == 8:\n            return (self._calculate(self.collected_results[:4], self.pmac_g5_st_score),\n                    self._calculate(self.collected_results[4:], self.pmac_g5_mt_score))\n        else:\n            msg = 'Collected {} results for Geekbench {} workload;'.format(no_of_results, self.name)\n            msg += ' expecting either 4 or 8.'\n            raise WorkloadError(msg)\n\n    def _calculate(self, values, scale_factor):\n        return max(values) * self.pmac_g5_base_score / scale_factor\n\n    def __str__(self):\n        return self.name\n\n    __repr__ = __str__\n\n\nclass GBScoreCalculator(object):\n    \"\"\"\n    Parses logcat output to extract raw Geekbench workload values and converts them into\n    category and overall scores.\n    \"\"\"\n\n    result_regex = re.compile(r'workload (?P<id>\\d+) (?P<value>[0-9.]+) '\n                              r'(?P<units>[a-zA-Z/]+) (?P<time>[0-9.]+)s')\n\n    # Indicates contribution to the overall score.\n    
category_weights = {\n        'integer': 0.3357231,\n        'float': 0.3594,\n        'memory': 0.1926489,\n        'stream': 0.1054738,\n    }\n\n    workloads = [\n        #          ID    Name        Power Mac ST  Power Mac MT\n        GBWorkload(101, 'Blowfish',         43971,   40979),  # NOQA\n        GBWorkload(102, 'Text Compress',    3202,    3280),  # NOQA\n        GBWorkload(103, 'Text Decompress',  4112,    3986),  # NOQA\n        GBWorkload(104, 'Image Compress',   8272,    8412),  # NOQA\n        GBWorkload(105, 'Image Decompress', 16800,   16330),  # NOQA\n        GBWorkload(107, 'Lua',              385,     385),  # NOQA\n\n        GBWorkload(201, 'Mandelbrot',       665589,  653746),  # NOQA),\n        GBWorkload(202, 'Dot Product',      481449,  455422),  # NOQA,\n        GBWorkload(203, 'LU Decomposition', 889933,  877657),  # NOQA\n        GBWorkload(204, 'Primality Test',   149394,  185502),  # NOQA\n        GBWorkload(205, 'Sharpen Image',    2340,    2304),  # NOQA\n        GBWorkload(206, 'Blur Image',       791,     787),  # NOQA\n\n        GBWorkload(302, 'Read Sequential',  1226708, None),  # NOQA\n        GBWorkload(304, 'Write Sequential', 683782,  None),  # NOQA\n        GBWorkload(306, 'Stdlib Allocate',  3739,    None),  # NOQA\n        GBWorkload(307, 'Stdlib Write',     2070681, None),  # NOQA\n        GBWorkload(401, 'Stream Copy',      1367892, None),  # NOQA\n        GBWorkload(402, 'Stream Scale',     1296053, None),  # NOQA\n        GBWorkload(403, 'Stream Add',       1507115, None),  # NOQA\n        GBWorkload(404, 'Stream Triad',     1384526, None),  # NOQA\n    ]\n\n    def __init__(self):\n        self.workload_map = {wl.wlid: wl for wl in self.workloads}\n\n    def parse(self, filepath):\n        \"\"\"\n        Extract results from the specified file. 
The file should contain a logcat log of Geekbench execution.\n        Iteration results in the log appear as 'I/geekbench' category entries in the following format::\n         |                     worklod ID          value      units   timing\n         |                         \\-------------    |     ----/     ---/\n         |                                      |    |     |         |\n         |  I/geekbench(29026): [....] workload 101 132.9 MB/sec 0.0300939s\n         |      |               |\n         |      |               -----\\\n         |      label    random crap we don't care about\n        \"\"\"\n        for wl in self.workloads:\n            wl.clear()\n        with open(filepath) as fh:\n            for line in fh:\n                match = self.result_regex.search(line)\n                if match:\n                    wkload = self.workload_map[int(match.group('id'))]\n                    wkload.add_result(float(match.group('value')), match.group('units'))\n\n    def update_results(self, context):\n        \"\"\"\n        http://support.primatelabs.com/kb/geekbench/interpreting-geekbench-2-scores\n        From the website:\n        Each workload's performance is compared against a baseline to determine a score. These\n        scores are averaged together to determine an overall, or Geekbench, score for the system.\n        Geekbench uses the 2003 entry-level Power Mac G5 as the baseline with a score of 1,000\n        points. Higher scores are better, with double the score indicating double the performance.\n        Geekbench provides three different kinds of scores:\n            :Workload Scores: Each time a workload is executed Geekbench calculates a score based\n                              on the computer's performance compared to the baseline\n                              performance. 
There can be multiple workload scores for the\n                              same workload as Geekbench can execute each workload multiple\n                              times with different settings. For example, the \"Dot Product\"\n                              workload is executed four times (single-threaded scalar code,\n                              multi-threaded scalar code, single-threaded vector code, and\n                              multi-threaded vector code) producing four \"Dot Product\" scores.\n            :Section Scores: A section score is the average of all the workload scores for\n                             workloads that are part of the section. These scores are useful\n                             for determining the performance of the computer in a particular\n                             area. See the section descriptions above for a summary on what\n                             each section measures.\n            :Geekbench Score: The Geekbench score is the weighted average of the four section\n                              scores. 
The Geekbench score provides a way to quickly compare\n                              performance across different computers and different platforms\n                              without getting bogged down in details.\n        \"\"\"\n        scores_by_category = defaultdict(list)\n        for wkload in self.workloads:\n            st_score, mt_score = wkload.get_scores()\n            scores_by_category[wkload.category].append(st_score)\n            context.add_metric(wkload.name + ' (single-threaded)', int(st_score))\n            if mt_score is not None:\n                scores_by_category[wkload.category].append(mt_score)\n                context.add_metric(wkload.name + ' (multi-threaded)', int(mt_score))\n\n        overall_score = 0\n        for category in scores_by_category:\n            scores = scores_by_category[category]\n            category_score = sum(scores) / len(scores)\n            overall_score += category_score * self.category_weights[category]\n            context.add_metric(capitalize(category) + ' Score', int(category_score))\n        context.add_metric('Geekbench Score', int(overall_score))\n\n\nclass GeekbenchCorproate(Geekbench):  # pylint: disable=too-many-ancestors\n    name = \"geekbench-corporate\"\n    is_corporate = True\n    requires_network = False\n    supported_versions = ['4.1.0', '4.3.4', '5.0.0', '5.0.1', '5.0.3', '5.4.6']\n    package_names = ['com.primatelabs.geekbench4.corporate', 'com.primatelabs.geekbench5.corporate']\n    activity = 'com.primatelabs.geekbench.HomeActivity'\n\n    parameters = [\n        Parameter('version', allowed_values=supported_versions, override=True)\n    ]\n\n\ndef namemify(basename, i):\n    return basename + (' {}'.format(i) if i else '')\n\n\nclass GeekbenchCmdline(Workload):\n\n    name = \"geekbench_cli\"\n    description = \"Workload for running command line version Geekbench\"\n\n    gb6_workloads = {\n        # Single-Core and Multi-Core\n        101: 'File Compression',\n        102: 
'Navigation',\n        103: 'HTML5 Browser',\n        104: 'PDF Renderer',\n        105: 'Photo Library',\n        201: 'Clang',\n        202: 'Text Processing',\n        203: 'Asset Compression',\n        301: 'Object Detection',\n        402: 'Object Remover',\n        403: 'HDR',\n        404: 'Photo Filter',\n        501: 'Ray Tracer',\n        502: 'Structure from Motion',\n        # OpenCL and Vulkan\n        303: 'Face Detection',\n        406: 'Edge Detection',\n        407: 'Gaussian Blur',\n        503: 'Feature Matching',\n        504: 'Stereo Matching',\n        601: 'Particle Physics',\n        # Single-Core, Multi-Core, OpenCL, and Vulkan\n        302: 'Background Blur',\n        401: 'Horizon Detection',\n    }\n\n    gb5_workloads = {\n        # Single-Core and Multi-Core\n        101: 'AES-XTS',\n        201: 'Text Compression',\n        202: 'Image Compression',\n        203: 'Navigation',\n        204: 'HTML5',\n        205: 'SQLite',\n        206: 'PDF Rendering',\n        207: 'Text Rendering',\n        208: 'Clang',\n        209: 'Camera',\n        301: 'N-Body Physics',\n        302: 'Rigid Body Physics',\n        307: 'Image Inpainting',\n        308: 'HDR',\n        309: 'Ray Tracing',\n        310: 'Structure from Motion',\n        312: 'Speech Recognition',\n        313: 'Machine Learning',\n        # OpenCL and Vulkan\n        220: 'Sobel',\n        221: 'Canny',\n        222: 'Stereo Matching',\n        230: 'Histogram Equalization',\n        304: 'Depth of Field',\n        311: 'Feature Matching',\n        320: 'Particle Physics',\n        321: 'SFFT',\n        # Single-Core, Multi-Core, OpenCL, and Vulkan\n        303: 'Gaussian Blur',\n        305: 'Face Detection',\n        306: 'Horizon Detection',\n    }\n\n    binary_name = 'geekbench_aarch64'\n\n    allowed_extensions = ['json', 'csv', 'xml', 'html', 'text']\n\n    parameters = [\n        Parameter('cpumask', kind=str, default='',\n                  description='CPU mask used by 
taskset.'),\n        Parameter('section', kind=int, default=1, allowed_values=[1, 4, 9],\n                  description=\"\"\"Run the specified sections. It should be 1 for CPU benchmarks,\n                  4 for OpenCL benchmarks and 9 for Vulkan benchmarks.\"\"\"),\n        Parameter('upload', kind=bool, default=False,\n                  description='Upload results to Geekbench Browser'),\n        Parameter('is_single_core', kind=bool, default=True,\n                  description='Run workload in single-core or multi-core mode.'),\n        Parameter('workload', kind=list_or_integer, default=301,\n                  description='Specify workload to run'),\n        Parameter('iterations', kind=int, default=5,\n                  description='Number of iterations'),\n        Parameter('workload_gap', kind=int, default=2000,\n                  description='N milliseconds gap between workloads'),\n        Parameter('output_file', kind=str, default='gb_cli.json',\n                  description=f\"\"\"Specify the name of the output results file.\n                  If it is not specified, the output file will be generated as a JSON file.\n                  It can be {', '.join(allowed_extensions)} files.\"\"\"),\n        Parameter('timeout', kind=int, default=2000,\n                  description='The test timeout in ms. 
It should be long for 1000 iterations.'),\n        Parameter('version', kind=str, default='6.3.0',\n                  description='Specifies which version of the Geekbench should run.'),\n    ]\n\n    def __init__(self, target, **kwargs):\n        super(GeekbenchCmdline, self).__init__(target, **kwargs)\n        self.target_result_json = None\n        self.host_result_json = None\n        self.workloads = self.gb6_workloads\n        self.params = ''\n        self.output = ''\n        self.target_exec_directory = ''\n        self.tar_file_src = ''\n        self.tar_file_dst = ''\n        self.file_exists = False\n\n    def init_resources(self, context):\n        \"\"\"\n        Retrieves necessary files to run the benchmark in TAR format.\n        WA will look for `gb_cli_artifacts_<version>.tar` file to deploy them to the\n        working directory. If there is no specified version, it will look for version\n        6.3.0 by default.\n        \"\"\"\n        self.deployable_assets = [''.join(['gb_cli_artifacts', '_', self.version, '.tar'])]\n\n        # Create an executables directory\n        self.target_exec_directory = self.target.path.join(self.target.executables_directory, f'gb_cli-{self.version}')\n        self.target.execute(\"mkdir -p {}\".format(self.target_exec_directory))\n\n        # Source and Destination paths for the artifacts tar file\n        self.tar_file_src = self.target.path.join(self.target.working_directory, self.deployable_assets[0])\n        self.tar_file_dst = self.target.path.join(self.target_exec_directory, self.deployable_assets[0])\n        # Check the tar file if it already exists\n        if self.target.file_exists(self.tar_file_dst):\n            self.file_exists = True\n        else:\n            # Get the assets file\n            super(GeekbenchCmdline, self).init_resources(context)\n\n    @once\n    def initialize(self, context):\n        if self.version[0] == '5':\n            self.workloads = self.gb5_workloads\n        # If 
the tar file does not exist in the target, deploy the assets\n        if not self.file_exists:\n            super(GeekbenchCmdline, self).initialize(context)\n            # Move the tar file to the executables directory\n            self.target.execute(\n                '{} mv {} {}'.format(\n                    self.target.busybox, self.tar_file_src, self.tar_file_dst))\n            # Extract the tar file\n            self.target.execute(\n                '{} tar -xf {} -C {}'.format(\n                    self.target.busybox, self.tar_file_dst, self.target_exec_directory))\n\n    def setup(self, context):\n        super(GeekbenchCmdline, self).setup(context)\n\n        self.params = ''\n\n        self.params += '--section {} '.format(self.section)\n        if self.section == 1:\n            self.params += '--single-core ' if self.is_single_core else '--multi-core '\n\n        self.params += '--upload ' if self.upload else '--no-upload '\n\n        known_workloads = '\\n'.join(\"{}: {}\".format(k, v) for k, v in self.workloads.items())\n        if any([t not in self.workloads.keys() for t in self.workload]):\n            msg = 'Unknown workload(s) specified. Known workloads: {}'\n            raise ValueError(msg.format(known_workloads))\n\n        self.params += '--workload {} '.format(''.join(\"{},\".format(i) for i in self.workload))\n\n        if self.iterations:\n            self.params += '--iterations {} '.format(self.iterations)\n\n        if self.workload_gap:\n            self.params += '--workload-gap {} '.format(self.workload_gap)\n\n        extension = os.path.splitext(self.output_file)[1][1:]\n        if self.output_file and extension not in self.allowed_extensions:\n            msg = f\"No allowed extension specified. 
Allowed extensions: {', '.join(self.allowed_extensions)}\"\n            raise ValueError(msg)\n        elif self.output_file:\n            # Output results file with the given name and extension\n            self.target_result_json = os.path.join(self.target_exec_directory, self.output_file)\n            self.params += '--export-{} {}'.format(extension, self.target_result_json)\n            self.host_result_json = os.path.join(context.output_directory, self.output_file)\n        else:\n            # The output file is not specified\n            self.target_result_json = os.path.join(self.target_exec_directory, self.output_file)\n            self.params += '--save {}'.format(self.target_result_json)\n            self.host_result_json = os.path.join(context.output_directory, self.output_file)\n\n    def run(self, context):\n        super(GeekbenchCmdline, self).run(context)\n        taskset = f\"taskset {self.cpumask}\" if self.cpumask else \"\"\n        binary = self.target.path.join(self.target_exec_directory, self.binary_name)\n        cmd = '{} {} {}'.format(taskset, binary, self.params)\n\n        try:\n            self.output = self.target.execute(cmd, timeout=self.timeout, as_root=True)\n        except KeyboardInterrupt:\n            self.target.killall(self.binary_name)\n            raise\n\n    def update_output(self, context):\n        super(GeekbenchCmdline, self).update_output(context)\n        if not self.output:\n            return\n        for workload in self.workload:\n            scores = []\n            matches = re.findall(self.workloads[workload] + '(.+\\d)', self.output)\n            for match in matches:\n                scores.append(int(re.search(r'\\d+', match).group(0)))\n            if self.section == 4:\n                context.add_metric(\"OpenCL Score \" + self.workloads[workload], scores[0])\n            elif self.section == 9:\n                context.add_metric(\"Vulkan Score \" + self.workloads[workload], scores[0])\n            
else:\n                context.add_metric(\"Single-Core Score \" + self.workloads[workload], scores[0])\n                if not self.is_single_core:\n                    context.add_metric(\"Multi-Core Score \" + self.workloads[workload], scores[1])\n\n    def extract_results(self, context):\n        # Extract results on the target\n        super(GeekbenchCmdline, self).extract_results(context)\n        self.target.pull(self.target_result_json, self.host_result_json)\n        context.add_artifact('GeekbenchCmdline_results', self.host_result_json, kind='raw')\n\n    @once\n    def finalize(self, context):\n        if self.cleanup_assets:\n            self.target.remove(self.target_exec_directory)\n"
  },
  {
    "path": "wa/workloads/geekbench/uiauto/app/build.gradle",
    "content": "apply plugin: 'com.android.application'\n\ndef packageName = \"com.arm.wa.uiauto.geekbench\"\n\nandroid {\n    compileSdkVersion 28\n    buildToolsVersion \"28.0.3\"\n    defaultConfig {\n        applicationId \"${packageName}\"\n        minSdkVersion 18\n        targetSdkVersion 28\n        testInstrumentationRunner \"android.support.test.runner.AndroidJUnitRunner\"\n    }\n    buildTypes {\n        applicationVariants.all { variant ->\n            variant.outputs.each { output ->\n                output.outputFileName = \"${packageName}.apk\"\n            }\n        }\n    }\n}\n\ndependencies {\n    compile fileTree(dir: 'libs', include: ['*.jar'])\n    compile 'com.android.support.test:runner:0.5'\n    compile 'com.android.support.test:rules:0.5'\n    compile 'com.android.support.test.uiautomator:uiautomator-v18:2.1.2'\n    compile(name: 'uiauto', ext:'aar')\n}\n\nrepositories {\n    flatDir {\n        dirs 'libs'\n    }\n}\n"
  },
  {
    "path": "wa/workloads/geekbench/uiauto/app/src/main/AndroidManifest.xml",
    "content": "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n<manifest xmlns:android=\"http://schemas.android.com/apk/res/android\"\n    package=\"com.arm.wa.uiauto.geekbench\"\n    android:versionCode=\"1\"\n    android:versionName=\"1.0\">\n\n\n    <instrumentation\n        android:name=\"android.support.test.runner.AndroidJUnitRunner\"\n        android:targetPackage=\"${applicationId}\"/>\n\n</manifest>\n\n"
  },
  {
    "path": "wa/workloads/geekbench/uiauto/app/src/main/java/com/arm/wa/uiauto/geekbench/UiAutomation.java",
    "content": "/*    Copyright 2013-2018 ARM Limited\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n*/\n\n\npackage com.arm.wa.uiauto.geekbench;\n\nimport android.app.Activity;\nimport android.os.Bundle;\nimport android.support.test.runner.AndroidJUnit4;\nimport android.support.test.uiautomator.UiObject;\nimport android.support.test.uiautomator.UiObjectNotFoundException;\nimport android.support.test.uiautomator.UiSelector;\nimport android.support.test.uiautomator.UiScrollable;\nimport android.view.KeyEvent;\n\nimport com.arm.wa.uiauto.BaseUiAutomation;\n// import com.arm.wa.uiauto.UxPerfUiAutomation;\n\nimport org.junit.Before;\nimport org.junit.Test;\nimport org.junit.runner.RunWith;\n\nimport java.util.concurrent.TimeUnit;\n\n@RunWith(AndroidJUnit4.class)\npublic class UiAutomation extends BaseUiAutomation {\n\n    public static String TAG = \"geekbench\";\n    public static final long WAIT_TIMEOUT_5SEC = TimeUnit.SECONDS.toMillis(5);\n    public static final long WAIT_TIMEOUT_20MIN = TimeUnit.SECONDS.toMillis(20 * 60);\n\n    Bundle params;\n    String[] version;\n    Integer majorVersion;\n    Integer minorVersion;\n    Boolean isCorporate;\n    Integer loops;\n\n    @Before\n    public void initialize() {\n        params = getParams();\n        version = params.getString(\"version\").split(\"\\\\.\");\n        majorVersion = Integer.parseInt(version[0]);\n\tif (version.length > 1)\n\t\tminorVersion = Integer.parseInt(version[1]);\n        
isCorporate = params.getBoolean(\"is_corporate\");\n        loops = params.getInt(\"loops\");\n    }\n\n    @Test\n    @Override\n    public void setup() throws Exception {\n        initialize_instrumentation();\n        dismissAndroidVersionPopup();\n\n        if (!isCorporate)\n            dismissEula();\n    }\n\n    @Test\n    @Override\n    public void runWorkload() throws Exception {\n        for (int i = 0; i < loops; i++) {\n            switch (majorVersion) {\n                case 2:\n                    // In version 2, we scroll through the results WebView to make sure\n                    // all results appear on the screen, which causes them to be dumped into\n                    // logcat by the Linaro hacks.\n                    runBenchmarks();\n                    waitForResultsv2();\n                    scrollThroughResults();\n                    break;\n                case 3:\n                    runBenchmarks();\n                    waitForResultsv3onwards();\n                    if (minorVersion < 4) {\n                        // Attempting to share the results will generate the .gb3 file with\n                        // results that can then be pulled from the device. 
This is not possible\n                        // in version 2 of Geekbench (Share option was added later).\n                        // Sharing is not necessary from 3.4.1 onwards as the .gb3 files are always\n                        // created.\n                        shareResults();\n                    }\n                    break;\n                case 4:\n                case 5:\n                case 6:\n                    runCpuBenchmarks(isCorporate);\n                    waitForResultsv3onwards();\n                    break;\n                default :\n                    throw new RuntimeException(\"Invalid version of Geekbench requested\");\n            }\n\n            if (i < (loops - 1)) {\n                mDevice.pressBack();\n                if (majorVersion < 4)\n                    mDevice.pressBack();  // twice\n            }\n        }\n\n        Bundle status = new Bundle();\n        mInstrumentation.sendStatus(Activity.RESULT_OK, status);\n    }\n\n    public void dismissEula() throws Exception {\n        UiObject acceptButton =\n            //mDevice.findObject(new UiSelector().textContains(\"Accept\")\n           mDevice.findObject(new UiSelector().resourceId(\"android:id/button1\")\n                                         .className(\"android.widget.Button\"));\n        if (!acceptButton.waitForExists(WAIT_TIMEOUT_5SEC)) {\n            throw new UiObjectNotFoundException(\"Could not find Accept button\");\n        }\n        acceptButton.click();\n    }\n\n    public void runBenchmarks() throws Exception {\n        UiObject runButton =\n           mDevice.findObject(new UiSelector().textContains(\"Run Benchmark\")\n                                         .className(\"android.widget.Button\"));\n        if (!runButton.waitForExists(WAIT_TIMEOUT_5SEC)) {\n            throw new UiObjectNotFoundException(\"Could not find Run button\");\n        }\n        runButton.click();\n    }\n\n    public void runCpuBenchmarks(boolean isCorporate) 
throws Exception {\n        // The run button is at the bottom of the view and may be off the screen so swipe to be sure\n        uiDeviceSwipe(Direction.DOWN, 50);\n        scrollPage();\n\n        String packageName = isCorporate ? \"com.primatelabs.geekbench.*.corporate\"\n                                         : \"com.primatelabs.geekbench.*\";\n\n        UiObject runButton =\n\t    mDevice.findObject(new UiSelector().resourceIdMatches(packageName + \":id/runCpuBenchmarks\"));\n        if (!runButton.waitForExists(WAIT_TIMEOUT_5SEC)) {\n            throw new UiObjectNotFoundException(\"Could not find Run button\");\n        }\n        runButton.click();\n    }\n\n    public void waitForResultsv2() throws Exception {\n        UiSelector selector = new UiSelector();\n        UiObject resultsWebview = mDevice.findObject(selector.className(\"android.webkit.WebView\"));\n        if (!resultsWebview.waitForExists(WAIT_TIMEOUT_20MIN)) {\n            throw new UiObjectNotFoundException(\"Did not see Geekbench results screen.\");\n        }\n    }\n\n    public void waitForResultsv3onwards() throws Exception {\n        UiSelector selector = new UiSelector();\n        UiObject runningTextView = mDevice.findObject(selector.textContains(\"Running\")\n                                                        .className(\"android.widget.TextView\"));\n\n        if (!runningTextView.waitUntilGone(WAIT_TIMEOUT_20MIN)) {\n            throw new UiObjectNotFoundException(\"Did not get to Geekbench results screen.\");\n        }\n    }\n\n    public void scrollThroughResults() throws Exception {\n        UiSelector selector = new UiSelector();\n        mDevice.pressKeyCode(KeyEvent.KEYCODE_PAGE_DOWN);\n        sleep(1);\n        mDevice.pressKeyCode(KeyEvent.KEYCODE_PAGE_DOWN);\n        sleep(1);\n        mDevice.pressKeyCode(KeyEvent.KEYCODE_PAGE_DOWN);\n        sleep(1);\n        mDevice.pressKeyCode(KeyEvent.KEYCODE_PAGE_DOWN);\n    }\n\n    public void shareResults() throws 
Exception {\n        sleep(2); // transition\n        UiSelector selector = new UiSelector();\n        mDevice.pressMenu();\n        UiObject shareButton = mDevice.findObject(selector.text(\"Share\")\n                                                    .className(\"android.widget.TextView\"));\n        shareButton.waitForExists(WAIT_TIMEOUT_5SEC);\n        shareButton.click();\n    }\n\n    public void scrollPage() throws Exception {\n        UiScrollable listView = new UiScrollable(new UiSelector().className(\"android.widget.ScrollView\"));\n        listView.setMaxSearchSwipes(3);\n        listView.scrollTextIntoView(\"RUN CPU BENCHMARK\");\n    }\n}\n"
  },
  {
    "path": "wa/workloads/geekbench/uiauto/build.gradle",
    "content": "// Top-level build file where you can add configuration options common to all sub-projects/modules.\n\nbuildscript {\n    repositories {\n        jcenter()\n        google()\n    }\n    dependencies {\n        classpath 'com.android.tools.build:gradle:4.2.0'\n\n        // NOTE: Do not place your application dependencies here; they belong\n        // in the individual module build.gradle files\n    }\n}\n\nallprojects {\n    repositories {\n        jcenter()\n        google()\n    }\n}\n\ntask clean(type: Delete) {\n    delete rootProject.buildDir\n}\n"
  },
  {
    "path": "wa/workloads/geekbench/uiauto/build.sh",
    "content": "#!/bin/bash\n#    Copyright 2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\n\n\nset -eux\n\n# CD into build dir if possible - allows building from any directory\nscript_path='.'\nif `readlink -f $0 &>/dev/null`; then\n    script_path=`readlink -f $0 2>/dev/null`\nfi\nscript_dir=`dirname $script_path`\ncd $script_dir\n\n# Ensure gradlew exists before starting\nif [[ ! -f gradlew ]]; then\n    echo 'gradlew file not found! Check that you are in the right directory.'\n    exit 9\nfi\n\n# Copy base class library from wa dist\nlibs_dir=app/libs\nbase_class=`python3 -c \"import os, wa; print(os.path.join(os.path.dirname(wa.__file__), 'framework', 'uiauto', 'uiauto.aar'))\"`\nmkdir -p $libs_dir\ncp $base_class $libs_dir\n\n# Build and return appropriate exit code if failed\n# gradle build\n./gradlew clean :app:assembleDebug\nexit_code=$?\nif [[ $exit_code -ne 0 ]]; then\n    echo \"ERROR: 'gradle build' exited with code $exit_code\"\n    exit $exit_code\nfi\n\n# If successful move APK file to workload folder (overwrite previous)\npackage=com.arm.wa.uiauto.geekbench\nrm -f ../$package\nif [[ -f app/build/outputs/apk/debug/$package.apk ]]; then\n    cp app/build/outputs/apk/debug/$package.apk ../$package.apk\nelse\n    echo 'ERROR: UiAutomator apk could not be found!'\n    exit 9\nfi\n"
  },
  {
    "path": "wa/workloads/geekbench/uiauto/gradle/wrapper/gradle-wrapper.properties",
    "content": "#Wed May 03 15:42:44 BST 2017\ndistributionBase=GRADLE_USER_HOME\ndistributionPath=wrapper/dists\nzipStoreBase=GRADLE_USER_HOME\nzipStorePath=wrapper/dists\ndistributionUrl=https\\://services.gradle.org/distributions/gradle-7.2-all.zip\n"
  },
  {
    "path": "wa/workloads/geekbench/uiauto/gradlew",
    "content": "#!/usr/bin/env bash\n\n##############################################################################\n##\n##  Gradle start up script for UN*X\n##\n##############################################################################\n\n# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.\nDEFAULT_JVM_OPTS=\"\"\n\nAPP_NAME=\"Gradle\"\nAPP_BASE_NAME=`basename \"$0\"`\n\n# Use the maximum available, or set MAX_FD != -1 to use that value.\nMAX_FD=\"maximum\"\n\nwarn ( ) {\n    echo \"$*\"\n}\n\ndie ( ) {\n    echo\n    echo \"$*\"\n    echo\n    exit 1\n}\n\n# OS specific support (must be 'true' or 'false').\ncygwin=false\nmsys=false\ndarwin=false\ncase \"`uname`\" in\n  CYGWIN* )\n    cygwin=true\n    ;;\n  Darwin* )\n    darwin=true\n    ;;\n  MINGW* )\n    msys=true\n    ;;\nesac\n\n# Attempt to set APP_HOME\n# Resolve links: $0 may be a link\nPRG=\"$0\"\n# Need this for relative symlinks.\nwhile [ -h \"$PRG\" ] ; do\n    ls=`ls -ld \"$PRG\"`\n    link=`expr \"$ls\" : '.*-> \\(.*\\)$'`\n    if expr \"$link\" : '/.*' > /dev/null; then\n        PRG=\"$link\"\n    else\n        PRG=`dirname \"$PRG\"`\"/$link\"\n    fi\ndone\nSAVED=\"`pwd`\"\ncd \"`dirname \\\"$PRG\\\"`/\" >/dev/null\nAPP_HOME=\"`pwd -P`\"\ncd \"$SAVED\" >/dev/null\n\nCLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar\n\n# Determine the Java command to use to start the JVM.\nif [ -n \"$JAVA_HOME\" ] ; then\n    if [ -x \"$JAVA_HOME/jre/sh/java\" ] ; then\n        # IBM's JDK on AIX uses strange locations for the executables\n        JAVACMD=\"$JAVA_HOME/jre/sh/java\"\n    else\n        JAVACMD=\"$JAVA_HOME/bin/java\"\n    fi\n    if [ ! 
-x \"$JAVACMD\" ] ; then\n        die \"ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME\n\nPlease set the JAVA_HOME variable in your environment to match the\nlocation of your Java installation.\"\n    fi\nelse\n    JAVACMD=\"java\"\n    which java >/dev/null 2>&1 || die \"ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.\n\nPlease set the JAVA_HOME variable in your environment to match the\nlocation of your Java installation.\"\nfi\n\n# Increase the maximum file descriptors if we can.\nif [ \"$cygwin\" = \"false\" -a \"$darwin\" = \"false\" ] ; then\n    MAX_FD_LIMIT=`ulimit -H -n`\n    if [ $? -eq 0 ] ; then\n        if [ \"$MAX_FD\" = \"maximum\" -o \"$MAX_FD\" = \"max\" ] ; then\n            MAX_FD=\"$MAX_FD_LIMIT\"\n        fi\n        ulimit -n $MAX_FD\n        if [ $? -ne 0 ] ; then\n            warn \"Could not set maximum file descriptor limit: $MAX_FD\"\n        fi\n    else\n        warn \"Could not query maximum file descriptor limit: $MAX_FD_LIMIT\"\n    fi\nfi\n\n# For Darwin, add options to specify how the application appears in the dock\nif $darwin; then\n    GRADLE_OPTS=\"$GRADLE_OPTS \\\"-Xdock:name=$APP_NAME\\\" \\\"-Xdock:icon=$APP_HOME/media/gradle.icns\\\"\"\nfi\n\n# For Cygwin, switch paths to Windows format before running java\nif $cygwin ; then\n    APP_HOME=`cygpath --path --mixed \"$APP_HOME\"`\n    CLASSPATH=`cygpath --path --mixed \"$CLASSPATH\"`\n    JAVACMD=`cygpath --unix \"$JAVACMD\"`\n\n    # We build the pattern for arguments to be converted via cygpath\n    ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`\n    SEP=\"\"\n    for dir in $ROOTDIRSRAW ; do\n        ROOTDIRS=\"$ROOTDIRS$SEP$dir\"\n        SEP=\"|\"\n    done\n    OURCYGPATTERN=\"(^($ROOTDIRS))\"\n    # Add a user-defined pattern to the cygpath arguments\n    if [ \"$GRADLE_CYGPATTERN\" != \"\" ] ; then\n        OURCYGPATTERN=\"$OURCYGPATTERN|($GRADLE_CYGPATTERN)\"\n    fi\n    # Now convert the arguments - 
kludge to limit ourselves to /bin/sh\n    i=0\n    for arg in \"$@\" ; do\n        CHECK=`echo \"$arg\"|egrep -c \"$OURCYGPATTERN\" -`\n        CHECK2=`echo \"$arg\"|egrep -c \"^-\"`                                 ### Determine if an option\n\n        if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then                    ### Added a condition\n            eval `echo args$i`=`cygpath --path --ignore --mixed \"$arg\"`\n        else\n            eval `echo args$i`=\"\\\"$arg\\\"\"\n        fi\n        i=$((i+1))\n    done\n    case $i in\n        (0) set -- ;;\n        (1) set -- \"$args0\" ;;\n        (2) set -- \"$args0\" \"$args1\" ;;\n        (3) set -- \"$args0\" \"$args1\" \"$args2\" ;;\n        (4) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" ;;\n        (5) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" ;;\n        (6) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" \"$args5\" ;;\n        (7) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" \"$args5\" \"$args6\" ;;\n        (8) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" \"$args5\" \"$args6\" \"$args7\" ;;\n        (9) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" \"$args5\" \"$args6\" \"$args7\" \"$args8\" ;;\n    esac\nfi\n\n# Split up the JVM_OPTS And GRADLE_OPTS values into an array, following the shell quoting and substitution rules\nfunction splitJvmOpts() {\n    JVM_OPTS=(\"$@\")\n}\neval splitJvmOpts $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS\nJVM_OPTS[${#JVM_OPTS[*]}]=\"-Dorg.gradle.appname=$APP_BASE_NAME\"\n\nexec \"$JAVACMD\" \"${JVM_OPTS[@]}\" -classpath \"$CLASSPATH\" org.gradle.wrapper.GradleWrapperMain \"$@\"\n"
  },
  {
    "path": "wa/workloads/geekbench/uiauto/gradlew.bat",
    "content": "@if \"%DEBUG%\" == \"\" @echo off\r\n@rem ##########################################################################\r\n@rem\r\n@rem  Gradle startup script for Windows\r\n@rem\r\n@rem ##########################################################################\r\n\r\n@rem Set local scope for the variables with windows NT shell\r\nif \"%OS%\"==\"Windows_NT\" setlocal\r\n\r\n@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.\r\nset DEFAULT_JVM_OPTS=\r\n\r\nset DIRNAME=%~dp0\r\nif \"%DIRNAME%\" == \"\" set DIRNAME=.\r\nset APP_BASE_NAME=%~n0\r\nset APP_HOME=%DIRNAME%\r\n\r\n@rem Find java.exe\r\nif defined JAVA_HOME goto findJavaFromJavaHome\r\n\r\nset JAVA_EXE=java.exe\r\n%JAVA_EXE% -version >NUL 2>&1\r\nif \"%ERRORLEVEL%\" == \"0\" goto init\r\n\r\necho.\r\necho ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.\r\necho.\r\necho Please set the JAVA_HOME variable in your environment to match the\r\necho location of your Java installation.\r\n\r\ngoto fail\r\n\r\n:findJavaFromJavaHome\r\nset JAVA_HOME=%JAVA_HOME:\"=%\r\nset JAVA_EXE=%JAVA_HOME%/bin/java.exe\r\n\r\nif exist \"%JAVA_EXE%\" goto init\r\n\r\necho.\r\necho ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%\r\necho.\r\necho Please set the JAVA_HOME variable in your environment to match the\r\necho location of your Java installation.\r\n\r\ngoto fail\r\n\r\n:init\r\n@rem Get command-line arguments, handling Windowz variants\r\n\r\nif not \"%OS%\" == \"Windows_NT\" goto win9xME_args\r\nif \"%@eval[2+2]\" == \"4\" goto 4NT_args\r\n\r\n:win9xME_args\r\n@rem Slurp the command line arguments.\r\nset CMD_LINE_ARGS=\r\nset _SKIP=2\r\n\r\n:win9xME_args_slurp\r\nif \"x%~1\" == \"x\" goto execute\r\n\r\nset CMD_LINE_ARGS=%*\r\ngoto execute\r\n\r\n:4NT_args\r\n@rem Get arguments from the 4NT Shell from JP Software\r\nset CMD_LINE_ARGS=%$\r\n\r\n:execute\r\n@rem Setup the command line\r\n\r\nset 
CLASSPATH=%APP_HOME%\\gradle\\wrapper\\gradle-wrapper.jar\r\n\r\n@rem Execute Gradle\r\n\"%JAVA_EXE%\" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% \"-Dorg.gradle.appname=%APP_BASE_NAME%\" -classpath \"%CLASSPATH%\" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%\r\n\r\n:end\r\n@rem End local scope for the variables with windows NT shell\r\nif \"%ERRORLEVEL%\"==\"0\" goto mainEnd\r\n\r\n:fail\r\nrem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of\r\nrem the _cmd.exe /c_ return code!\r\nif  not \"\" == \"%GRADLE_EXIT_CONSOLE%\" exit 1\r\nexit /b 1\r\n\r\n:mainEnd\r\nif \"%OS%\"==\"Windows_NT\" endlocal\r\n\r\n:omega\r\n"
  },
  {
    "path": "wa/workloads/geekbench/uiauto/settings.gradle",
    "content": "include ':app'\n"
  },
  {
    "path": "wa/workloads/gfxbench/__init__.py",
    "content": "#    Copyright 2014-2016 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\nimport re\n\nfrom wa import ApkUiautoWorkload, WorkloadError, Parameter\nfrom wa.utils.types import list_or_string\n\n\nclass Gfxbench(ApkUiautoWorkload):\n\n    name = 'gfxbench'\n    package_names = ['com.glbenchmark.glbenchmark27']\n    supported_versions = ['4', '5']\n    is_corporate = False\n    clear_data_on_reset = False\n    regex_template = 'name: \\((?P<test_name>.*)\\).*result: \\((?P<result>.*)?\\).* sub_result:.*\\((?P<sub_result>.*?)?\\).*'\n    description = '''\n    Execute a subset of graphical performance benchmarks\n\n    Test description:\n    1. Open the gfxbench application\n    2. 
Execute Car Chase, Manhattan and Tessellation benchmarks\n\n    Note: Some of the default tests are unavailable on devices running\n          with a smaller resolution than 1080p.\n\n    '''\n\n    default_test_list = [\n        \"Car Chase\",\n        \"1080p Car Chase Offscreen\",\n        \"Manhattan 3.1\",\n        \"1080p Manhattan 3.1 Offscreen\",\n        \"1440p Manhattan 3.1.1 Offscreen\",\n        \"Tessellation\",\n        \"1080p Tessellation Offscreen\",\n    ]\n\n    parameters = [\n        Parameter('timeout', kind=int, default=3600,\n                  description=('Timeout for an iteration of the benchmark.')),\n        Parameter('tests', kind=list_or_string, default=default_test_list,\n                  description=('List of tests to be executed.')),\n    ]\n\n    def __init__(self, target, **kwargs):\n        super(Gfxbench, self).__init__(target, **kwargs)\n        self.gui.timeout = self.timeout\n        self.gui.uiauto_params['tests'] = self.tests\n\n    # pylint: disable=too-many-locals\n    def update_output(self, context):\n        super(Gfxbench, self).update_output(context)\n        regex_matches = [re.compile(self.regex_template.format(t)) for t in self.tests]\n        logcat_file = context.get_artifact_path('logcat')\n        found = []\n        detected_results = 0\n        failed = False\n        with open(logcat_file, errors='replace') as fh:\n            for line in fh:\n                for regex in regex_matches:\n                    match = regex.search(line)\n                    # Check if we have matched the score string in logcat and not already found.\n                    if match and match.group('test_name') not in found:\n                        found.append(match.group('test_name'))\n                        # Set Default values\n                        result = 'NaN'\n                        unit = 'FPS'\n\n                        # For most tests we usually want the `sub_result`\n                        # as this is our FPS 
value\n                        try:\n                            result = float(match.group('sub_result').split()[0].replace(',', ''))\n                        except (ValueError, TypeError):\n                            # However for some tests the value is stored in `result`\n                            # and the unit is saved in the `sub_result`.\n                            try:\n                                result = float(match.group('result').replace(',', ''))\n                                if match.group('sub_result'):\n                                    unit = match.group('sub_result').upper()\n                            except (ValueError, TypeError):\n                                failed = True\n\n                        entry = match.group('test_name')\n                        context.add_metric(entry, result, unit, lower_is_better=False)\n                        detected_results += 1\n\n        if failed or detected_results < len(regex_matches):\n            msg = \"The workload has failed to process all scores. Expected >={} scores, Detected {} scores.\"\n            raise WorkloadError(msg.format(len(regex_matches), detected_results))\n\n\nclass GfxbenchCorporate(Gfxbench):  # pylint: disable=too-many-ancestors\n\n    name = 'gfxbench-corporate'\n    package_names = ['net.kishonti.gfxbench.gl.v50000.corporate']\n    is_corporate = True\n"
  },
  {
    "path": "wa/workloads/gfxbench/uiauto/app/build.gradle",
    "content": "apply plugin: 'com.android.application'\n\ndef packageName = \"com.arm.wa.uiauto.gfxbench\"\n\nandroid {\n    compileSdkVersion 28\n    buildToolsVersion \"28.0.3\"\n    defaultConfig {\n        applicationId \"${packageName}\"\n        minSdkVersion 18\n        targetSdkVersion 28\n        versionCode 1\n        versionName \"1.0\"\n        testInstrumentationRunner \"android.support.test.runner.AndroidJUnitRunner\"\n    }\n    buildTypes {\n        release {\n            minifyEnabled false\n            proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'\n        }\n        applicationVariants.all { variant ->\n            variant.outputs.each { output ->\n                output.outputFileName = \"${packageName}.apk\"\n            }\n        }\n    }\n}\n\ndependencies {\n    implementation fileTree(dir: 'libs', include: ['*.jar'])\n    implementation 'com.android.support.test:runner:0.5'\n    implementation 'com.android.support.test:rules:0.5'\n    implementation 'com.android.support.test.uiautomator:uiautomator-v18:2.1.2'\n    implementation(name: 'uiauto', ext:'aar')\n}\n\nrepositories {\n    flatDir {\n        dirs 'libs'\n    }\n}\n"
  },
  {
    "path": "wa/workloads/gfxbench/uiauto/app/src/main/AndroidManifest.xml",
    "content": "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n<manifest xmlns:android=\"http://schemas.android.com/apk/res/android\"\n    package=\"com.arm.wa.uiauto.gfxbench\"\n    android:versionCode=\"1\"\n    android:versionName=\"1.0\">\n\n\n    <instrumentation\n        android:name=\"android.support.test.runner.AndroidJUnitRunner\"\n        android:targetPackage=\"${applicationId}\"/>\n\n</manifest>\n\n"
  },
  {
    "path": "wa/workloads/gfxbench/uiauto/app/src/main/java/com/arm/wa/uiauto/gfxbench/UiAutomation.java",
    "content": "/*    Copyright 2014-2016 ARM Limited\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\npackage com.arm.wa.uiauto.gfxbench;\n\nimport android.os.Bundle;\nimport android.support.test.runner.AndroidJUnit4;\nimport android.support.test.uiautomator.UiObject;\nimport android.support.test.uiautomator.UiObjectNotFoundException;\nimport android.support.test.uiautomator.UiSelector;\nimport android.support.test.uiautomator.UiScrollable;\nimport android.util.Log;\nimport android.graphics.Rect;\n\nimport com.arm.wa.uiauto.BaseUiAutomation;\nimport com.arm.wa.uiauto.ActionLogger;\n\nimport org.junit.Before;\nimport org.junit.Test;\nimport org.junit.runner.RunWith;\n\nimport java.util.ArrayList;\nimport java.util.concurrent.TimeUnit;\n\n@RunWith(AndroidJUnit4.class)\npublic class UiAutomation extends BaseUiAutomation {\n\n    private int networkTimeoutSecs = 30;\n    private long networkTimeout =  TimeUnit.SECONDS.toMillis(networkTimeoutSecs);\n    private Boolean isCorporate;\n    public static String TAG = \"UXPERF\";\n    protected Bundle parameters;\n    protected String[] testList;\n    protected String packageID;\n\n\n    @Before\n    public void initialize(){\n        parameters = getParams();\n        testList = parameters.getStringArray(\"tests\");\n        packageID = getPackageID(parameters);\n        isCorporate = parameters.getBoolean(\"is_corporate\");\n    }\n\n    @Test\n    public void setup() throws Exception{\n        
setScreenOrientation(ScreenOrientation.NATURAL);\n        clearFirstRun();\n\n        // Ensure we're on the home screen\n        UiObject homeButton = mDevice.findObject(\n                new UiSelector().resourceId(packageID + \"tabbar_back\"))\n                                .getChild(new UiSelector().index(0));\n        homeButton.click();\n\n        //Calculate the location of the test selection button\n        UiObject circle =\n            mDevice.findObject(new UiSelector().resourceId(packageID + \"main_circleControl\")\n            .className(\"android.widget.RelativeLayout\"));\n        Rect bounds = circle.getBounds();\n        int selectx = bounds.width()/4;\n        selectx = bounds.centerX() + selectx;\n        int selecty = bounds.height()/4;\n        selecty = bounds.centerY() + selecty;\n\n        Log.d(TAG, \"maxx \" + selectx);\n        Log.d(TAG, \"maxy \" + selecty);\n\n        mDevice.click(selectx,selecty);\n\n        // Disable test categories\n        toggleTest(\"High-Level Tests\");\n        toggleTest(\"Low-Level Tests\");\n        toggleTest(\"Special Tests\");\n\tif (isCorporate)\n\t\ttoggleTest(\"Fixed Time Test\");\n\n        // Enable selected tests\n        for (String test : testList) {\n            toggleTest(test);\n        }\n    }\n\n    @Test\n    public void runWorkload() throws Exception {\n        runBenchmark();\n    }\n\n    @Test\n    public void extractResults() throws Exception {\n        getScores();\n    }\n\n    @Test\n    public void teardown() throws Exception{\n        unsetScreenOrientation();\n    }\n\n    public void clearFirstRun() throws Exception {\n        UiObject accept =\n            mDevice.findObject(new UiSelector().resourceId(\"android:id/button1\")\n                .className(\"android.widget.Button\"));\n        if (accept.exists()){\n            accept.click();\n            sleep(5);\n        }\n        UiObject sync =\n                mDevice.findObject(new UiSelector().text(\"Data 
synchronization\")\n                    .className(\"android.widget.TextView\"));\n        if (!sync.exists()){\n            sync = mDevice.findObject(new UiSelector().text(\"Pushed data not found\")\n                    .className(\"android.widget.TextView\"));\n        }\n        if (sync.exists()){\n            UiObject data =\n                mDevice.findObject(new UiSelector().resourceId(\"android:id/button1\")\n                    .className(\"android.widget.Button\"));\n            data.click();\n        }\n\n        UiObject home =\n            mDevice.findObject(new UiSelector().resourceId(packageID + \"main_view_back\")\n                .className(\"android.widget.LinearLayout\"));\n            home.waitForExists(300000);\n    }\n\n    public void runBenchmark() throws Exception {\n        //Start the tests\n        UiObject start =\n            mDevice.findObject(new UiSelector().text(\"Start\"));\n        start.click();\n\n        //Wait for results\n        UiObject complete =\n            mDevice.findObject(new UiSelector().resourceId(packageID + \"results_testList\"));\n        complete.waitForExists(1200000);\n\n        UiObject outOfmemory = mDevice.findObject(new UiSelector().text(\"OUT_OF_MEMORY\"));\n        if (outOfmemory.exists()) {\n            throw new OutOfMemoryError(\"The workload has failed because the device is doing to much work.\");\n        }\n    }\n\n    public void getScores() throws Exception {\n        // To ensure we print all scores, some will be printed multiple times but these are filtered on the python side.\n        UiScrollable scrollable = new UiScrollable(new UiSelector().scrollable(true));\n        // Start at the bottom of the list as this seems more reliable when extracting results.\n        scrollable.flingToEnd(10);\n        Boolean top_of_list = false;\n        while(true) {\n            UiObject resultsList =\n                mDevice.findObject(new UiSelector().resourceId(packageID + \"results_testList\"));\n   
         // Find the element in the list that contains our test and pull result and sub_result\n            for (int i=1; i < resultsList.getChildCount(); i++) {\n                UiObject testName = resultsList.getChild(new UiSelector().index(i))\n                    .getChild(new UiSelector().resourceId(packageID + \"updated_result_item_name\"));\n                UiObject result = resultsList.getChild(new UiSelector()\n                                    .index(i))\n                                    .getChild(new UiSelector()\n                                    .resourceId(packageID + \"updated_result_item_result\"));\n                UiObject subResult = resultsList.getChild(new UiSelector()\n                                    .index(i))\n                                    .getChild(new UiSelector()\n                                    .resourceId(packageID + \"updated_result_item_subresult\"));\n                if (testName.waitForExists(500) && result.waitForExists(500) && subResult.waitForExists(500)) {\n                    Log.d(TAG, \"name: (\" + testName.getText() + \") result: (\" + result.getText() + \") sub_result: (\" + subResult.getText() + \")\");\n                }\n            }\n            // Ensure we loop over the first screen an extra time once the top of the list has been reached.\n            if (top_of_list){\n                break;\n            }\n            top_of_list = !scrollable.scrollBackward(100);\n        }\n    }\n\n    public void toggleTest(String testname) throws Exception {\n        UiScrollable list = new UiScrollable(new UiSelector().scrollable(true)\n                                                .resourceId(packageID + \"main_testSelectListView\"));\n        UiObject test =\n            mDevice.findObject(new UiSelector().text(testname));\n        if (!test.exists() && list.waitForExists(60)) {\n            list.flingToBeginning(10);\n            list.scrollIntoView(test);\n        }\n        test.click();\n    
}\n}\n"
  },
  {
    "path": "wa/workloads/gfxbench/uiauto/build.gradle",
    "content": "// Top-level build file where you can add configuration options common to all sub-projects/modules.\n\nbuildscript {\n    repositories {\n        jcenter()\n        google()\n    }\n    dependencies {\n        classpath 'com.android.tools.build:gradle:7.2.1'\n\n        // NOTE: Do not place your application dependencies here; they belong\n        // in the individual module build.gradle files\n    }\n}\n\nallprojects {\n    repositories {\n        jcenter()\n        google()\n    }\n}\n\ntask clean(type: Delete) {\n    delete rootProject.buildDir\n}\n"
  },
  {
    "path": "wa/workloads/gfxbench/uiauto/build.sh",
    "content": "#!/bin/bash\n\n# CD into build dir if possible - allows building from any directory\nscript_path='.'\nif `readlink -f $0 &>/dev/null`; then\n    script_path=`readlink -f $0 2>/dev/null`\nfi\nscript_dir=`dirname $script_path`\ncd $script_dir\n\n# Ensure gradelw exists before starting\nif [[ ! -f gradlew ]]; then\n    echo 'gradlew file not found! Check that you are in the right directory.'\n    exit 9\nfi\n\n# Copy base class library from wa dist\nlibs_dir=app/libs\nbase_class=`python3 -c \"import os, wa; print(os.path.join(os.path.dirname(wa.__file__), 'framework', 'uiauto', 'uiauto.aar'))\"`\nmkdir -p $libs_dir\ncp $base_class $libs_dir\n\n# Build and return appropriate exit code if failed\n# gradle build\n./gradlew clean :app:assembleDebug\nexit_code=$?\nif [[ $exit_code -ne 0 ]]; then\n    echo \"ERROR: 'gradle build' exited with code $exit_code\"\n    exit $exit_code\nfi\n\n# If successful move APK file to workload folder (overwrite previous)\npackage=com.arm.wa.uiauto.gfxbench\nrm -f ../$package\nif [[ -f app/build/outputs/apk/debug/$package.apk ]]; then\n    cp app/build/outputs/apk/debug/$package.apk ../$package.apk\nelse\n    echo 'ERROR: UiAutomator apk could not be found!'\n    exit 9\nfi\n"
  },
  {
    "path": "wa/workloads/gfxbench/uiauto/gradle/wrapper/gradle-wrapper.properties",
    "content": "#Thu Jun 08 14:26:39 BST 2017\ndistributionBase=GRADLE_USER_HOME\ndistributionPath=wrapper/dists\nzipStoreBase=GRADLE_USER_HOME\nzipStorePath=wrapper/dists\ndistributionUrl=https\\://services.gradle.org/distributions/gradle-7.3.3-all.zip\n"
  },
  {
    "path": "wa/workloads/gfxbench/uiauto/gradlew",
    "content": "#!/usr/bin/env bash\n\n##############################################################################\n##\n##  Gradle start up script for UN*X\n##\n##############################################################################\n\n# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.\nDEFAULT_JVM_OPTS=\"\"\n\nAPP_NAME=\"Gradle\"\nAPP_BASE_NAME=`basename \"$0\"`\n\n# Use the maximum available, or set MAX_FD != -1 to use that value.\nMAX_FD=\"maximum\"\n\nwarn ( ) {\n    echo \"$*\"\n}\n\ndie ( ) {\n    echo\n    echo \"$*\"\n    echo\n    exit 1\n}\n\n# OS specific support (must be 'true' or 'false').\ncygwin=false\nmsys=false\ndarwin=false\ncase \"`uname`\" in\n  CYGWIN* )\n    cygwin=true\n    ;;\n  Darwin* )\n    darwin=true\n    ;;\n  MINGW* )\n    msys=true\n    ;;\nesac\n\n# Attempt to set APP_HOME\n# Resolve links: $0 may be a link\nPRG=\"$0\"\n# Need this for relative symlinks.\nwhile [ -h \"$PRG\" ] ; do\n    ls=`ls -ld \"$PRG\"`\n    link=`expr \"$ls\" : '.*-> \\(.*\\)$'`\n    if expr \"$link\" : '/.*' > /dev/null; then\n        PRG=\"$link\"\n    else\n        PRG=`dirname \"$PRG\"`\"/$link\"\n    fi\ndone\nSAVED=\"`pwd`\"\ncd \"`dirname \\\"$PRG\\\"`/\" >/dev/null\nAPP_HOME=\"`pwd -P`\"\ncd \"$SAVED\" >/dev/null\n\nCLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar\n\n# Determine the Java command to use to start the JVM.\nif [ -n \"$JAVA_HOME\" ] ; then\n    if [ -x \"$JAVA_HOME/jre/sh/java\" ] ; then\n        # IBM's JDK on AIX uses strange locations for the executables\n        JAVACMD=\"$JAVA_HOME/jre/sh/java\"\n    else\n        JAVACMD=\"$JAVA_HOME/bin/java\"\n    fi\n    if [ ! 
-x \"$JAVACMD\" ] ; then\n        die \"ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME\n\nPlease set the JAVA_HOME variable in your environment to match the\nlocation of your Java installation.\"\n    fi\nelse\n    JAVACMD=\"java\"\n    which java >/dev/null 2>&1 || die \"ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.\n\nPlease set the JAVA_HOME variable in your environment to match the\nlocation of your Java installation.\"\nfi\n\n# Increase the maximum file descriptors if we can.\nif [ \"$cygwin\" = \"false\" -a \"$darwin\" = \"false\" ] ; then\n    MAX_FD_LIMIT=`ulimit -H -n`\n    if [ $? -eq 0 ] ; then\n        if [ \"$MAX_FD\" = \"maximum\" -o \"$MAX_FD\" = \"max\" ] ; then\n            MAX_FD=\"$MAX_FD_LIMIT\"\n        fi\n        ulimit -n $MAX_FD\n        if [ $? -ne 0 ] ; then\n            warn \"Could not set maximum file descriptor limit: $MAX_FD\"\n        fi\n    else\n        warn \"Could not query maximum file descriptor limit: $MAX_FD_LIMIT\"\n    fi\nfi\n\n# For Darwin, add options to specify how the application appears in the dock\nif $darwin; then\n    GRADLE_OPTS=\"$GRADLE_OPTS \\\"-Xdock:name=$APP_NAME\\\" \\\"-Xdock:icon=$APP_HOME/media/gradle.icns\\\"\"\nfi\n\n# For Cygwin, switch paths to Windows format before running java\nif $cygwin ; then\n    APP_HOME=`cygpath --path --mixed \"$APP_HOME\"`\n    CLASSPATH=`cygpath --path --mixed \"$CLASSPATH\"`\n    JAVACMD=`cygpath --unix \"$JAVACMD\"`\n\n    # We build the pattern for arguments to be converted via cygpath\n    ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`\n    SEP=\"\"\n    for dir in $ROOTDIRSRAW ; do\n        ROOTDIRS=\"$ROOTDIRS$SEP$dir\"\n        SEP=\"|\"\n    done\n    OURCYGPATTERN=\"(^($ROOTDIRS))\"\n    # Add a user-defined pattern to the cygpath arguments\n    if [ \"$GRADLE_CYGPATTERN\" != \"\" ] ; then\n        OURCYGPATTERN=\"$OURCYGPATTERN|($GRADLE_CYGPATTERN)\"\n    fi\n    # Now convert the arguments - 
kludge to limit ourselves to /bin/sh\n    i=0\n    for arg in \"$@\" ; do\n        CHECK=`echo \"$arg\"|egrep -c \"$OURCYGPATTERN\" -`\n        CHECK2=`echo \"$arg\"|egrep -c \"^-\"`                                 ### Determine if an option\n\n        if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then                    ### Added a condition\n            eval `echo args$i`=`cygpath --path --ignore --mixed \"$arg\"`\n        else\n            eval `echo args$i`=\"\\\"$arg\\\"\"\n        fi\n        i=$((i+1))\n    done\n    case $i in\n        (0) set -- ;;\n        (1) set -- \"$args0\" ;;\n        (2) set -- \"$args0\" \"$args1\" ;;\n        (3) set -- \"$args0\" \"$args1\" \"$args2\" ;;\n        (4) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" ;;\n        (5) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" ;;\n        (6) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" \"$args5\" ;;\n        (7) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" \"$args5\" \"$args6\" ;;\n        (8) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" \"$args5\" \"$args6\" \"$args7\" ;;\n        (9) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" \"$args5\" \"$args6\" \"$args7\" \"$args8\" ;;\n    esac\nfi\n\n# Split up the JVM_OPTS And GRADLE_OPTS values into an array, following the shell quoting and substitution rules\nfunction splitJvmOpts() {\n    JVM_OPTS=(\"$@\")\n}\neval splitJvmOpts $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS\nJVM_OPTS[${#JVM_OPTS[*]}]=\"-Dorg.gradle.appname=$APP_BASE_NAME\"\n\nexec \"$JAVACMD\" \"${JVM_OPTS[@]}\" -classpath \"$CLASSPATH\" org.gradle.wrapper.GradleWrapperMain \"$@\"\n"
  },
  {
    "path": "wa/workloads/gfxbench/uiauto/gradlew.bat",
    "content": "@if \"%DEBUG%\" == \"\" @echo off\r\n@rem ##########################################################################\r\n@rem\r\n@rem  Gradle startup script for Windows\r\n@rem\r\n@rem ##########################################################################\r\n\r\n@rem Set local scope for the variables with windows NT shell\r\nif \"%OS%\"==\"Windows_NT\" setlocal\r\n\r\n@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.\r\nset DEFAULT_JVM_OPTS=\r\n\r\nset DIRNAME=%~dp0\r\nif \"%DIRNAME%\" == \"\" set DIRNAME=.\r\nset APP_BASE_NAME=%~n0\r\nset APP_HOME=%DIRNAME%\r\n\r\n@rem Find java.exe\r\nif defined JAVA_HOME goto findJavaFromJavaHome\r\n\r\nset JAVA_EXE=java.exe\r\n%JAVA_EXE% -version >NUL 2>&1\r\nif \"%ERRORLEVEL%\" == \"0\" goto init\r\n\r\necho.\r\necho ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.\r\necho.\r\necho Please set the JAVA_HOME variable in your environment to match the\r\necho location of your Java installation.\r\n\r\ngoto fail\r\n\r\n:findJavaFromJavaHome\r\nset JAVA_HOME=%JAVA_HOME:\"=%\r\nset JAVA_EXE=%JAVA_HOME%/bin/java.exe\r\n\r\nif exist \"%JAVA_EXE%\" goto init\r\n\r\necho.\r\necho ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%\r\necho.\r\necho Please set the JAVA_HOME variable in your environment to match the\r\necho location of your Java installation.\r\n\r\ngoto fail\r\n\r\n:init\r\n@rem Get command-line arguments, handling Windowz variants\r\n\r\nif not \"%OS%\" == \"Windows_NT\" goto win9xME_args\r\nif \"%@eval[2+2]\" == \"4\" goto 4NT_args\r\n\r\n:win9xME_args\r\n@rem Slurp the command line arguments.\r\nset CMD_LINE_ARGS=\r\nset _SKIP=2\r\n\r\n:win9xME_args_slurp\r\nif \"x%~1\" == \"x\" goto execute\r\n\r\nset CMD_LINE_ARGS=%*\r\ngoto execute\r\n\r\n:4NT_args\r\n@rem Get arguments from the 4NT Shell from JP Software\r\nset CMD_LINE_ARGS=%$\r\n\r\n:execute\r\n@rem Setup the command line\r\n\r\nset 
CLASSPATH=%APP_HOME%\\gradle\\wrapper\\gradle-wrapper.jar\r\n\r\n@rem Execute Gradle\r\n\"%JAVA_EXE%\" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% \"-Dorg.gradle.appname=%APP_BASE_NAME%\" -classpath \"%CLASSPATH%\" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%\r\n\r\n:end\r\n@rem End local scope for the variables with windows NT shell\r\nif \"%ERRORLEVEL%\"==\"0\" goto mainEnd\r\n\r\n:fail\r\nrem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of\r\nrem the _cmd.exe /c_ return code!\r\nif  not \"\" == \"%GRADLE_EXIT_CONSOLE%\" exit 1\r\nexit /b 1\r\n\r\n:mainEnd\r\nif \"%OS%\"==\"Windows_NT\" endlocal\r\n\r\n:omega\r\n"
  },
  {
    "path": "wa/workloads/gfxbench/uiauto/settings.gradle",
    "content": "include ':app'\n"
  },
  {
    "path": "wa/workloads/glbenchmark/__init__.py",
    "content": "#    Copyright 2013-2015 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\n# pylint: disable=E1101,E0203\nimport re\nimport os\n\nfrom wa import ApkUiautoWorkload, Parameter, Alias\nfrom wa.framework.exception import ConfigError\n\n# These maps provide use-friendly aliases for the most common options.\nUSE_CASE_MAP = {\n    'egypt': 'GLBenchmark 2.5 Egypt HD',\n    'egypt-classic': 'GLBenchmark 2.1 Egypt Classic',\n    't-rex': 'GLBenchmark 2.7 T-Rex HD',\n}\n\nTYPE_MAP = {\n    'onscreen': 'C24Z16 Onscreen Auto',\n    'offscreen': 'C24Z16 Offscreen Auto',\n}\n\n\nclass Glb(ApkUiautoWorkload):\n\n    name = 'glbenchmark'\n    description = \"\"\"\n    Measures the graphics performance of Android devices by testing\n    the underlying OpenGL (ES) implementation.\n\n    http://gfxbench.com/about-gfxbench.jsp\n\n    From the website:\n\n        The benchmark includes console-quality high-level 3D animations\n        (T-Rex HD and Egypt HD) and low-level graphics measurements.\n\n        With high vertex count and complex effects such as motion blur, parallax\n        mapping and particle systems, the engine of GFXBench stresses GPUs in order\n        provide users a realistic feedback on their device.\n\n    \"\"\"\n    activity = 'com.glbenchmark.activities.GLBenchmarkDownloaderActivity'\n    view = 'com.glbenchmark.glbenchmark27/com.glbenchmark.activities.GLBRender'\n\n    package_names = ['com.glbenchmark.glbenchmark27', 
'com.glbenchmark.glbenchmark25']\n    supported_versions = ['2.7', '2.5']\n\n    # If usecase is not specified the default usecase is the first supported usecase alias\n    # for the specified version.\n    supported_usecase_aliases = {\n        '2.7': ['t-rex', 'egypt'],\n        '2.5': ['egypt-classic', 'egypt'],\n    }\n\n    default_iterations = 1\n    install_timeout = 500\n    run_timeout = 4 * 60\n\n    regex = re.compile(r'GLBenchmark (metric|FPS): (.*)')\n\n    parameters = [\n        Parameter('version', allowed_values=supported_versions, override=True,\n                  description=('Specifies which version of the benchmark to run (different versions '\n                               'support different use cases).')),\n        Parameter('use_case', default=None,\n                  description=\"\"\"Specifies which usecase to run, as listed in the benchmark menu; e.g.\n                                 ``'GLBenchmark 2.5 Egypt HD'``. For convenience, two aliases are provided\n                                 for the most common use cases: ``'egypt'`` and ``'t-rex'``. These could\n                                 be use instead of the full use case title. For version ``'2.7'`` it defaults\n                                 to ``'t-rex'``, for version ``'2.5'`` it defaults to ``'egypt-classic'``.\n                  \"\"\"),\n        Parameter('type', default='onscreen',\n                  description=\"\"\"Specifies which type of the use case to run, as listed in the benchmarks\n                                 menu (small text underneath the use case name); e.g. ``'C24Z16 Onscreen Auto'``.\n                                 For convenience, two aliases are provided for the most common types:\n                                 ``'onscreen'`` and ``'offscreen'``. 
These may be used instead of full type\n                                 names.\n                  \"\"\"),\n        Parameter('timeout', kind=int, default=200,\n                  description=\"\"\"Specifies how long, in seconds, UI automation will wait for results screen to\n                                 appear before assuming something went wrong.\n                  \"\"\"),\n    ]\n\n    aliases = [\n        Alias('glbench'),\n        Alias('egypt', use_case='egypt'),\n        Alias('t-rex', use_case='t-rex'),\n        Alias('egypt_onscreen', use_case='egypt', type='onscreen'),\n        Alias('t-rex_onscreen', use_case='t-rex', type='onscreen'),\n        Alias('egypt_offscreen', use_case='egypt', type='offscreen'),\n        Alias('t-rex_offscreen', use_case='t-rex', type='offscreen'),\n    ]\n\n    def initialize(self, context):\n        super(Glb, self).initialize(context)\n        self.gui.uiauto_params['version'] = self.version\n        if self.use_case is None:\n            self.use_case = self.supported_usecase_aliases[self.version][0]\n        if self.use_case.lower() in USE_CASE_MAP:\n            if self.use_case not in self.supported_usecase_aliases[self.version]:\n                raise ConfigError('usecases {} is not supported in version {}'.format(self.use_case, self.version))\n            self.use_case = USE_CASE_MAP[self.use_case.lower()]\n        self.gui.uiauto_params['use_case'] = self.use_case.replace(' ', '_')\n\n        if self.type.lower() in TYPE_MAP:\n            self.type = TYPE_MAP[self.type.lower()]\n        self.gui.uiauto_params['usecase_type'] = self.type.replace(' ', '_')\n\n        self.gui.uiauto_params['timeout'] = self.run_timeout\n\n    def update_output(self, context):\n        super(Glb, self).update_output(context)\n        match_count = 0\n        with open(context.get_artifact_path('logcat')) as fh:\n            for line in fh:\n                match = self.regex.search(line)\n                if match:\n                   
 metric = match.group(1)\n                    value, units = match.group(2).split()\n                    value = value.replace('*', '')\n                    if metric == 'metric':\n                        metric = 'Frames'\n                        units = 'frames'\n                    metric = metric + '_' + str(match_count // 2)\n                    context.add_metric(metric, value, units)\n                    match_count += 1\n"
  },
  {
    "path": "wa/workloads/glbenchmark/uiauto/app/build.gradle",
    "content": "apply plugin: 'com.android.application'\n\ndef packageName = \"com.arm.wa.uiauto.glbenchmark\"\n\nandroid {\n    compileSdkVersion 28\n    buildToolsVersion \"28.0.3\"\n    defaultConfig {\n        applicationId \"${packageName}\"\n        minSdkVersion 18\n        targetSdkVersion 28\n        testInstrumentationRunner \"android.support.test.runner.AndroidJUnitRunner\"\n    }\n    buildTypes {\n        applicationVariants.all { variant ->\n            variant.outputs.each { output ->\n                output.outputFileName = \"${packageName}.apk\"\n            }\n        }\n    }\n}\n\ndependencies {\n    implementation fileTree(dir: 'libs', include: ['*.jar'])\n    implementation 'com.android.support.test:runner:0.5'\n    implementation 'com.android.support.test:rules:0.5'\n    implementation 'com.android.support.test.uiautomator:uiautomator-v18:2.1.2'\n    implementation(name: 'uiauto', ext:'aar')\n}\n\nrepositories {\n    flatDir {\n        dirs 'libs'\n    }\n}\n"
  },
  {
    "path": "wa/workloads/glbenchmark/uiauto/app/src/main/AndroidManifest.xml",
    "content": "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n<manifest xmlns:android=\"http://schemas.android.com/apk/res/android\"\n    package=\"com.arm.wa.uiauto.glbenchmark\"\n    android:versionCode=\"1\"\n    android:versionName=\"1.0\">\n\n\n    <instrumentation\n        android:name=\"android.support.test.runner.AndroidJUnitRunner\"\n        android:targetPackage=\"${applicationId}\"/>\n\n</manifest>\n\n"
  },
  {
    "path": "wa/workloads/glbenchmark/uiauto/app/src/main/java/com/arm/wa/uiauto/glbenchmark/UiAutomation.java",
    "content": "/*    Copyright 2013-2015 ARM Limited\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n*/\n\n\npackage com.arm.wa.uiauto.glbenchmark;\n\nimport android.app.Activity;\nimport android.os.Bundle;\nimport android.support.test.runner.AndroidJUnit4;\nimport android.support.test.uiautomator.UiObject;\nimport android.support.test.uiautomator.UiObjectNotFoundException;\nimport android.support.test.uiautomator.UiScrollable;\nimport android.support.test.uiautomator.UiSelector;\nimport android.util.Log;\n\nimport com.arm.wa.uiauto.BaseUiAutomation;\n\nimport org.junit.Before;\nimport org.junit.Test;\nimport org.junit.runner.RunWith;\n\nimport java.util.concurrent.TimeUnit;\n\n@RunWith(AndroidJUnit4.class)\npublic class UiAutomation extends BaseUiAutomation {\n\n    public static String TAG = \"glb\";\n    public static int maxScrolls = 15;\n\n    private Bundle parameters;\n    private String version;\n    private String useCase;\n    private String type;\n    private int testTimeoutSeconds;\n\n\n    @Before\n    public void initialize() {\n        parameters = getParams();\n        version = parameters.getString(\"version\");\n        useCase = parameters.getString(\"use_case\").replace('_', ' ');\n        type = parameters.getString(\"usecase_type\").replace('_', ' ');\n        testTimeoutSeconds = parameters.getInt(\"timeout\");\n    }\n\n    @Test\n    public void setup() throws Exception {\n        dismissAndroidVersionPopup();\n        
goToPreformanceTestsMenu();\n        selectUseCase(version, useCase, type);\n    }\n    @Test\n    public void runWorkload() throws Exception {\n        hitStart();\n        waitForResults(version, useCase, testTimeoutSeconds);\n    }\n\n    @Test\n    public void extractResults() throws Exception {\n        extractBenchmarkResults();\n    }\n\n    public void goToPreformanceTestsMenu() throws Exception {\n        UiSelector selector = new UiSelector();\n        UiObject choosePerfTest = mDevice.findObject(selector.text(\"Performance Tests\")\n                                                             .className(\"android.widget.TextView\"));\n        choosePerfTest.clickAndWaitForNewWindow();\n    }\n\n    public void selectUseCase(String version, String useCase, String type) throws Exception {\n        UiSelector selector = new UiSelector();\n        UiScrollable testList = new UiScrollable(selector.className(\"android.widget.ListView\"));\n        UiObject useCaseText = mDevice.findObject(selector.className(\"android.widget.TextView\")\n                                                          .text(useCase));\n        if (version.equals(\"2.7\")){\n                UiObject typeText =  useCaseText.getFromParent(selector.className(\"android.widget.TextView\")\n                                                                       .text(type));\n                int scrolls = 0;\n                while(!typeText.exists()) {\n                        testList.scrollForward();\n                        scrolls += 1;\n                        if (scrolls >= maxScrolls) {\n                                break;\n                        }\n                }\n                typeText.click();\n        }\n        else if (version.equals(\"2.5\")){\n                int scrolls = 0;\n                while(!useCaseText.exists()) {\n                        testList.scrollForward();\n                        scrolls += 1;\n                        if (scrolls >= maxScrolls) {\n    
                            break;\n                        }\n                }\n                useCaseText.click();\n                UiObject modeDisableModeButton = null;\n                if (type.contains(\"Onscreen\")){\n                        modeDisableModeButton = mDevice.findObject(selector.text(\"Offscreen\"));\n                }\n                else {\n                        modeDisableModeButton = mDevice.findObject(selector.text(\"Onscreen\"));\n                }\n                modeDisableModeButton.click();\n        }\n    }\n\n    public void hitStart() throws Exception {\n        UiSelector selector = new UiSelector();\n        UiObject startButton = mDevice.findObject(selector.text(\"Start\"));\n        startButton.clickAndWaitForNewWindow();\n    }\n\n    public void waitForResults(String version, String useCase, int timeout) throws Exception {\n        UiSelector selector = new UiSelector();\n        UiObject results = null;\n        if (version.equals(\"2.7\"))\n                results = mDevice.findObject(selector.text(\"Results\").className(\"android.widget.TextView\"));\n        else\n                results =  mDevice.findObject(selector.text(useCase).className(\"android.widget.TextView\"));\n\tLog.v(TAG, \"Waiting for results screen.\");\n\t// On some devices, the results screen sometimes gets \"backgrounded\" (or\n\t// rather, doesn't seem to come to foreground to begin with). This code\n\t// attemps to deal with that by explicitly bringing glbench to the\n\t// foreground if results screen doesn't appear within testTimeoutSeconds seconds of\n\t// starting GLB.\n        if (!results.waitForExists(TimeUnit.SECONDS.toMillis(timeout))) {\n\t\tLog.v(TAG, \"Results screen not found. 
Attempting to bring to foreground.\");\n\t\tString[] commandLine = {\"am\", \"start\",\n\t\t\t\t\t\"-a\", \"android.intent.action.MAIN\",\n\t\t\t\t\t\"-c\", \"android.intent.category.LAUNCHER\",\n\t\t\t\t\t\"-n\", \"com.glbenchmark.glbenchmark27/com.glbenchmark.activities.GLBenchmarkDownloaderActivity\"};\n\t\tProcess proc = Runtime.getRuntime().exec(commandLine);\n\t\tproc.waitFor();\n\t\tLog.v(TAG, String.format(\"am start exit value: %d\", proc.exitValue()));\n\t\tif (!results.exists()) {\n\t\t\tthrow new UiObjectNotFoundException(\"Could not find results screen.\");\n\t\t}\n\t}\n\tLog.v(TAG, \"Results screen found.\");\n    }\n\n    public void extractBenchmarkResults() throws Exception {\n            Log.v(TAG, \"Extracting results.\");\n\t        sleep(2); // wait for the results screen to fully load.\n            UiSelector selector = new UiSelector();\n            UiObject fpsText = mDevice.findObject(selector.className(\"android.widget.TextView\")\n                                                          .textContains(\"fps\"));\n            UiObject otherText = fpsText.getFromParent(selector.className(\"android.widget.TextView\").index(0));\n\n            Log.v(TAG, String.format(\"GLBenchmark metric: %s\", otherText.getText().replace('\\n', ' ')));\n            Log.v(TAG, String.format(\"GLBenchmark FPS: %s\", fpsText.getText().replace('\\n', ' ')));\n    }\n}\n"
  },
  {
    "path": "wa/workloads/glbenchmark/uiauto/build.gradle",
    "content": "// Top-level build file where you can add configuration options common to all sub-projects/modules.\n\nbuildscript {\n    repositories {\n        jcenter()\n        google()\n    }\n    dependencies {\n        classpath 'com.android.tools.build:gradle:7.2.1'\n\n        // NOTE: Do not place your application dependencies here; they belong\n        // in the individual module build.gradle files\n    }\n}\n\nallprojects {\n    repositories {\n        jcenter()\n        google()\n    }\n}\n\ntask clean(type: Delete) {\n    delete rootProject.buildDir\n}\n"
  },
  {
    "path": "wa/workloads/glbenchmark/uiauto/build.sh",
    "content": "#!/bin/bash\n\n# CD into build dir if possible - allows building from any directory\nscript_path='.'\nif `readlink -f $0 &>/dev/null`; then\n    script_path=`readlink -f $0 2>/dev/null`\nfi\nscript_dir=`dirname $script_path`\ncd $script_dir\n\n# Ensure gradelw exists before starting\nif [[ ! -f gradlew ]]; then\n    echo 'gradlew file not found! Check that you are in the right directory.'\n    exit 9\nfi\n\n# Copy base class library from wa dist\nlibs_dir=app/libs\nbase_class=`python3 -c \"import os, wa; print(os.path.join(os.path.dirname(wa.__file__), 'framework', 'uiauto', 'uiauto.aar'))\"`\nmkdir -p $libs_dir\ncp $base_class $libs_dir\n\n# Build and return appropriate exit code if failed\n# gradle build\n./gradlew clean :app:assembleDebug\nexit_code=$?\nif [[ $exit_code -ne 0 ]]; then\n    echo \"ERROR: 'gradle build' exited with code $exit_code\"\n    exit $exit_code\nfi\n\n# If successful move APK file to workload folder (overwrite previous)\npackage=com.arm.wa.uiauto.glbenchmark\nrm -f ../$package\nif [[ -f app/build/outputs/apk/debug/$package.apk ]]; then\n    cp app/build/outputs/apk/debug/$package.apk ../$package.apk\nelse\n    echo 'ERROR: UiAutomator apk could not be found!'\n    exit 9\nfi\n"
  },
  {
    "path": "wa/workloads/glbenchmark/uiauto/gradle/wrapper/gradle-wrapper.properties",
    "content": "#Wed May 03 15:42:44 BST 2017\ndistributionBase=GRADLE_USER_HOME\ndistributionPath=wrapper/dists\nzipStoreBase=GRADLE_USER_HOME\nzipStorePath=wrapper/dists\ndistributionUrl=https\\://services.gradle.org/distributions/gradle-7.3.3-all.zip\n"
  },
  {
    "path": "wa/workloads/glbenchmark/uiauto/gradlew",
    "content": "#!/usr/bin/env bash\n\n##############################################################################\n##\n##  Gradle start up script for UN*X\n##\n##############################################################################\n\n# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.\nDEFAULT_JVM_OPTS=\"\"\n\nAPP_NAME=\"Gradle\"\nAPP_BASE_NAME=`basename \"$0\"`\n\n# Use the maximum available, or set MAX_FD != -1 to use that value.\nMAX_FD=\"maximum\"\n\nwarn ( ) {\n    echo \"$*\"\n}\n\ndie ( ) {\n    echo\n    echo \"$*\"\n    echo\n    exit 1\n}\n\n# OS specific support (must be 'true' or 'false').\ncygwin=false\nmsys=false\ndarwin=false\ncase \"`uname`\" in\n  CYGWIN* )\n    cygwin=true\n    ;;\n  Darwin* )\n    darwin=true\n    ;;\n  MINGW* )\n    msys=true\n    ;;\nesac\n\n# Attempt to set APP_HOME\n# Resolve links: $0 may be a link\nPRG=\"$0\"\n# Need this for relative symlinks.\nwhile [ -h \"$PRG\" ] ; do\n    ls=`ls -ld \"$PRG\"`\n    link=`expr \"$ls\" : '.*-> \\(.*\\)$'`\n    if expr \"$link\" : '/.*' > /dev/null; then\n        PRG=\"$link\"\n    else\n        PRG=`dirname \"$PRG\"`\"/$link\"\n    fi\ndone\nSAVED=\"`pwd`\"\ncd \"`dirname \\\"$PRG\\\"`/\" >/dev/null\nAPP_HOME=\"`pwd -P`\"\ncd \"$SAVED\" >/dev/null\n\nCLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar\n\n# Determine the Java command to use to start the JVM.\nif [ -n \"$JAVA_HOME\" ] ; then\n    if [ -x \"$JAVA_HOME/jre/sh/java\" ] ; then\n        # IBM's JDK on AIX uses strange locations for the executables\n        JAVACMD=\"$JAVA_HOME/jre/sh/java\"\n    else\n        JAVACMD=\"$JAVA_HOME/bin/java\"\n    fi\n    if [ ! 
-x \"$JAVACMD\" ] ; then\n        die \"ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME\n\nPlease set the JAVA_HOME variable in your environment to match the\nlocation of your Java installation.\"\n    fi\nelse\n    JAVACMD=\"java\"\n    which java >/dev/null 2>&1 || die \"ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.\n\nPlease set the JAVA_HOME variable in your environment to match the\nlocation of your Java installation.\"\nfi\n\n# Increase the maximum file descriptors if we can.\nif [ \"$cygwin\" = \"false\" -a \"$darwin\" = \"false\" ] ; then\n    MAX_FD_LIMIT=`ulimit -H -n`\n    if [ $? -eq 0 ] ; then\n        if [ \"$MAX_FD\" = \"maximum\" -o \"$MAX_FD\" = \"max\" ] ; then\n            MAX_FD=\"$MAX_FD_LIMIT\"\n        fi\n        ulimit -n $MAX_FD\n        if [ $? -ne 0 ] ; then\n            warn \"Could not set maximum file descriptor limit: $MAX_FD\"\n        fi\n    else\n        warn \"Could not query maximum file descriptor limit: $MAX_FD_LIMIT\"\n    fi\nfi\n\n# For Darwin, add options to specify how the application appears in the dock\nif $darwin; then\n    GRADLE_OPTS=\"$GRADLE_OPTS \\\"-Xdock:name=$APP_NAME\\\" \\\"-Xdock:icon=$APP_HOME/media/gradle.icns\\\"\"\nfi\n\n# For Cygwin, switch paths to Windows format before running java\nif $cygwin ; then\n    APP_HOME=`cygpath --path --mixed \"$APP_HOME\"`\n    CLASSPATH=`cygpath --path --mixed \"$CLASSPATH\"`\n    JAVACMD=`cygpath --unix \"$JAVACMD\"`\n\n    # We build the pattern for arguments to be converted via cygpath\n    ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`\n    SEP=\"\"\n    for dir in $ROOTDIRSRAW ; do\n        ROOTDIRS=\"$ROOTDIRS$SEP$dir\"\n        SEP=\"|\"\n    done\n    OURCYGPATTERN=\"(^($ROOTDIRS))\"\n    # Add a user-defined pattern to the cygpath arguments\n    if [ \"$GRADLE_CYGPATTERN\" != \"\" ] ; then\n        OURCYGPATTERN=\"$OURCYGPATTERN|($GRADLE_CYGPATTERN)\"\n    fi\n    # Now convert the arguments - 
kludge to limit ourselves to /bin/sh\n    i=0\n    for arg in \"$@\" ; do\n        CHECK=`echo \"$arg\"|egrep -c \"$OURCYGPATTERN\" -`\n        CHECK2=`echo \"$arg\"|egrep -c \"^-\"`                                 ### Determine if an option\n\n        if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then                    ### Added a condition\n            eval `echo args$i`=`cygpath --path --ignore --mixed \"$arg\"`\n        else\n            eval `echo args$i`=\"\\\"$arg\\\"\"\n        fi\n        i=$((i+1))\n    done\n    case $i in\n        (0) set -- ;;\n        (1) set -- \"$args0\" ;;\n        (2) set -- \"$args0\" \"$args1\" ;;\n        (3) set -- \"$args0\" \"$args1\" \"$args2\" ;;\n        (4) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" ;;\n        (5) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" ;;\n        (6) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" \"$args5\" ;;\n        (7) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" \"$args5\" \"$args6\" ;;\n        (8) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" \"$args5\" \"$args6\" \"$args7\" ;;\n        (9) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" \"$args5\" \"$args6\" \"$args7\" \"$args8\" ;;\n    esac\nfi\n\n# Split up the JVM_OPTS And GRADLE_OPTS values into an array, following the shell quoting and substitution rules\nfunction splitJvmOpts() {\n    JVM_OPTS=(\"$@\")\n}\neval splitJvmOpts $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS\nJVM_OPTS[${#JVM_OPTS[*]}]=\"-Dorg.gradle.appname=$APP_BASE_NAME\"\n\nexec \"$JAVACMD\" \"${JVM_OPTS[@]}\" -classpath \"$CLASSPATH\" org.gradle.wrapper.GradleWrapperMain \"$@\"\n"
  },
  {
    "path": "wa/workloads/glbenchmark/uiauto/gradlew.bat",
    "content": "@if \"%DEBUG%\" == \"\" @echo off\r\n@rem ##########################################################################\r\n@rem\r\n@rem  Gradle startup script for Windows\r\n@rem\r\n@rem ##########################################################################\r\n\r\n@rem Set local scope for the variables with windows NT shell\r\nif \"%OS%\"==\"Windows_NT\" setlocal\r\n\r\n@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.\r\nset DEFAULT_JVM_OPTS=\r\n\r\nset DIRNAME=%~dp0\r\nif \"%DIRNAME%\" == \"\" set DIRNAME=.\r\nset APP_BASE_NAME=%~n0\r\nset APP_HOME=%DIRNAME%\r\n\r\n@rem Find java.exe\r\nif defined JAVA_HOME goto findJavaFromJavaHome\r\n\r\nset JAVA_EXE=java.exe\r\n%JAVA_EXE% -version >NUL 2>&1\r\nif \"%ERRORLEVEL%\" == \"0\" goto init\r\n\r\necho.\r\necho ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.\r\necho.\r\necho Please set the JAVA_HOME variable in your environment to match the\r\necho location of your Java installation.\r\n\r\ngoto fail\r\n\r\n:findJavaFromJavaHome\r\nset JAVA_HOME=%JAVA_HOME:\"=%\r\nset JAVA_EXE=%JAVA_HOME%/bin/java.exe\r\n\r\nif exist \"%JAVA_EXE%\" goto init\r\n\r\necho.\r\necho ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%\r\necho.\r\necho Please set the JAVA_HOME variable in your environment to match the\r\necho location of your Java installation.\r\n\r\ngoto fail\r\n\r\n:init\r\n@rem Get command-line arguments, handling Windowz variants\r\n\r\nif not \"%OS%\" == \"Windows_NT\" goto win9xME_args\r\nif \"%@eval[2+2]\" == \"4\" goto 4NT_args\r\n\r\n:win9xME_args\r\n@rem Slurp the command line arguments.\r\nset CMD_LINE_ARGS=\r\nset _SKIP=2\r\n\r\n:win9xME_args_slurp\r\nif \"x%~1\" == \"x\" goto execute\r\n\r\nset CMD_LINE_ARGS=%*\r\ngoto execute\r\n\r\n:4NT_args\r\n@rem Get arguments from the 4NT Shell from JP Software\r\nset CMD_LINE_ARGS=%$\r\n\r\n:execute\r\n@rem Setup the command line\r\n\r\nset 
CLASSPATH=%APP_HOME%\\gradle\\wrapper\\gradle-wrapper.jar\r\n\r\n@rem Execute Gradle\r\n\"%JAVA_EXE%\" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% \"-Dorg.gradle.appname=%APP_BASE_NAME%\" -classpath \"%CLASSPATH%\" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%\r\n\r\n:end\r\n@rem End local scope for the variables with windows NT shell\r\nif \"%ERRORLEVEL%\"==\"0\" goto mainEnd\r\n\r\n:fail\r\nrem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of\r\nrem the _cmd.exe /c_ return code!\r\nif  not \"\" == \"%GRADLE_EXIT_CONSOLE%\" exit 1\r\nexit /b 1\r\n\r\n:mainEnd\r\nif \"%OS%\"==\"Windows_NT\" endlocal\r\n\r\n:omega\r\n"
  },
  {
    "path": "wa/workloads/glbenchmark/uiauto/settings.gradle",
    "content": "include ':app'\n"
  },
  {
    "path": "wa/workloads/gmail/__init__.py",
    "content": "#    Copyright 2014-2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\nimport os\n\nfrom wa import ApkUiautoWorkload, Parameter\nfrom wa.framework.exception import ValidationError, WorkloadError\n\n\nclass Gmail(ApkUiautoWorkload):\n\n    name = 'gmail'\n    package_names = ['com.google.android.gm']\n    description = '''\n    A workload to perform standard productivity tasks within Gmail.  The workload carries out\n    various tasks, such as creating new emails, attaching images and sending them.\n\n    Test description:\n    1. Open Gmail application\n    2. Click to create New mail\n    3. Attach an image from the local images folder to the email\n    4. Enter recipient details in the To field\n    5. Enter text in the Subject field\n    6. Enter text in the Compose field\n    7. Click the Send mail button\n\n    To run the workload in offline mode, a 'mailstore.tar' file is required. In order to\n    generate such a file, Gmail should first be operated from an Internet-connected environment.\n    After this, the relevant database files can be found in the\n    '/data/data/com.google.android.gm/databases' directory. 
These files can then be archived to\n    produce a tarball using a command such as ``tar -cvf mailstore.tar -C /path/to/databases .``.\n    The result should then be placed in the '~/.workload_automation/dependencies/gmail/' directory\n    on your local machine, creating this if it does not already exist.\n\n    Known working APK version: 2023.04.02.523594694.Release\n    '''\n\n    parameters = [\n        Parameter('recipient', kind=str, default='wa-devnull@mailinator.com',\n                  description='''\n                  The email address of the recipient.  Setting a void address\n                  will stop any message failures clogging up your device inbox\n                  '''),\n        Parameter('test_image', kind=str, default='uxperf_1600x1200.jpg',\n                  description='''\n                  An image to be copied onto the device that will be attached\n                  to the email\n                  '''),\n        Parameter('offline_mode', kind=bool, default=False, description='''\n                  If set to ``True``, the workload will execute in offline mode.\n                  This mode requires root and makes use of a tarball of email\n                  database files 'mailstore.tar' for the email account to be used.\n                  This file is extracted directly to the application's 'databases'\n                  directory at '/data/data/com.google.android.gm/databases'.\n                  '''),\n    ]\n\n    @property\n    def requires_network(self):\n        return not self.offline_mode\n\n    @property\n    def requires_rerun(self):\n        # In offline mode we need to restart the application after modifying its data directory\n        return self.offline_mode\n\n    def __init__(self, target, **kwargs):\n        super(Gmail, self).__init__(target, **kwargs)\n        self.deployable_assets = [self.test_image]\n        if self.offline_mode:\n            self.deployable_assets.append('mailstore.tar')\n        self.cleanup_assets 
= True\n\n    def initialize(self, context):\n        super(Gmail, self).initialize(context)\n        if self.offline_mode and not self.target.is_rooted:\n            raise WorkloadError('This workload requires root to set up Gmail for offline usage.')\n\n    def init_resources(self, context):\n        super(Gmail, self).init_resources(context)\n        # Allows for getting working directory regardless if path ends with a '/'\n        work_dir = self.target.working_directory\n        work_dir = work_dir if work_dir[-1] != os.sep else work_dir[:-1]\n        self.gui.uiauto_params['workdir_name'] = self.target.path.basename(work_dir)\n        self.gui.uiauto_params['recipient'] = self.recipient\n        self.gui.uiauto_params['offline_mode'] = self.offline_mode\n        self.gui.uiauto_params['test_image'] = self.test_image\n        # Only accept certain image formats\n        if os.path.splitext(self.test_image.lower())[1] not in ['.jpg', '.jpeg', '.png']:\n            raise ValidationError('{} must be a JPEG or PNG file'.format(self.test_image))\n\n    def setup_rerun(self):\n        super(Gmail, self).setup_rerun()\n        database_src = self.target.path.join(self.target.working_directory, 'mailstore.tar')\n        database_dst = self.target.path.join(self.target.package_data_directory, self.package, 'databases')\n        existing_mailstores = self.target.path.join(database_dst, 'mailstore.*')\n        owner = self.target.execute(\"{} stat -c '%u' {}\".format(self.target.busybox, database_dst), as_root=True).strip()\n        self.target.execute('{} rm {}'.format(self.target.busybox, existing_mailstores), as_root=True)\n        self.target.execute('{} tar -xvf {} -C {}'.format(self.target.busybox, database_src, database_dst), as_root=True)\n        self.target.execute('{0} chown -R {1}:{1} {2}'.format(self.target.busybox, owner, database_dst), as_root=True)\n"
  },
  {
    "path": "wa/workloads/gmail/uiauto/app/build.gradle",
    "content": "apply plugin: 'com.android.application'\n\ndef packageName = \"com.arm.wa.uiauto.gmail\"\n\nandroid {\n    compileSdkVersion 28\n    buildToolsVersion \"28.0.3\"\n    defaultConfig {\n        applicationId \"${packageName}\"\n        minSdkVersion 18\n        targetSdkVersion 28\n        versionCode 1\n        versionName \"1.0\"\n        testInstrumentationRunner \"android.support.test.runner.AndroidJUnitRunner\"\n    }\n    buildTypes {\n        release {\n            minifyEnabled false\n            proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'\n        }\n        applicationVariants.all { variant ->\n            variant.outputs.each { output ->\n                output.outputFileName = \"${packageName}.apk\"\n            }\n        }\n    }\n}\n\ndependencies {\n    implementation fileTree(dir: 'libs', include: ['*.jar'])\n    implementation 'com.android.support.test:runner:0.5'\n    implementation 'com.android.support.test:rules:0.5'\n    implementation 'com.android.support.test.uiautomator:uiautomator-v18:2.1.2'\n    implementation(name: 'uiauto', ext:'aar')\n}\n\nrepositories {\n    flatDir {\n        dirs 'libs'\n    }\n}\n"
  },
  {
    "path": "wa/workloads/gmail/uiauto/app/src/main/AndroidManifest.xml",
    "content": "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n<manifest xmlns:android=\"http://schemas.android.com/apk/res/android\"\n    package=\"com.arm.wa.uiauto.gmail\"\n    android:versionCode=\"1\"\n    android:versionName=\"1.0\">\n\n\n    <instrumentation\n        android:name=\"android.support.test.runner.AndroidJUnitRunner\"\n        android:targetPackage=\"${applicationId}\"/>\n\n</manifest>\n\n"
  },
  {
    "path": "wa/workloads/gmail/uiauto/app/src/main/java/com/arm/wa/uiauto/gmail/UiAutomation.java",
    "content": "/*    Copyright 2014-2018 ARM Limited\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\npackage com.arm.wa.uiauto.gmail;\n\nimport android.os.Bundle;\nimport android.support.test.runner.AndroidJUnit4;\nimport android.support.test.uiautomator.UiObject;\nimport android.support.test.uiautomator.UiObjectNotFoundException;\nimport android.support.test.uiautomator.UiSelector;\nimport android.util.Log;\n\nimport com.arm.wa.uiauto.ApplaunchInterface;\nimport com.arm.wa.uiauto.BaseUiAutomation;\nimport com.arm.wa.uiauto.ActionLogger;\nimport com.arm.wa.uiauto.UiAutoUtils;\n\nimport org.junit.Before;\nimport org.junit.Test;\nimport org.junit.runner.RunWith;\n\nimport java.util.concurrent.TimeUnit;\n\n@RunWith(AndroidJUnit4.class)\npublic class UiAutomation extends BaseUiAutomation implements ApplaunchInterface {\n\n    protected Bundle parameters;\n    protected String packageID;\n    protected String recipient;\n    protected String workdir_name;\n    protected boolean offlineMode;\n    protected String test_image;\n\n    private int networkTimeoutSecs = 30;\n    private long networkTimeout =  TimeUnit.SECONDS.toMillis(networkTimeoutSecs);\n\n    @Before\n    public void initialize() {\n        parameters = getParams();\n        packageID = getPackageID(parameters);\n        recipient = parameters.getString(\"recipient\");\n        workdir_name = parameters.getString(\"workdir_name\");\n        offlineMode = 
parameters.getBoolean(\"offline_mode\");\n        test_image = parameters.getString(\"test_image\");\n    }\n\n    @Test\n    public void setup() throws Exception {\n        setScreenOrientation(ScreenOrientation.NATURAL);\n        runApplicationSetup();\n    }\n\n    @Test\n    public void runWorkload() throws Exception {\n        clickNewMail();\n        attachImage();\n        setToField(recipient);\n        setSubjectField();\n        setComposeField();\n        clickSendButton();\n    }\n\n    @Test\n    public void teardown() throws Exception {\n        unsetScreenOrientation();\n    }\n\n    public void runApplicationSetup() throws Exception {\n        clearFirstRunDialogues();\n    }\n\n    // Sets the UiObject that marks the end of the application launch.\n    public UiObject getLaunchEndObject() {\n        UiObject launchEndObject =\n                        mDevice.findObject(new UiSelector().className(\"android.widget.ImageButton\"));\n        return launchEndObject;\n    }\n\n    // Returns the launch command for the application.\n    public String getLaunchCommand() {\n        String launch_command;\n        launch_command = UiAutoUtils.createLaunchCommand(parameters);\n        return launch_command;\n    }\n\n    // Pass the workload parameters, used for applaunch\n    public void setWorkloadParameters(Bundle workload_parameters) {\n        parameters = workload_parameters;\n        packageID = getPackageID(parameters);\n    }\n\n    public void clearFirstRunDialogues() throws Exception {\n        // The first run dialogues vary on different devices so check if they are there and dismiss\n        UiObject gotItBox =\n            mDevice.findObject(new UiSelector().resourceId(packageID + \"welcome_tour_got_it\")\n                                         .className(\"android.widget.TextView\"));\n        if (gotItBox.exists()) {\n            gotItBox.clickAndWaitForNewWindow(uiAutoTimeout);\n        }\n\n        UiObject takeMeToBox =\n            
mDevice.findObject(new UiSelector().textContains(\"Take me to Gmail\")\n                                         .className(\"android.widget.TextView\"));\n        if (takeMeToBox.exists()) {\n            takeMeToBox.clickAndWaitForNewWindow(uiAutoTimeout);\n\n            UiObject noEmailAddressMessage = mDevice.findObject(new UiSelector()\n                .textContains(\"Please add at least one email address.\")\n                .className(\"android.widget.TextView\"));\n\n            if (noEmailAddressMessage.exists()) {\n                throw new UiObjectNotFoundException(\"No email account setup on device. Set up at least one email address\");\n            }\n        }\n\n        // Dismiss fresh new look pop up messages\n        UiObject newLookMessageDismissButton =\n            mDevice.findObject(new UiSelector().resourceId(packageID + \"gm_dismiss_button\")\n                                         .className(\"android.widget.Button\"));\n        if(newLookMessageDismissButton.exists()) {\n            newLookMessageDismissButton.click();\n        }\n        //Dismiss secondary message also with same button\n        if(newLookMessageDismissButton.exists()) {\n            newLookMessageDismissButton.click();\n        }\n\n        // Dismiss google meet integration popup\n        UiObject googleMeetDismissPopUp =\n            mDevice.findObject(new UiSelector().resourceId(packageID + \"next_button\")\n                                         .className(\"android.widget.Button\"));\n\n        if (googleMeetDismissPopUp.exists()) {\n            googleMeetDismissPopUp.click();\n        }\n\n        // If we're in offline mode we don't need to worry about syncing, so we're done\n        if (offlineMode) {\n            return;\n        }\n\n        UiObject syncNowButton =\n            mDevice.findObject(new UiSelector().textContains(\"Sync now\")\n                                         .className(\"android.widget.Button\"));\n        if (syncNowButton.exists()) 
{\n            syncNowButton.clickAndWaitForNewWindow(uiAutoTimeout);\n            // On some devices we need to wait for a sync to occur after clearing the data\n            // We also need to sleep here since waiting for a new window is not enough\n            sleep(10);\n        }\n\n        // Wait an obnoxiously long period of time for the sync operation to finish\n        // If it still fails, then there is a problem with the app obtaining the data it needs\n        // Recommend restarting the phone and/or clearing the app data\n        UiObject gettingMessages =\n            mDevice.findObject(new UiSelector().textContains(\"Getting your messages\")\n                                               .className(\"android.widget.TextView\"));\n        UiObject waitingSync =\n            mDevice.findObject(new UiSelector().textContains(\"Waiting for sync\")\n                                               .className(\"android.widget.TextView\"));\n        if (!waitUntilNoObject(gettingMessages, networkTimeoutSecs*4) ||\n            !waitUntilNoObject(waitingSync, networkTimeoutSecs*4)) {\n            throw new UiObjectNotFoundException(\"Device cannot sync! 
Try rebooting or clearing app data\");\n        }\n\n        UiObject conversationView =\n            mDevice.findObject(new UiSelector().resourceIdMatches(packageID + \"conversation_list.*\"));\n        if (!conversationView.waitForExists(networkTimeout)) {\n            throw new UiObjectNotFoundException(\"Could not find \\\"conversationView\\\".\");\n        }\n\n        //Get rid of smart compose message on newer versions and return to home screen before clickNewMail test\n        UiObject newMailButton =\n            getUiObjectByDescription(\"Compose\");\n        newMailButton.click();\n\n        UiObject smartComposeDismissButton = mDevice.findObject(new UiSelector().textContains(\"Got it\")\n                                                                                .className(\"android.widget.Button\"));\n        if(smartComposeDismissButton.exists()) {\n            smartComposeDismissButton.click();\n        }\n\n        // Return to conversation/home screen\n        mDevice.pressBack();\n        if(!conversationView.exists()) {\n           mDevice.pressBack();\n        }\n        if(!conversationView.exists()) {\n           mDevice.pressBack();\n        }\n    }\n\n    public void clickNewMail() throws Exception {\n        String testTag = \"click_new\";\n        ActionLogger logger = new ActionLogger(testTag, parameters);\n\n        UiObject newMailButton =\n            getUiObjectByDescription(\"Compose\");\n\n        logger.start();\n        newMailButton.clickAndWaitForNewWindow(uiAutoTimeout);\n        logger.stop();\n    }\n\n    public void attachImage() throws Exception {\n        String testTag = \"attach_img\";\n        ActionLogger logger = new ActionLogger(testTag, parameters);\n\n        UiObject attachIcon =\n            mDevice.findObject(new UiSelector().resourceId(packageID + \"add_attachment\"));\n\n        logger.start();\n\n        attachIcon.click();\n        UiObject attachFile =\n            mDevice.findObject(new 
UiSelector().textContains(\"Attach file\")\n                                               .className(\"android.widget.TextView\"));\n        if (!attachFile.exists()){\n            attachFile =\n                mDevice.findObject(new UiSelector().descriptionContains(\"Attach file\")\n                                               .className(\"android.widget.TextView\"));\n        }\n        attachFile.clickAndWaitForNewWindow(uiAutoTimeout);\n\n        // Show Roots menu\n        UiObject rootMenu =\n            mDevice.findObject(new UiSelector().descriptionContains(\"Show root\"));\n        if (rootMenu.exists()){\n            rootMenu.click();\n        }\n\n        UiObject imagesEntry =\n            mDevice.findObject(new UiSelector().textContains(\"Images\")\n                                               .className(\"android.widget.TextView\"));\n        if (imagesEntry.waitForExists(uiAutoTimeout)) {\n            imagesEntry.click();\n\n            selectGalleryFolder(workdir_name);\n            selectGalleryFolder(workdir_name);\n\n            //Switch from grid view to menu view to display filename on larger screens\n            UiObject menuListButton = mDevice.findObject(new UiSelector().resourceId(\"com.android.documentsui:id/menu_list\")\n                                                                         .className(\"android.widget.TextView\"));\n            if (menuListButton.exists()) {\n                menuListButton.click();\n            }\n\n            UiObject imageButton = mDevice.findObject(new UiSelector().textContains(test_image)\n                                                                      .className(\"android.widget.TextView\"));\n\n            imageButton.click();\n            imageButton.waitUntilGone(uiAutoTimeout);\n        } else { // Use google photos as fallback\n            UiObject photos =\n                mDevice.findObject(new UiSelector().text(\"Photos\")\n                                                   
.className(\"android.widget.TextView\"));\n\n            photos.click();\n\n            UiObject working_directory =\n                mDevice.findObject(new UiSelector().textContains(workdir_name)\n                                                   .className(\"android.widget.TextView\"));\n\n            working_directory.waitForExists (uiAutoTimeout);\n            working_directory.click();\n\n            //Click test image\n            UiObject imageFileButton =\n                mDevice.findObject(new UiSelector().descriptionContains(\"Photo\"));\n\n            imageFileButton.click();\n\n            UiObject accept = getUiObjectByText(\"DONE\");\n\n            if (accept.waitForExists (uiAutoTimeout)) {\n                accept.click();\n            }\n\n        }\n\n        logger.stop();\n    }\n\n    public void setToField(String recipient) throws Exception {\n        String testTag = \"text_to\";\n        ActionLogger logger = new ActionLogger(testTag, parameters);\n\n        UiObject toField = mDevice.findObject(new UiSelector().resourceId(packageID + \"to\"));\n        if (!toField.waitForExists(uiAutoTimeout)) {\n            toField = mDevice.findObject(new UiSelector().className(\"android.widget.EditText\"));\n        }\n\n        logger.start();\n        toField.setText(recipient);\n        mDevice.pressEnter();\n        logger.stop();\n    }\n\n    public void setSubjectField() throws Exception {\n        String testTag = \"text_subject\";\n        ActionLogger logger = new ActionLogger(testTag, parameters);\n\n        UiObject subjectField = getUiObjectByText(\"Subject\", \"android.widget.EditText\");\n        logger.start();\n        // Click on the subject field is required on some platforms to exit the To box cleanly\n        subjectField.click();\n        subjectField.setText(\"This is a test message\");\n        mDevice.pressEnter();\n        logger.stop();\n    }\n\n    public void setComposeField() throws Exception {\n        String testTag = 
\"text_body\";\n        ActionLogger logger = new ActionLogger(testTag, parameters);\n\n        UiObject composeField = mDevice.findObject(new UiSelector().textContains(\"Compose email\"));\n        if (!composeField.exists()){\n            composeField = mDevice.findObject(new UiSelector().descriptionContains(\"Compose email\"));\n        }\n        if (!composeField.exists()){\n            composeField = mDevice.findObject(new UiSelector().resourceId(packageID + \"wc_body_layout\" ))\n                                  .getChild(new UiSelector().className(\"android.widget.EditText\"));\n        }\n\n        logger.start();\n        composeField.legacySetText(\"This is a test composition\");\n        mDevice.pressEnter();\n        logger.stop();\n    }\n\n    public void clickSendButton() throws Exception {\n        String testTag = \"click_send\";\n        ActionLogger logger = new ActionLogger(testTag, parameters);\n\n        UiObject sendButton = getUiObjectByDescription(\"Send\");\n        logger.start();\n        sendButton.clickAndWaitForNewWindow(uiAutoTimeout);\n        logger.stop();\n        sendButton.waitUntilGone(networkTimeoutSecs);\n    }\n}\n"
  },
  {
    "path": "wa/workloads/gmail/uiauto/build.gradle",
    "content": "// Top-level build file where you can add configuration options common to all sub-projects/modules.\n\nbuildscript {\n    repositories {\n        jcenter()\n        google()\n    }\n    dependencies {\n        classpath 'com.android.tools.build:gradle:7.2.1'\n\n        // NOTE: Do not place your application dependencies here; they belong\n        // in the individual module build.gradle files\n    }\n}\n\nallprojects {\n    repositories {\n        jcenter()\n        google()\n    }\n}\n\ntask clean(type: Delete) {\n    delete rootProject.buildDir\n}\n"
  },
  {
    "path": "wa/workloads/gmail/uiauto/build.sh",
    "content": "#!/bin/bash\n#    Copyright 2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\n\n# CD into build dir if possible - allows building from any directory\nscript_path='.'\nif `readlink -f $0 &>/dev/null`; then\n    script_path=`readlink -f $0 2>/dev/null`\nfi\nscript_dir=`dirname $script_path`\ncd $script_dir\n\n# Ensure gradelw exists before starting\nif [[ ! -f gradlew ]]; then\n    echo 'gradlew file not found! Check that you are in the right directory.'\n    exit 9\nfi\n\n# Copy base class library from wa dist\nlibs_dir=app/libs\nbase_class=`python3 -c \"import os, wa; print(os.path.join(os.path.dirname(wa.__file__), 'framework', 'uiauto', 'uiauto.aar'))\"`\nmkdir -p $libs_dir\ncp $base_class $libs_dir\n\n# Build and return appropriate exit code if failed\n# gradle build\n./gradlew clean :app:assembleDebug\nexit_code=$?\nif [[ $exit_code -ne 0 ]]; then\n    echo \"ERROR: 'gradle build' exited with code $exit_code\"\n    exit $exit_code\nfi\n\n# If successful move APK file to workload folder (overwrite previous)\npackage=com.arm.wa.uiauto.gmail\nrm -f ../$package\nif [[ -f app/build/outputs/apk/debug/$package.apk ]]; then\n    cp app/build/outputs/apk/debug/$package.apk ../$package.apk\nelse\n    echo 'ERROR: UiAutomator apk could not be found!'\n    exit 9\nfi\n"
  },
  {
    "path": "wa/workloads/gmail/uiauto/gradle/wrapper/gradle-wrapper.properties",
    "content": "#Wed May 03 15:42:44 BST 2017\ndistributionBase=GRADLE_USER_HOME\ndistributionPath=wrapper/dists\nzipStoreBase=GRADLE_USER_HOME\nzipStorePath=wrapper/dists\ndistributionUrl=https\\://services.gradle.org/distributions/gradle-7.3.3-all.zip\n"
  },
  {
    "path": "wa/workloads/gmail/uiauto/gradlew",
    "content": "#!/usr/bin/env bash\n\n##############################################################################\n##\n##  Gradle start up script for UN*X\n##\n##############################################################################\n\n# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.\nDEFAULT_JVM_OPTS=\"\"\n\nAPP_NAME=\"Gradle\"\nAPP_BASE_NAME=`basename \"$0\"`\n\n# Use the maximum available, or set MAX_FD != -1 to use that value.\nMAX_FD=\"maximum\"\n\nwarn ( ) {\n    echo \"$*\"\n}\n\ndie ( ) {\n    echo\n    echo \"$*\"\n    echo\n    exit 1\n}\n\n# OS specific support (must be 'true' or 'false').\ncygwin=false\nmsys=false\ndarwin=false\ncase \"`uname`\" in\n  CYGWIN* )\n    cygwin=true\n    ;;\n  Darwin* )\n    darwin=true\n    ;;\n  MINGW* )\n    msys=true\n    ;;\nesac\n\n# Attempt to set APP_HOME\n# Resolve links: $0 may be a link\nPRG=\"$0\"\n# Need this for relative symlinks.\nwhile [ -h \"$PRG\" ] ; do\n    ls=`ls -ld \"$PRG\"`\n    link=`expr \"$ls\" : '.*-> \\(.*\\)$'`\n    if expr \"$link\" : '/.*' > /dev/null; then\n        PRG=\"$link\"\n    else\n        PRG=`dirname \"$PRG\"`\"/$link\"\n    fi\ndone\nSAVED=\"`pwd`\"\ncd \"`dirname \\\"$PRG\\\"`/\" >/dev/null\nAPP_HOME=\"`pwd -P`\"\ncd \"$SAVED\" >/dev/null\n\nCLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar\n\n# Determine the Java command to use to start the JVM.\nif [ -n \"$JAVA_HOME\" ] ; then\n    if [ -x \"$JAVA_HOME/jre/sh/java\" ] ; then\n        # IBM's JDK on AIX uses strange locations for the executables\n        JAVACMD=\"$JAVA_HOME/jre/sh/java\"\n    else\n        JAVACMD=\"$JAVA_HOME/bin/java\"\n    fi\n    if [ ! 
-x \"$JAVACMD\" ] ; then\n        die \"ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME\n\nPlease set the JAVA_HOME variable in your environment to match the\nlocation of your Java installation.\"\n    fi\nelse\n    JAVACMD=\"java\"\n    which java >/dev/null 2>&1 || die \"ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.\n\nPlease set the JAVA_HOME variable in your environment to match the\nlocation of your Java installation.\"\nfi\n\n# Increase the maximum file descriptors if we can.\nif [ \"$cygwin\" = \"false\" -a \"$darwin\" = \"false\" ] ; then\n    MAX_FD_LIMIT=`ulimit -H -n`\n    if [ $? -eq 0 ] ; then\n        if [ \"$MAX_FD\" = \"maximum\" -o \"$MAX_FD\" = \"max\" ] ; then\n            MAX_FD=\"$MAX_FD_LIMIT\"\n        fi\n        ulimit -n $MAX_FD\n        if [ $? -ne 0 ] ; then\n            warn \"Could not set maximum file descriptor limit: $MAX_FD\"\n        fi\n    else\n        warn \"Could not query maximum file descriptor limit: $MAX_FD_LIMIT\"\n    fi\nfi\n\n# For Darwin, add options to specify how the application appears in the dock\nif $darwin; then\n    GRADLE_OPTS=\"$GRADLE_OPTS \\\"-Xdock:name=$APP_NAME\\\" \\\"-Xdock:icon=$APP_HOME/media/gradle.icns\\\"\"\nfi\n\n# For Cygwin, switch paths to Windows format before running java\nif $cygwin ; then\n    APP_HOME=`cygpath --path --mixed \"$APP_HOME\"`\n    CLASSPATH=`cygpath --path --mixed \"$CLASSPATH\"`\n    JAVACMD=`cygpath --unix \"$JAVACMD\"`\n\n    # We build the pattern for arguments to be converted via cygpath\n    ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`\n    SEP=\"\"\n    for dir in $ROOTDIRSRAW ; do\n        ROOTDIRS=\"$ROOTDIRS$SEP$dir\"\n        SEP=\"|\"\n    done\n    OURCYGPATTERN=\"(^($ROOTDIRS))\"\n    # Add a user-defined pattern to the cygpath arguments\n    if [ \"$GRADLE_CYGPATTERN\" != \"\" ] ; then\n        OURCYGPATTERN=\"$OURCYGPATTERN|($GRADLE_CYGPATTERN)\"\n    fi\n    # Now convert the arguments - 
kludge to limit ourselves to /bin/sh\n    i=0\n    for arg in \"$@\" ; do\n        CHECK=`echo \"$arg\"|egrep -c \"$OURCYGPATTERN\" -`\n        CHECK2=`echo \"$arg\"|egrep -c \"^-\"`                                 ### Determine if an option\n\n        if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then                    ### Added a condition\n            eval `echo args$i`=`cygpath --path --ignore --mixed \"$arg\"`\n        else\n            eval `echo args$i`=\"\\\"$arg\\\"\"\n        fi\n        i=$((i+1))\n    done\n    case $i in\n        (0) set -- ;;\n        (1) set -- \"$args0\" ;;\n        (2) set -- \"$args0\" \"$args1\" ;;\n        (3) set -- \"$args0\" \"$args1\" \"$args2\" ;;\n        (4) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" ;;\n        (5) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" ;;\n        (6) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" \"$args5\" ;;\n        (7) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" \"$args5\" \"$args6\" ;;\n        (8) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" \"$args5\" \"$args6\" \"$args7\" ;;\n        (9) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" \"$args5\" \"$args6\" \"$args7\" \"$args8\" ;;\n    esac\nfi\n\n# Split up the JVM_OPTS And GRADLE_OPTS values into an array, following the shell quoting and substitution rules\nfunction splitJvmOpts() {\n    JVM_OPTS=(\"$@\")\n}\neval splitJvmOpts $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS\nJVM_OPTS[${#JVM_OPTS[*]}]=\"-Dorg.gradle.appname=$APP_BASE_NAME\"\n\nexec \"$JAVACMD\" \"${JVM_OPTS[@]}\" -classpath \"$CLASSPATH\" org.gradle.wrapper.GradleWrapperMain \"$@\"\n"
  },
  {
    "path": "wa/workloads/gmail/uiauto/gradlew.bat",
    "content": "@if \"%DEBUG%\" == \"\" @echo off\r\n@rem ##########################################################################\r\n@rem\r\n@rem  Gradle startup script for Windows\r\n@rem\r\n@rem ##########################################################################\r\n\r\n@rem Set local scope for the variables with windows NT shell\r\nif \"%OS%\"==\"Windows_NT\" setlocal\r\n\r\n@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.\r\nset DEFAULT_JVM_OPTS=\r\n\r\nset DIRNAME=%~dp0\r\nif \"%DIRNAME%\" == \"\" set DIRNAME=.\r\nset APP_BASE_NAME=%~n0\r\nset APP_HOME=%DIRNAME%\r\n\r\n@rem Find java.exe\r\nif defined JAVA_HOME goto findJavaFromJavaHome\r\n\r\nset JAVA_EXE=java.exe\r\n%JAVA_EXE% -version >NUL 2>&1\r\nif \"%ERRORLEVEL%\" == \"0\" goto init\r\n\r\necho.\r\necho ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.\r\necho.\r\necho Please set the JAVA_HOME variable in your environment to match the\r\necho location of your Java installation.\r\n\r\ngoto fail\r\n\r\n:findJavaFromJavaHome\r\nset JAVA_HOME=%JAVA_HOME:\"=%\r\nset JAVA_EXE=%JAVA_HOME%/bin/java.exe\r\n\r\nif exist \"%JAVA_EXE%\" goto init\r\n\r\necho.\r\necho ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%\r\necho.\r\necho Please set the JAVA_HOME variable in your environment to match the\r\necho location of your Java installation.\r\n\r\ngoto fail\r\n\r\n:init\r\n@rem Get command-line arguments, handling Windowz variants\r\n\r\nif not \"%OS%\" == \"Windows_NT\" goto win9xME_args\r\nif \"%@eval[2+2]\" == \"4\" goto 4NT_args\r\n\r\n:win9xME_args\r\n@rem Slurp the command line arguments.\r\nset CMD_LINE_ARGS=\r\nset _SKIP=2\r\n\r\n:win9xME_args_slurp\r\nif \"x%~1\" == \"x\" goto execute\r\n\r\nset CMD_LINE_ARGS=%*\r\ngoto execute\r\n\r\n:4NT_args\r\n@rem Get arguments from the 4NT Shell from JP Software\r\nset CMD_LINE_ARGS=%$\r\n\r\n:execute\r\n@rem Setup the command line\r\n\r\nset 
CLASSPATH=%APP_HOME%\\gradle\\wrapper\\gradle-wrapper.jar\r\n\r\n@rem Execute Gradle\r\n\"%JAVA_EXE%\" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% \"-Dorg.gradle.appname=%APP_BASE_NAME%\" -classpath \"%CLASSPATH%\" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%\r\n\r\n:end\r\n@rem End local scope for the variables with windows NT shell\r\nif \"%ERRORLEVEL%\"==\"0\" goto mainEnd\r\n\r\n:fail\r\nrem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of\r\nrem the _cmd.exe /c_ return code!\r\nif  not \"\" == \"%GRADLE_EXIT_CONSOLE%\" exit 1\r\nexit /b 1\r\n\r\n:mainEnd\r\nif \"%OS%\"==\"Windows_NT\" endlocal\r\n\r\n:omega\r\n"
  },
  {
    "path": "wa/workloads/gmail/uiauto/settings.gradle",
    "content": "include ':app'\n"
  },
  {
    "path": "wa/workloads/googlemaps/__init__.py",
    "content": "#    Copyright 2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\nfrom wa import Parameter, ApkUiautoWorkload\nfrom wa.framework.exception import WorkloadError\n\n\nclass GoogleMaps(ApkUiautoWorkload):\n\n    name = 'googlemaps'\n    description = '''\n    A workload to perform standard navigation tasks with Google Maps. This workload searches\n    for known locations, pans and zooms around the map, and follows driving directions\n    along a route.\n\n    To run the workload in offline mode, ``databases.tar`` and ``files.tar`` archives are required.\n    In order to generate these files, Google Maps should first be operated from an\n    Internet-connected environment, and a region around Cambridge, England should be downloaded\n    for offline use. This region must include the landmarks used in the UIAutomator program,\n    which include Cambridge train station and Corpus Christi college.\n\n    Following this, the files of interest can be found in the ``databases`` and ``files``\n    subdirectories of the ``/data/data/com.google.android.apps.maps/`` directory. The contents\n    of these subdirectories can be archived into tarballs using commands such as\n    ``tar -cvf databases.tar -C /path/to/databases .``. 
These ``databases.tar`` and ``files.tar`` archives\n    should then be placed in the ``~/.workload_automation/dependencies/googlemaps`` directory on your\n    local machine, creating this if it does not already exist.\n\n    Known working APK version: 10.19.1\n    '''\n    package_names = ['com.google.android.apps.maps']\n\n    parameters = [\n        Parameter('offline_mode', kind=bool, default=False, description='''\n                  If set to ``True``, the workload will execute in offline mode.\n                  This mode requires root and makes use of a tarball of database\n                  files ``databases.tar`` and a tarball of auxiliary files ``files.tar``.\n                  These tarballs are extracted directly to the application's ``databases``\n                  and ``files`` directories respectively in ``/data/data/com.google.android.apps.maps/``.\n                  '''),\n    ]\n\n    @property\n    def requires_network(self):\n        return not self.offline_mode\n\n    @property\n    def requires_rerun(self):\n        # In offline mode we need to restart the application after modifying its data directory\n        return self.offline_mode\n\n    def __init__(self, target, **kwargs):\n        super(GoogleMaps, self).__init__(target, **kwargs)\n        if self.offline_mode:\n            self.deployable_assets = ['databases.tar', 'files.tar']\n            self.cleanup_assets = True\n\n    def initialize(self, context):\n        super(GoogleMaps, self).initialize(context)\n        if self.offline_mode and not self.target.is_rooted:\n            raise WorkloadError('This workload requires root to set up Google Maps for offline usage.')\n\n    def init_resources(self, context):\n        super(GoogleMaps, self).init_resources(context)\n        self.gui.uiauto_params['offline_mode'] = self.offline_mode\n\n    def setup_rerun(self):\n        super(GoogleMaps, self).setup_rerun()\n        package_data_dir = 
self.target.path.join(self.target.package_data_directory, self.package)\n        databases_src = self.target.path.join(self.target.working_directory, 'databases.tar')\n        databases_dst = self.target.path.join(package_data_dir, 'databases')\n        files_src = self.target.path.join(self.target.working_directory, 'files.tar')\n        files_dst = self.target.path.join(package_data_dir, 'files')\n        owner = self.target.execute(\"{} stat -c '%u' {}\".format(self.target.busybox, package_data_dir), as_root=True).strip()\n        self.target.execute('{} tar -xvf {} -C {}'.format(self.target.busybox, databases_src, databases_dst), as_root=True)\n        self.target.execute('{} tar -xvf {} -C {}'.format(self.target.busybox, files_src, files_dst), as_root=True)\n        self.target.execute('{0} chown -R {1}:{1} {2}'.format(self.target.busybox, owner, package_data_dir), as_root=True)\n"
  },
  {
    "path": "wa/workloads/googlemaps/uiauto/app/build.gradle",
    "content": "apply plugin: 'com.android.application'\n\nandroid {\n    compileSdkVersion 18\n    buildToolsVersion '25.0.0'\n    defaultConfig {\n        applicationId \"com.arm.wa.uiauto.googlemaps\"\n        minSdkVersion 18\n        targetSdkVersion 28\n        testInstrumentationRunner \"android.support.test.runner.AndroidJUnitRunner\"\n    }\n    buildTypes {\n        applicationVariants.all { variant ->\n            variant.outputs.each { output ->\n                output.outputFileName = \"com.arm.wa.uiauto.googlemaps.apk\"\n            }\n        }\n    }\n}\n\ndependencies {\n    implementation fileTree(include: ['*.jar'], dir: 'libs')\n    implementation 'com.android.support.test:runner:0.5'\n    implementation 'com.android.support.test:rules:0.5'\n    implementation 'com.android.support.test.uiautomator:uiautomator-v18:2.1.2'\n    implementation(name: 'uiauto', ext: 'aar')\n}\n\nrepositories {\n    flatDir {\n        dirs 'libs'\n    }\n}\n"
  },
  {
    "path": "wa/workloads/googlemaps/uiauto/app/src/main/AndroidManifest.xml",
    "content": "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n<manifest xmlns:android=\"http://schemas.android.com/apk/res/android\"\n    package=\"com.arm.wa.uiauto.googlemaps\"\n    android:versionCode=\"1\"\n    android:versionName=\"1.0\">\n\n\n    <instrumentation\n        android:name=\"android.support.test.runner.AndroidJUnitRunner\"\n        android:targetPackage=\"com.arm.wa.uiauto.googlemaps\"/>\n\n</manifest>\n"
  },
  {
    "path": "wa/workloads/googlemaps/uiauto/app/src/main/java/com/arm/wa/uiauto/UiAutomation.java",
    "content": "/*    Copyright 2018 ARM Limited\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n*/\n\npackage com.arm.wa.uiauto.googlemaps;\n\nimport android.app.Activity;\nimport android.os.Bundle;\nimport org.junit.Test;\nimport org.junit.runner.RunWith;\nimport android.support.test.runner.AndroidJUnit4;\n\nimport android.util.Log;\nimport android.view.KeyEvent;\n\n// Import the uiautomator libraries\nimport android.support.test.uiautomator.UiObject;\nimport android.support.test.uiautomator.UiObjectNotFoundException;\nimport android.support.test.uiautomator.UiScrollable;\nimport android.support.test.uiautomator.UiSelector;\n\nimport org.junit.Before;\nimport org.junit.Test;\nimport org.junit.runner.RunWith;\n\nimport com.arm.wa.uiauto.BaseUiAutomation;\n\nimport static com.arm.wa.uiauto.BaseUiAutomation.Direction.DOWN;\nimport static com.arm.wa.uiauto.BaseUiAutomation.Direction.UP;\n\n@RunWith(AndroidJUnit4.class)\npublic class UiAutomation extends BaseUiAutomation {\n\n    protected Bundle parameters;\n    protected String packageID;\n    protected boolean offlineMode;\n\n    public static String TAG = \"googlemaps\";\n\n    @Before\n    public void initialize() throws Exception {\n        parameters = getParams();\n        packageID = getPackageID(parameters);\n        offlineMode = parameters.getBoolean(\"offline_mode\");\n    }\n\n    @Test\n    public void setup() throws Exception {\n        
setScreenOrientation(ScreenOrientation.NATURAL);\n        runApplicationSetup();\n    }\n\n    @Test\n    public void runWorkload() throws Exception {\n        // Search for and select Cambridge train station\n        search(\"Cambridge Station CBG, England\", \"search_omnibox_text_box\");\n        selectSearchResultContaining(\"[CBG]\");\n        dismissLocationTutorial();\n        sleep(3);\n\n        /** On newer version the location info at bottom of screen interferes with swiping tests so remove by \n        swiping down. Check if new version by seeing if view switcher is present **/\n        UiObject viewSwitcher =\n            mDevice.findObject(new UiSelector().className(\"android.widget.ViewSwitcher\"));\n        UiObject cambridgeTextView =\n            mDevice.findObject(new UiSelector().textContains(\"Cambridge\"));\n\n        if(!viewSwitcher.exists()) { // Version 10.19.1\n            cambridgeTextView.dragTo(0,getDisplayHeight(), 40);\n        } \n\n        // Pinch to zoom, scroll around\n        UiObject mapContainer = mDevice.findObject(new UiSelector().resourceId(packageID + \"mainmap_container\"));\n        uiDeviceSwipeDown(100);\n        sleep(1);\n        uiDeviceSwipeUp(200);\n        sleep(1);\n        uiObjectVertPinchIn(mapContainer, 100, 25);\n        sleep(1);\n        uiDeviceSwipeLeft(100);\n        uiDeviceSwipeUp(100);\n        uiObjectVertPinchOut(mapContainer, 100, 50);\n        sleep(3);\n\n        // On newer versions swipe the location info tab back up\n        if(!viewSwitcher.exists()) { // Version 10.19.1\n            cambridgeTextView.dragTo(0, getDisplayCentreHeight(), 40);\n        }\n\n        // Get directions from Cambridge train station to Corpus Christi college\n        getDirectionsFromLocation();\n        search(\"Corpus Christi, Cambridge, England\", \"directions_startpoint_textbox\");\n        selectSearchResultContaining(\"Corpus Christi College\");\n        sleep(3);\n\n        // View the steps for the route\n 
       viewRouteSteps();\n\n        // Preview the first three steps of the route\n        previewRoute();\n        for (int i = 0; i < 3; ++i) {\n            previewNextRouteStep();\n            sleep(1);\n        }\n\n        // Return to the normal map view\n        pressBack();\n        pressBack();\n        pressBack();\n    }\n\n    public void search(String query, String box) throws Exception {\n        UiObject searchBox = mDevice.findObject(new UiSelector().resourceId(packageID + box)\n                                                                .className(\"android.widget.EditText\"));\n        if (!searchBox.waitForExists(uiAutoTimeout)) {\n            throw new UiObjectNotFoundException(\"Could not find search box.\");\n        }\n        searchBox.click();\n\n        UiObject searchText = mDevice.findObject(new UiSelector().resourceId(packageID + \"search_omnibox_edit_text\")\n                                                                 .className(\"android.widget.EditText\"));\n        searchText.click();\n        searchText.setText(query);\n    }\n\n    public void selectSearchResultContaining(String str) throws Exception {\n        UiObject match = mDevice.findObject(new UiSelector().textContains(str)\n                                                            .className(\"android.widget.TextView\"));\n        if (!match.waitForExists(uiAutoTimeout)) {\n            throw new UiObjectNotFoundException(\"Could not find search result containing \\\"\" + str + \"\\\".\");\n        }\n        match.click();\n    }\n\n    public void getDirectionsFromLocation() throws Exception {\n        UiObject directions = mDevice.findObject(new UiSelector().resourceId(packageID + \"placepage_directions_button\"));\n        if (!directions.exists()){\n            directions = mDevice.findObject(new UiSelector().textContains(\"DIRECTIONS\"));\n        }\n        directions.clickAndWaitForNewWindow(uiAutoTimeout);\n    }\n\n    public void 
dismissLocationTutorial() throws Exception {\n        UiObject gotItButton = mDevice.findObject(new UiSelector().resourceId(packageID + \"tutorial_pull_up_got_it\"));\n        if (gotItButton.waitForExists(uiAutoTimeout)) {\n            gotItButton.clickAndWaitForNewWindow(uiAutoTimeout);\n        }\n        sleep(3);\n    }\n\n    public void viewRouteSteps() throws Exception {\n        UiObject steps = mDevice.findObject(new UiSelector().textContains(\"STEPS & MORE\")\n                                                            .className(\"android.widget.TextView\"));\n        if (steps.exists()){\n            steps.clickAndWaitForNewWindow(uiAutoTimeout);\n        }\n    }\n\n    public void previewRoute() throws Exception {\n        UiObject preview = mDevice.findObject(new UiSelector().resourceId(packageID + \"start_button\"));\n        preview.clickAndWaitForNewWindow(uiAutoTimeout);\n    }\n\n    public void previewNextRouteStep() throws Exception {\n        UiObject next = getUiObjectByDescription(\"Show next\", \"android.widget.ImageView\");\n        next.click();\n    }\n\n    @Test\n    public void teardown() throws Exception {\n        unsetScreenOrientation();\n    }\n\n    public void runApplicationSetup() throws Exception {\n        // Dismiss 'Get the most from Google Maps' splash screen\n        UiObject skipButton;\n        skipButton = mDevice.findObject(new UiSelector().textContains(\"Skip\")\n                                                        .className(\"android.widget.Button\"));\n        if (skipButton.exists()) {\n            skipButton.clickAndWaitForNewWindow(uiAutoTimeout);\n        }\n\n        // Dismiss a dialog regarding real-time traffic updates\n        UiObject turnOffButton;\n        turnOffButton = mDevice.findObject(new UiSelector().textContains(\"TURN OFF\")\n                                                           .className(\"android.widget.TextView\"));\n        if (turnOffButton.exists()) {\n            
turnOffButton.clickAndWaitForNewWindow(uiAutoTimeout);\n        }\n\n        // Dismiss a dialog regarding the availability of location services\n        UiObject cancelButton;\n        cancelButton = mDevice.findObject(new UiSelector().textContains(\"CANCEL\")\n                                                          .className(\"android.widget.Button\"));\n        if (cancelButton.exists()) {\n            cancelButton.clickAndWaitForNewWindow(uiAutoTimeout);\n        }\n    }\n}\n"
  },
  {
    "path": "wa/workloads/googlemaps/uiauto/build.gradle",
    "content": "// Top-level build file where you can add configuration options common to all sub-projects/modules.\n\nbuildscript {\n    repositories {\n        jcenter()\n        google()\n    }\n    dependencies {\n        classpath 'com.android.tools.build:gradle:7.2.1'\n\n        // NOTE: Do not place your application dependencies here; they belong\n        // in the individual module build.gradle files\n    }\n}\n\nallprojects {\n    repositories {\n        jcenter()\n        google()\n    }\n}\n\ntask clean(type: Delete) {\n    delete rootProject.buildDir\n}\n"
  },
  {
    "path": "wa/workloads/googlemaps/uiauto/build.sh",
    "content": "#!/bin/bash\n#    Copyright 2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\n\n# CD into build dir if possible - allows building from any directory\nscript_path='.'\nif `readlink -f $0 &>/dev/null`; then\n    script_path=`readlink -f $0 2>/dev/null`\nfi\nscript_dir=`dirname $script_path`\ncd $script_dir\n\n# Ensure gradelw exists before starting\nif [[ ! -f gradlew ]]; then\n    echo 'gradlew file not found! Check that you are in the right directory.'\n    exit 9\nfi\n\n# Copy base class library from wlauto dist\nlibs_dir=app/libs\nbase_class=`python3 -c \"import os, wa; print(os.path.join(os.path.dirname(wa.__file__), 'framework', 'uiauto', 'uiauto.aar'))\"`\nmkdir -p $libs_dir\ncp $base_class $libs_dir\n\n# Build and return appropriate exit code if failed\n# gradle build\n./gradlew clean :app:assembleDebug\nexit_code=$?\nif [[ $exit_code -ne 0 ]]; then\n    echo \"ERROR: 'gradle build' exited with code $exit_code\"\n    exit $exit_code\nfi\n\n# If successful move APK file to workload folder (overwrite previous)\nrm -f ../com.arm.wa.uiauto.googlemaps\nif [[ -f app/build/outputs/apk/debug/com.arm.wa.uiauto.googlemaps.apk ]]; then\n    cp app/build/outputs/apk/debug/com.arm.wa.uiauto.googlemaps.apk ../com.arm.wa.uiauto.googlemaps.apk\nelse\n    echo 'ERROR: UiAutomator apk could not be found!'\n    exit 9\nfi\n"
  },
  {
    "path": "wa/workloads/googlemaps/uiauto/gradle/wrapper/gradle-wrapper.properties",
    "content": "#Wed May 03 15:42:44 BST 2017\ndistributionBase=GRADLE_USER_HOME\ndistributionPath=wrapper/dists\nzipStoreBase=GRADLE_USER_HOME\nzipStorePath=wrapper/dists\ndistributionUrl=https\\://services.gradle.org/distributions/gradle-7.3.3-all.zip\n"
  },
  {
    "path": "wa/workloads/googlemaps/uiauto/gradlew",
    "content": "#!/usr/bin/env bash\n\n##############################################################################\n##\n##  Gradle start up script for UN*X\n##\n##############################################################################\n\n# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.\nDEFAULT_JVM_OPTS=\"\"\n\nAPP_NAME=\"Gradle\"\nAPP_BASE_NAME=`basename \"$0\"`\n\n# Use the maximum available, or set MAX_FD != -1 to use that value.\nMAX_FD=\"maximum\"\n\nwarn ( ) {\n    echo \"$*\"\n}\n\ndie ( ) {\n    echo\n    echo \"$*\"\n    echo\n    exit 1\n}\n\n# OS specific support (must be 'true' or 'false').\ncygwin=false\nmsys=false\ndarwin=false\ncase \"`uname`\" in\n  CYGWIN* )\n    cygwin=true\n    ;;\n  Darwin* )\n    darwin=true\n    ;;\n  MINGW* )\n    msys=true\n    ;;\nesac\n\n# Attempt to set APP_HOME\n# Resolve links: $0 may be a link\nPRG=\"$0\"\n# Need this for relative symlinks.\nwhile [ -h \"$PRG\" ] ; do\n    ls=`ls -ld \"$PRG\"`\n    link=`expr \"$ls\" : '.*-> \\(.*\\)$'`\n    if expr \"$link\" : '/.*' > /dev/null; then\n        PRG=\"$link\"\n    else\n        PRG=`dirname \"$PRG\"`\"/$link\"\n    fi\ndone\nSAVED=\"`pwd`\"\ncd \"`dirname \\\"$PRG\\\"`/\" >/dev/null\nAPP_HOME=\"`pwd -P`\"\ncd \"$SAVED\" >/dev/null\n\nCLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar\n\n# Determine the Java command to use to start the JVM.\nif [ -n \"$JAVA_HOME\" ] ; then\n    if [ -x \"$JAVA_HOME/jre/sh/java\" ] ; then\n        # IBM's JDK on AIX uses strange locations for the executables\n        JAVACMD=\"$JAVA_HOME/jre/sh/java\"\n    else\n        JAVACMD=\"$JAVA_HOME/bin/java\"\n    fi\n    if [ ! 
-x \"$JAVACMD\" ] ; then\n        die \"ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME\n\nPlease set the JAVA_HOME variable in your environment to match the\nlocation of your Java installation.\"\n    fi\nelse\n    JAVACMD=\"java\"\n    which java >/dev/null 2>&1 || die \"ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.\n\nPlease set the JAVA_HOME variable in your environment to match the\nlocation of your Java installation.\"\nfi\n\n# Increase the maximum file descriptors if we can.\nif [ \"$cygwin\" = \"false\" -a \"$darwin\" = \"false\" ] ; then\n    MAX_FD_LIMIT=`ulimit -H -n`\n    if [ $? -eq 0 ] ; then\n        if [ \"$MAX_FD\" = \"maximum\" -o \"$MAX_FD\" = \"max\" ] ; then\n            MAX_FD=\"$MAX_FD_LIMIT\"\n        fi\n        ulimit -n $MAX_FD\n        if [ $? -ne 0 ] ; then\n            warn \"Could not set maximum file descriptor limit: $MAX_FD\"\n        fi\n    else\n        warn \"Could not query maximum file descriptor limit: $MAX_FD_LIMIT\"\n    fi\nfi\n\n# For Darwin, add options to specify how the application appears in the dock\nif $darwin; then\n    GRADLE_OPTS=\"$GRADLE_OPTS \\\"-Xdock:name=$APP_NAME\\\" \\\"-Xdock:icon=$APP_HOME/media/gradle.icns\\\"\"\nfi\n\n# For Cygwin, switch paths to Windows format before running java\nif $cygwin ; then\n    APP_HOME=`cygpath --path --mixed \"$APP_HOME\"`\n    CLASSPATH=`cygpath --path --mixed \"$CLASSPATH\"`\n    JAVACMD=`cygpath --unix \"$JAVACMD\"`\n\n    # We build the pattern for arguments to be converted via cygpath\n    ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`\n    SEP=\"\"\n    for dir in $ROOTDIRSRAW ; do\n        ROOTDIRS=\"$ROOTDIRS$SEP$dir\"\n        SEP=\"|\"\n    done\n    OURCYGPATTERN=\"(^($ROOTDIRS))\"\n    # Add a user-defined pattern to the cygpath arguments\n    if [ \"$GRADLE_CYGPATTERN\" != \"\" ] ; then\n        OURCYGPATTERN=\"$OURCYGPATTERN|($GRADLE_CYGPATTERN)\"\n    fi\n    # Now convert the arguments - 
kludge to limit ourselves to /bin/sh\n    i=0\n    for arg in \"$@\" ; do\n        CHECK=`echo \"$arg\"|egrep -c \"$OURCYGPATTERN\" -`\n        CHECK2=`echo \"$arg\"|egrep -c \"^-\"`                                 ### Determine if an option\n\n        if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then                    ### Added a condition\n            eval `echo args$i`=`cygpath --path --ignore --mixed \"$arg\"`\n        else\n            eval `echo args$i`=\"\\\"$arg\\\"\"\n        fi\n        i=$((i+1))\n    done\n    case $i in\n        (0) set -- ;;\n        (1) set -- \"$args0\" ;;\n        (2) set -- \"$args0\" \"$args1\" ;;\n        (3) set -- \"$args0\" \"$args1\" \"$args2\" ;;\n        (4) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" ;;\n        (5) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" ;;\n        (6) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" \"$args5\" ;;\n        (7) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" \"$args5\" \"$args6\" ;;\n        (8) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" \"$args5\" \"$args6\" \"$args7\" ;;\n        (9) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" \"$args5\" \"$args6\" \"$args7\" \"$args8\" ;;\n    esac\nfi\n\n# Split up the JVM_OPTS And GRADLE_OPTS values into an array, following the shell quoting and substitution rules\nfunction splitJvmOpts() {\n    JVM_OPTS=(\"$@\")\n}\neval splitJvmOpts $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS\nJVM_OPTS[${#JVM_OPTS[*]}]=\"-Dorg.gradle.appname=$APP_BASE_NAME\"\n\nexec \"$JAVACMD\" \"${JVM_OPTS[@]}\" -classpath \"$CLASSPATH\" org.gradle.wrapper.GradleWrapperMain \"$@\"\n"
  },
  {
    "path": "wa/workloads/googlemaps/uiauto/gradlew.bat",
    "content": "@if \"%DEBUG%\" == \"\" @echo off\r\n@rem ##########################################################################\r\n@rem\r\n@rem  Gradle startup script for Windows\r\n@rem\r\n@rem ##########################################################################\r\n\r\n@rem Set local scope for the variables with windows NT shell\r\nif \"%OS%\"==\"Windows_NT\" setlocal\r\n\r\n@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.\r\nset DEFAULT_JVM_OPTS=\r\n\r\nset DIRNAME=%~dp0\r\nif \"%DIRNAME%\" == \"\" set DIRNAME=.\r\nset APP_BASE_NAME=%~n0\r\nset APP_HOME=%DIRNAME%\r\n\r\n@rem Find java.exe\r\nif defined JAVA_HOME goto findJavaFromJavaHome\r\n\r\nset JAVA_EXE=java.exe\r\n%JAVA_EXE% -version >NUL 2>&1\r\nif \"%ERRORLEVEL%\" == \"0\" goto init\r\n\r\necho.\r\necho ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.\r\necho.\r\necho Please set the JAVA_HOME variable in your environment to match the\r\necho location of your Java installation.\r\n\r\ngoto fail\r\n\r\n:findJavaFromJavaHome\r\nset JAVA_HOME=%JAVA_HOME:\"=%\r\nset JAVA_EXE=%JAVA_HOME%/bin/java.exe\r\n\r\nif exist \"%JAVA_EXE%\" goto init\r\n\r\necho.\r\necho ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%\r\necho.\r\necho Please set the JAVA_HOME variable in your environment to match the\r\necho location of your Java installation.\r\n\r\ngoto fail\r\n\r\n:init\r\n@rem Get command-line arguments, handling Windowz variants\r\n\r\nif not \"%OS%\" == \"Windows_NT\" goto win9xME_args\r\nif \"%@eval[2+2]\" == \"4\" goto 4NT_args\r\n\r\n:win9xME_args\r\n@rem Slurp the command line arguments.\r\nset CMD_LINE_ARGS=\r\nset _SKIP=2\r\n\r\n:win9xME_args_slurp\r\nif \"x%~1\" == \"x\" goto execute\r\n\r\nset CMD_LINE_ARGS=%*\r\ngoto execute\r\n\r\n:4NT_args\r\n@rem Get arguments from the 4NT Shell from JP Software\r\nset CMD_LINE_ARGS=%$\r\n\r\n:execute\r\n@rem Setup the command line\r\n\r\nset 
CLASSPATH=%APP_HOME%\\gradle\\wrapper\\gradle-wrapper.jar\r\n\r\n@rem Execute Gradle\r\n\"%JAVA_EXE%\" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% \"-Dorg.gradle.appname=%APP_BASE_NAME%\" -classpath \"%CLASSPATH%\" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%\r\n\r\n:end\r\n@rem End local scope for the variables with windows NT shell\r\nif \"%ERRORLEVEL%\"==\"0\" goto mainEnd\r\n\r\n:fail\r\nrem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of\r\nrem the _cmd.exe /c_ return code!\r\nif  not \"\" == \"%GRADLE_EXIT_CONSOLE%\" exit 1\r\nexit /b 1\r\n\r\n:mainEnd\r\nif \"%OS%\"==\"Windows_NT\" endlocal\r\n\r\n:omega\r\n"
  },
  {
    "path": "wa/workloads/googlemaps/uiauto/settings.gradle",
    "content": "include ':app'\n"
  },
  {
    "path": "wa/workloads/googlephotos/__init__.py",
    "content": "#    Copyright 2014-2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\nimport os\n\nfrom wa import ApkUiautoWorkload, Parameter\nfrom wa.framework.exception import ValidationError\nfrom wa.utils.types import list_of_strs\nfrom wa.utils.misc import unique\n\n\nclass Googlephotos(ApkUiautoWorkload):\n\n    name = 'googlephotos'\n    package_names = ['com.google.android.apps.photos']\n    description = '''\n    A workload to perform standard productivity tasks with Google Photos. The workload carries out\n    various tasks, such as browsing images, performing zooms, and post-processing the image.\n\n    Test description:\n\n    1. Four images are copied to the target\n    2. The application is started in offline access mode\n    3. Gestures are performed to pinch zoom in and out of the selected image\n    4. The colour of a selected image is edited by selecting the colour menu, incrementing the\n       colour, resetting the colour and decrementing the colour using the seek bar.\n    5. A crop test is performed on a selected image.  UiAutomator does not allow the selection of\n       the crop markers so the image is tilted positively, reset and then tilted negatively to get a\n       similar cropping effect.\n    6. 
A rotate test is performed on a selected image, rotating anticlockwise 90 degrees, 180\n       degrees and 270 degrees.\n\n    Known working APK version: 4.53.0.316914374\n    '''\n\n    default_test_images = [\n        'uxperf_1200x1600.png', 'uxperf_1600x1200.jpg',\n        'uxperf_2448x3264.png', 'uxperf_3264x2448.jpg',\n    ]\n\n    parameters = [\n        Parameter('test_images', kind=list_of_strs, default=default_test_images,\n                  constraint=lambda x: len(unique(x)) == 4,\n                  description='''\n                  A list of four JPEG and/or PNG files to be pushed to the target.\n                  Absolute file paths may be used but tilde expansion must be escaped.\n                  '''),\n    ]\n\n    def __init__(self, target, **kwargs):\n        super(Googlephotos, self).__init__(target, **kwargs)\n        self.deployable_assets = self.test_images\n\n    def init_resources(self, context):\n        super(Googlephotos, self).init_resources(context)\n        # Only accept certain image formats\n        for image in self.test_images:\n            if os.path.splitext(image.lower())[1] not in ['.jpg', '.jpeg', '.png']:\n                raise ValidationError('{} must be a JPEG or PNG file'.format(image))\n\n    def deploy_assets(self, context):\n        super(Googlephotos, self).deploy_assets(context)\n        # Create a subfolder for each test_image named ``wa-[1-4]``\n        # Move each image into its subfolder\n        # This is to guarantee ordering and allows the workload to select a specific\n        # image by subfolder, as filenames are not shown easily within the app\n        d = self.target.working_directory\n        e = self.target.external_storage\n\n        file_list = []\n\n        for i, f in enumerate(self.test_images):\n            orig_file_path = self.target.path.join(d, f)\n            new_dir = self.target.path.join(e, 'wa', 'wa-{}'.format(i + 1))\n            new_file_path = self.target.path.join(new_dir, f)\n\n     
       self.target.execute('mkdir -p {}'.format(new_dir))\n            self.target.execute('cp {} {}'.format(orig_file_path, new_file_path))\n            self.target.execute('rm {}'.format(orig_file_path))\n            file_list.append(new_file_path)\n        self.deployed_assets = file_list\n        # Force rescan\n        self.target.refresh_files(self.deployed_assets)\n\n    def remove_assets(self, context):\n        for asset in self.deployed_assets:\n            self.target.remove(os.path.dirname(asset))\n        self.target.refresh_files(self.deployed_assets)\n"
  },
  {
    "path": "wa/workloads/googlephotos/uiauto/app/build.gradle",
    "content": "apply plugin: 'com.android.application'\n\ndef packageName = \"com.arm.wa.uiauto.googlephotos\"\n\nandroid {\n    compileSdkVersion 28\n    buildToolsVersion \"28.0.3\"\n    defaultConfig {\n        applicationId \"${packageName}\"\n        minSdkVersion 18\n        targetSdkVersion 28\n        versionCode 1\n        versionName \"1.0\"\n        testInstrumentationRunner \"android.support.test.runner.AndroidJUnitRunner\"\n    }\n    buildTypes {\n        release {\n            minifyEnabled false\n            proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'\n        }\n        applicationVariants.all { variant ->\n            variant.outputs.each { output ->\n                output.outputFileName = \"${packageName}.apk\"\n            }\n        }\n    }\n}\n\ndependencies {\n    implementation fileTree(dir: 'libs', include: ['*.jar'])\n    implementation 'com.android.support.test:runner:0.5'\n    implementation 'com.android.support.test:rules:0.5'\n    implementation 'com.android.support.test.uiautomator:uiautomator-v18:2.1.2'\n    implementation(name: 'uiauto', ext:'aar')\n}\n\nrepositories {\n    flatDir {\n        dirs 'libs'\n    }\n}\n"
  },
  {
    "path": "wa/workloads/googlephotos/uiauto/app/src/main/AndroidManifest.xml",
    "content": "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n<manifest xmlns:android=\"http://schemas.android.com/apk/res/android\"\n    package=\"com.arm.wa.uiauto.googlephotos\"\n    android:versionCode=\"1\"\n    android:versionName=\"1.0\">\n\n\n    <instrumentation\n        android:name=\"android.support.test.runner.AndroidJUnitRunner\"\n        android:targetPackage=\"${applicationId}\"/>\n\n</manifest>\n\n"
  },
  {
    "path": "wa/workloads/googlephotos/uiauto/app/src/main/java/com/arm/wa/uiauto/googlephotos/UiAutomation.java",
    "content": "/*    Copyright 2014-2017 ARM Limited\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\npackage com.arm.wa.uiauto.googlephotos;\n\nimport android.content.Intent;\nimport android.graphics.Rect;\nimport android.os.Bundle;\nimport android.support.test.runner.AndroidJUnit4;\nimport android.support.test.uiautomator.UiObject;\nimport android.support.test.uiautomator.UiObjectNotFoundException;\nimport android.support.test.uiautomator.UiSelector;\n\nimport com.arm.wa.uiauto.UxPerfUiAutomation.GestureTestParams;\nimport com.arm.wa.uiauto.UxPerfUiAutomation.GestureType;\nimport com.arm.wa.uiauto.BaseUiAutomation;\nimport com.arm.wa.uiauto.ApplaunchInterface;\nimport com.arm.wa.uiauto.UiAutoUtils;\nimport com.arm.wa.uiauto.ActionLogger;\n\nimport org.junit.Before;\nimport org.junit.Test;\nimport org.junit.runner.RunWith;\n\nimport java.util.Iterator;\nimport java.util.LinkedHashMap;\nimport java.util.Map;\nimport java.util.Map.Entry;\nimport java.util.concurrent.TimeUnit;\n\nimport static com.arm.wa.uiauto.BaseUiAutomation.FindByCriteria.BY_DESC;\nimport static com.arm.wa.uiauto.BaseUiAutomation.FindByCriteria.BY_ID;\nimport static com.arm.wa.uiauto.BaseUiAutomation.FindByCriteria.BY_TEXT;\n\n@RunWith(AndroidJUnit4.class)\npublic class UiAutomation extends BaseUiAutomation implements ApplaunchInterface {\n\n    private long viewTimeout = TimeUnit.SECONDS.toMillis(10);\n\n    protected Bundle parameters;\n    protected String 
packageID;\n\n    @Before\n    public void initialize(){\n        parameters = getParams();\n        packageID = getPackageID(parameters);\n    }\n\n    @Test\n    public void setup() throws Exception{\n        runApplicationSetup();\n    }\n\n    @Test\n    public void runWorkload() throws Exception {\n        selectGalleryFolder(\"wa-1\");\n        selectFirstImage();\n        gesturesTest();\n        navigateUp();\n\n        selectGalleryFolder(\"wa-2\");\n        selectFirstImage();\n        editPhotoColorTest();\n        closeAndReturn(true);\n        navigateUp();\n\n        selectGalleryFolder(\"wa-3\");\n        selectFirstImage();\n        cropPhotoTest();\n        closeAndReturn(true);\n        navigateUp();\n\n        selectGalleryFolder(\"wa-4\");\n        selectFirstImage();\n        rotatePhotoTest();\n        closeAndReturn(true);\n    }\n\n    @Test\n    public void teardown() throws Exception {\n        unsetScreenOrientation();\n    }\n\n\n     // Get application parameters and clear the initial run dialogues of the application launch.\n    public void runApplicationSetup() throws Exception {\n        sleep(5); // Pause while splash screen loads\n        setScreenOrientation(ScreenOrientation.NATURAL);\n\n        // Clear the initial run dialogues of the application launch.\n        dismissWelcomeView();\n        closePromotionPopUp();\n        closeMissingOutPopUp();\n    }\n\n    // Returns the launch command for the application.\n    public String getLaunchCommand() {\n        String launch_command;\n        launch_command = UiAutoUtils.createLaunchCommand(parameters);\n        return launch_command;\n    }\n\n    // Pass the workload parameters, used for applaunch\n    public void setWorkloadParameters(Bundle workload_parameters) {\n        parameters = workload_parameters;\n        packageID = getPackageID(parameters);\n    }\n\n    // Sets the UiObject that marks the end of the application launch.\n    public UiObject getLaunchEndObject() 
{\n        UiObject launchEndObject = mDevice.findObject(new UiSelector().textContains(\"Photos\")\n                                          .className(\"android.widget.TextView\"));\n        return launchEndObject;\n    }\n\n    public void dismissWelcomeView() throws Exception {\n        // Click through the first two pages and make sure that we don't sign\n        // in to our google account. This ensures the same set of photographs\n        // are placed in the camera directory for each run.\n        UiObject getStartedButton =\n            mDevice.findObject(new UiSelector().textContains(\"Get started\")\n                                               .className(\"android.widget.Button\"));\n        if (getStartedButton.waitForExists(viewTimeout)) {\n            getStartedButton.click();\n        }\n\n        // A network connection is not required for this workload. However,\n        // when the Google Photos app is invoked from the multiapp workload a\n        // connection is required for sharing content. 
Handle the different UI\n        // pathways when dismissing welcome views here.\n        UiObject doNotSignInButton =\n            mDevice.findObject(new UiSelector().resourceId(packageID + \"dont_sign_in_button\"));\n\n        UiObject accountName =\n            mDevice.findObject(new UiSelector().resourceId(packageID + \"name\")\n                                               .className(\"android.widget.TextView\"));\n        if (doNotSignInButton.exists()) {\n            doNotSignInButton.click();\n        }\n        else if (accountName.exists()) {\n            accountName.click();\n            clickUiObject(BY_TEXT, \"Use without an account\", \"android.widget.TextView\", true);\n        }\n        //Some devices get popup asking for confirmation to not use backup.\n        UiObject keepBackupOff =\n        mDevice.findObject(new UiSelector().textContains(\"Keep Off\")\n                                           .className(\"android.widget.Button\"));\n        if (keepBackupOff.exists()){\n            keepBackupOff.click();\n        }\n\n        UiObject nextButton =\n            mDevice.findObject(new UiSelector().resourceId(packageID + \"next_button\")\n                                               .className(\"android.widget.ImageView\"));\n        if (nextButton.exists()) {\n            nextButton.clickAndWaitForNewWindow();\n        }\n    }\n\n    public void closePromotionPopUp() throws Exception {\n        UiObject promoCloseButton =\n            mDevice.findObject(new UiSelector().resourceId(packageID + \"promo_close_button\"));\n        if (promoCloseButton.exists()) {\n            promoCloseButton.click();\n        }\n    }\n\n    public void closeMissingOutPopUp() throws Exception {\n        UiObject missingPopup =\n               mDevice.findObject(new UiSelector().textContains(\"Not now\"));\n        if (missingPopup.waitForExists(3000)) {\n            missingPopup.click();\n        }\n    }\n\n    // Helper to click on the first image\n    
public void selectFirstImage() throws Exception {\n        UiObject photo =\n            mDevice.findObject(new UiSelector().resourceId(packageID + \"recycler_view\")\n                                               .childSelector(new UiSelector()\n                                               .index(1)));\n        if (photo.exists()) {\n            photo.click();\n        } else {\n            // On some versions of the app a non-zero index is used for the\n            // photographs position while on other versions a zero index is used.\n            // Try both possibilities before throwing an exception.\n            photo =\n                mDevice.findObject(new UiSelector().resourceId(packageID + \"recycler_view\")\n                                                   .childSelector(new UiSelector()\n                                                   .index(0)));\n            photo.click();\n        }\n    }\n\n    // Helper that accepts, closes and navigates back to application home screen after an edit operation.\n    // dontsave - True will discard the image. 
False will save the image\n    public void closeAndReturn(final boolean dontsave) throws Exception {\n        long timeout =  TimeUnit.SECONDS.toMillis(3);\n\n        UiObject accept =\n            mDevice.findObject(new UiSelector().description(\"Accept\"));\n        UiObject done =\n            mDevice.findObject(new UiSelector().resourceId(packageID + \"cpe_save_button\")\n                                               .textContains(\"Done\"));\n\n        // On some edit operations we can either confirm an edit with \"Accept\", \"DONE\" or neither.\n        if (accept.waitForExists(timeout)) {\n            accept.click();\n        } else if (done.waitForExists(timeout)) {\n            done.click();\n        }\n\n        if (dontsave) {\n            clickUiObject(BY_DESC, \"Close editor\", \"android.widget.ImageView\");\n\n            UiObject discard = getUiObjectByText(\"DISCARD\", \"android.widget.Button\");\n            discard.waitForExists(viewTimeout);\n            discard.click();\n        } else {\n            UiObject save = getUiObjectByText(\"SAVE\", \"android.widget.TextView\");\n            save.waitForExists(viewTimeout);\n            save.click();\n        }\n    }\n\n    public void navigateUp() throws Exception {\n        // Navigate up to go to folder\n        UiObject navigateUpButton =\n            clickUiObject(BY_DESC, \"Navigate Up\", \"android.widget.ImageButton\", true);\n        // Navigate up again to go to gallery - if it exists\n        if (navigateUpButton.exists()) {\n            navigateUpButton.clickAndWaitForNewWindow();\n        }\n    }\n\n    private void gesturesTest() throws Exception {\n        String testTag = \"gesture\";\n\n        // Perform a range of swipe tests while browsing photo gallery\n        LinkedHashMap<String, GestureTestParams> testParams = new LinkedHashMap<String, GestureTestParams>();\n        testParams.put(\"pinch_out\", new GestureTestParams(GestureType.PINCH, PinchType.OUT, 100, 50));\n        
testParams.put(\"pinch_in\", new GestureTestParams(GestureType.PINCH, PinchType.IN, 100, 50));\n\n        Iterator<Entry<String, GestureTestParams>> it = testParams.entrySet().iterator();\n\n        while (it.hasNext()) {\n            Map.Entry<String, GestureTestParams> pair = it.next();\n            GestureType type = pair.getValue().gestureType;\n            PinchType pinch = pair.getValue().pinchType;\n            int steps = pair.getValue().steps;\n            int percent = pair.getValue().percent;\n\n            UiObject view =\n                mDevice.findObject(new UiSelector().enabled(true));\n            if (!view.waitForExists(viewTimeout)) {\n                throw new UiObjectNotFoundException(\"Could not find \\\"photo view\\\".\");\n            }\n\n            String runName = String.format(testTag + \"_\" + pair.getKey());\n            ActionLogger logger = new ActionLogger(runName, parameters);\n            logger.start();\n\n            switch (type) {\n                case PINCH:\n                    uiObjectVertPinch(view, pinch, steps, percent);\n                    break;\n                default:\n                    break;\n            }\n\n            logger.stop();\n        }\n    }\n\n    public enum Position { LEFT, RIGHT, CENTRE };\n\n    private class PositionPair {\n        private Position start;\n        private Position end;\n\n        PositionPair(final Position start, final Position end) {\n            this.start = start;\n            this.end = end;\n        }\n    }\n\n    private void editPhotoColorTest() throws Exception {\n        long timeout =  TimeUnit.SECONDS.toMillis(3);\n        // To improve travel accuracy perform the slide bar operation slowly\n        final int steps = 100;\n\n        String testTag = \"edit\";\n\n        // Perform a range of swipe tests while browsing photo gallery\n        LinkedHashMap<String, PositionPair> testParams = new LinkedHashMap<String, PositionPair>();\n        
testParams.put(\"color_increment\", new PositionPair(Position.CENTRE, Position.RIGHT));\n        testParams.put(\"color_reset\", new PositionPair(Position.RIGHT, Position.CENTRE));\n        testParams.put(\"color_decrement\", new PositionPair(Position.CENTRE, Position.LEFT));\n\n        Iterator<Entry<String, PositionPair>> it = testParams.entrySet().iterator();\n\n        clickUiObject(BY_ID, packageID + \"edit\", \"android.widget.ImageView\");\n        UiObject enhance =\n            mDevice.findObject(new UiSelector().description(\"Enhance photo\"));\n        if (enhance.waitForExists(timeout)){\n            enhance.click();\n        }\n\n        // Manage potential different spelling of UI element\n        UiObject editCol =\n            mDevice.findObject(new UiSelector().textMatches(\"Colou?r\"));\n        if (editCol.waitForExists(timeout)) {\n            editCol.click();\n        } else {\n            UiObject adjustTool =\n                mDevice.findObject(new UiSelector().resourceId(packageID + \"cpe_adjustments_tool\")\n                                                   .className(\"android.widget.ImageView\"));\n            if (adjustTool.waitForExists(timeout)){\n                adjustTool.click();\n            } else {\n                throw new UiObjectNotFoundException(String.format(\"Could not find Color/Colour adjustment\"));\n            }\n        }\n\n        UiObject seekBar =\n            mDevice.findObject(new UiSelector().resourceId(packageID + \"cpe_strength_seek_bar\")\n                                               .className(\"android.widget.SeekBar\"));\n        if (!(seekBar.exists())){\n            seekBar =\n            mDevice.findObject(new UiSelector().resourceIdMatches(\".*/cpe_adjustments_section_slider\")\n                                               .className(\"android.widget.SeekBar\").descriptionMatches(\"Colou?r\"));\n        }\n\n        while (it.hasNext()) {\n            Map.Entry<String, PositionPair> pair = 
it.next();\n            Position start = pair.getValue().start;\n            Position end = pair.getValue().end;\n\n            String runName = String.format(testTag + \"_\" + pair.getKey());\n            ActionLogger logger = new ActionLogger(runName, parameters);\n\n            logger.start();\n            seekBarTest(seekBar, start, end, steps);\n            logger.stop();\n        }\n    }\n\n    private void cropPhotoTest() throws Exception {\n        String testTag = \"crop\";\n\n        // To improve travel accuracy perform the slide bar operation slowly\n        final int steps = 100;\n\n        // Perform a range of swipe tests while browsing photo gallery\n        LinkedHashMap<String, Position> testParams = new LinkedHashMap<String, Position>();\n        testParams.put(\"tilt_positive\", Position.LEFT);\n        testParams.put(\"tilt_reset\", Position.RIGHT);\n        testParams.put(\"tilt_negative\", Position.RIGHT);\n\n        Iterator<Entry<String, Position>> it = testParams.entrySet().iterator();\n\n        clickCropRotateButton();\n\n        UiObject straightenSlider =\n            getUiObjectByResourceId(packageID + \"cpe_straighten_slider\");\n\n        while (it.hasNext()) {\n            Map.Entry<String, Position> pair = it.next();\n            Position pos = pair.getValue();\n\n            String runName = String.format(testTag + \"_\" + pair.getKey());\n            ActionLogger logger = new ActionLogger(runName, parameters);\n\n            logger.start();\n            slideBarTest(straightenSlider, pos, steps);\n            logger.stop();\n        }\n    }\n\n    private void rotatePhotoTest() throws Exception {\n        String testTag = \"rotate\";\n\n        String[] subTests = {\"90\", \"180\", \"270\"};\n\n        clickCropRotateButton();\n\n        UiObject rotate =\n            getUiObjectByResourceId(packageID + \"cpe_rotate_90\");\n\n        for (String subTest : subTests) {\n            String runName = String.format(testTag + \"_\" 
+ subTest);\n            ActionLogger logger = new ActionLogger(runName, parameters);\n\n            logger.start();\n            rotate.click();\n            logger.stop();\n        }\n    }\n\n    private void clickCropRotateButton() throws Exception {\n        clickUiObject(BY_ID, packageID + \"edit\", \"android.widget.ImageView\");\n        //For newer version of app\n        UiObject cropRotatebutton =\n            mDevice.findObject(new UiSelector().resourceId(packageID + \"editor_tool_item_icon\")\n                                               .descriptionContains(\"Crop and rotate photo\"));\n        if (cropRotatebutton.exists()) {\n            cropRotatebutton.click();\n        } else {\n            clickUiObject(BY_ID, packageID + \"cpe_crop_tool\", \"android.widget.ImageView\");\n        }\n    }\n\n    // Helper to slide the seekbar during photo edit.\n    private void seekBarTest(final UiObject view, final Position start, final Position end, final int steps) throws Exception {\n        final int SWIPE_MARGIN_LIMIT = 5;\n        Rect rect = view.getVisibleBounds();\n        int startX, endX;\n\n        switch (start) {\n            case CENTRE:\n                startX = rect.centerX();\n                break;\n            case LEFT:\n                startX = rect.left + SWIPE_MARGIN_LIMIT;\n                break;\n            case RIGHT:\n                startX = rect.right - SWIPE_MARGIN_LIMIT;\n                break;\n            default:\n                startX = 0;\n                break;\n        }\n\n        switch (end) {\n            case CENTRE:\n                endX = rect.centerX();\n                break;\n            case LEFT:\n                endX = rect.left + SWIPE_MARGIN_LIMIT;\n                break;\n            case RIGHT:\n                endX = rect.right - SWIPE_MARGIN_LIMIT;\n                break;\n            default:\n                endX = 0;\n                break;\n        }\n\n        mDevice.drag(startX, 
rect.centerY(), endX, rect.centerY(), steps);\n    }\n\n    // Helper to slide the slidebar during photo edit.\n    private void slideBarTest(final UiObject view, final Position pos, final int steps) throws Exception {\n        final int SWIPE_MARGIN_LIMIT = 5;\n        Rect rect = view.getBounds();\n\n        switch (pos) {\n            case LEFT:\n                mDevice.drag(rect.left + SWIPE_MARGIN_LIMIT, rect.centerY(),\n                             rect.left + (rect.width() / 4), rect.centerY(),\n                             steps);\n                break;\n            case RIGHT:\n                mDevice.drag(rect.right - SWIPE_MARGIN_LIMIT, rect.centerY(),\n                             rect.right - (rect.width() / 4), rect.centerY(),\n                             steps);\n                break;\n            default:\n                break;\n        }\n    }\n}\n"
  },
  {
    "path": "wa/workloads/googlephotos/uiauto/build.gradle",
    "content": "// Top-level build file where you can add configuration options common to all sub-projects/modules.\n\nbuildscript {\n    repositories {\n        jcenter()\n        google()\n    }\n    dependencies {\n        classpath 'com.android.tools.build:gradle:7.2.1'\n\n        // NOTE: Do not place your application dependencies here; they belong\n        // in the individual module build.gradle files\n    }\n}\n\nallprojects {\n    repositories {\n        jcenter()\n        google()\n    }\n}\n\ntask clean(type: Delete) {\n    delete rootProject.buildDir\n}\n"
  },
  {
    "path": "wa/workloads/googlephotos/uiauto/build.sh",
    "content": "#!/bin/bash\n#    Copyright 2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\n\n# CD into build dir if possible - allows building from any directory\nscript_path='.'\nif `readlink -f $0 &>/dev/null`; then\n    script_path=`readlink -f $0 2>/dev/null`\nfi\nscript_dir=`dirname $script_path`\ncd $script_dir\n\n# Ensure gradlew exists before starting\nif [[ ! -f gradlew ]]; then\n    echo 'gradlew file not found! Check that you are in the right directory.'\n    exit 9\nfi\n\n# Copy base class library from wa dist\nlibs_dir=app/libs\nbase_class=`python3 -c \"import os, wa; print(os.path.join(os.path.dirname(wa.__file__), 'framework', 'uiauto', 'uiauto.aar'))\"`\nmkdir -p $libs_dir\ncp $base_class $libs_dir\n\n# Build and return appropriate exit code if failed\n# gradle build\n./gradlew clean :app:assembleDebug\nexit_code=$?\nif [[ $exit_code -ne 0 ]]; then\n    echo \"ERROR: 'gradle build' exited with code $exit_code\"\n    exit $exit_code\nfi\n\n# If successful move APK file to workload folder (overwrite previous)\npackage=com.arm.wa.uiauto.googlephotos\nrm -f ../$package\nif [[ -f app/build/outputs/apk/debug/$package.apk ]]; then\n    cp app/build/outputs/apk/debug/$package.apk ../$package.apk\nelse\n    echo 'ERROR: UiAutomator apk could not be found!'\n    exit 9\nfi\n"
  },
  {
    "path": "wa/workloads/googlephotos/uiauto/gradle/wrapper/gradle-wrapper.properties",
    "content": "#Thu Jun 08 14:26:39 BST 2017\ndistributionBase=GRADLE_USER_HOME\ndistributionPath=wrapper/dists\nzipStoreBase=GRADLE_USER_HOME\nzipStorePath=wrapper/dists\ndistributionUrl=https\\://services.gradle.org/distributions/gradle-7.3.3-all.zip\n"
  },
  {
    "path": "wa/workloads/googlephotos/uiauto/gradlew",
    "content": "#!/usr/bin/env bash\n\n##############################################################################\n##\n##  Gradle start up script for UN*X\n##\n##############################################################################\n\n# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.\nDEFAULT_JVM_OPTS=\"\"\n\nAPP_NAME=\"Gradle\"\nAPP_BASE_NAME=`basename \"$0\"`\n\n# Use the maximum available, or set MAX_FD != -1 to use that value.\nMAX_FD=\"maximum\"\n\nwarn ( ) {\n    echo \"$*\"\n}\n\ndie ( ) {\n    echo\n    echo \"$*\"\n    echo\n    exit 1\n}\n\n# OS specific support (must be 'true' or 'false').\ncygwin=false\nmsys=false\ndarwin=false\ncase \"`uname`\" in\n  CYGWIN* )\n    cygwin=true\n    ;;\n  Darwin* )\n    darwin=true\n    ;;\n  MINGW* )\n    msys=true\n    ;;\nesac\n\n# Attempt to set APP_HOME\n# Resolve links: $0 may be a link\nPRG=\"$0\"\n# Need this for relative symlinks.\nwhile [ -h \"$PRG\" ] ; do\n    ls=`ls -ld \"$PRG\"`\n    link=`expr \"$ls\" : '.*-> \\(.*\\)$'`\n    if expr \"$link\" : '/.*' > /dev/null; then\n        PRG=\"$link\"\n    else\n        PRG=`dirname \"$PRG\"`\"/$link\"\n    fi\ndone\nSAVED=\"`pwd`\"\ncd \"`dirname \\\"$PRG\\\"`/\" >/dev/null\nAPP_HOME=\"`pwd -P`\"\ncd \"$SAVED\" >/dev/null\n\nCLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar\n\n# Determine the Java command to use to start the JVM.\nif [ -n \"$JAVA_HOME\" ] ; then\n    if [ -x \"$JAVA_HOME/jre/sh/java\" ] ; then\n        # IBM's JDK on AIX uses strange locations for the executables\n        JAVACMD=\"$JAVA_HOME/jre/sh/java\"\n    else\n        JAVACMD=\"$JAVA_HOME/bin/java\"\n    fi\n    if [ ! 
-x \"$JAVACMD\" ] ; then\n        die \"ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME\n\nPlease set the JAVA_HOME variable in your environment to match the\nlocation of your Java installation.\"\n    fi\nelse\n    JAVACMD=\"java\"\n    which java >/dev/null 2>&1 || die \"ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.\n\nPlease set the JAVA_HOME variable in your environment to match the\nlocation of your Java installation.\"\nfi\n\n# Increase the maximum file descriptors if we can.\nif [ \"$cygwin\" = \"false\" -a \"$darwin\" = \"false\" ] ; then\n    MAX_FD_LIMIT=`ulimit -H -n`\n    if [ $? -eq 0 ] ; then\n        if [ \"$MAX_FD\" = \"maximum\" -o \"$MAX_FD\" = \"max\" ] ; then\n            MAX_FD=\"$MAX_FD_LIMIT\"\n        fi\n        ulimit -n $MAX_FD\n        if [ $? -ne 0 ] ; then\n            warn \"Could not set maximum file descriptor limit: $MAX_FD\"\n        fi\n    else\n        warn \"Could not query maximum file descriptor limit: $MAX_FD_LIMIT\"\n    fi\nfi\n\n# For Darwin, add options to specify how the application appears in the dock\nif $darwin; then\n    GRADLE_OPTS=\"$GRADLE_OPTS \\\"-Xdock:name=$APP_NAME\\\" \\\"-Xdock:icon=$APP_HOME/media/gradle.icns\\\"\"\nfi\n\n# For Cygwin, switch paths to Windows format before running java\nif $cygwin ; then\n    APP_HOME=`cygpath --path --mixed \"$APP_HOME\"`\n    CLASSPATH=`cygpath --path --mixed \"$CLASSPATH\"`\n    JAVACMD=`cygpath --unix \"$JAVACMD\"`\n\n    # We build the pattern for arguments to be converted via cygpath\n    ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`\n    SEP=\"\"\n    for dir in $ROOTDIRSRAW ; do\n        ROOTDIRS=\"$ROOTDIRS$SEP$dir\"\n        SEP=\"|\"\n    done\n    OURCYGPATTERN=\"(^($ROOTDIRS))\"\n    # Add a user-defined pattern to the cygpath arguments\n    if [ \"$GRADLE_CYGPATTERN\" != \"\" ] ; then\n        OURCYGPATTERN=\"$OURCYGPATTERN|($GRADLE_CYGPATTERN)\"\n    fi\n    # Now convert the arguments - 
kludge to limit ourselves to /bin/sh\n    i=0\n    for arg in \"$@\" ; do\n        CHECK=`echo \"$arg\"|egrep -c \"$OURCYGPATTERN\" -`\n        CHECK2=`echo \"$arg\"|egrep -c \"^-\"`                                 ### Determine if an option\n\n        if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then                    ### Added a condition\n            eval `echo args$i`=`cygpath --path --ignore --mixed \"$arg\"`\n        else\n            eval `echo args$i`=\"\\\"$arg\\\"\"\n        fi\n        i=$((i+1))\n    done\n    case $i in\n        (0) set -- ;;\n        (1) set -- \"$args0\" ;;\n        (2) set -- \"$args0\" \"$args1\" ;;\n        (3) set -- \"$args0\" \"$args1\" \"$args2\" ;;\n        (4) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" ;;\n        (5) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" ;;\n        (6) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" \"$args5\" ;;\n        (7) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" \"$args5\" \"$args6\" ;;\n        (8) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" \"$args5\" \"$args6\" \"$args7\" ;;\n        (9) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" \"$args5\" \"$args6\" \"$args7\" \"$args8\" ;;\n    esac\nfi\n\n# Split up the JVM_OPTS And GRADLE_OPTS values into an array, following the shell quoting and substitution rules\nfunction splitJvmOpts() {\n    JVM_OPTS=(\"$@\")\n}\neval splitJvmOpts $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS\nJVM_OPTS[${#JVM_OPTS[*]}]=\"-Dorg.gradle.appname=$APP_BASE_NAME\"\n\nexec \"$JAVACMD\" \"${JVM_OPTS[@]}\" -classpath \"$CLASSPATH\" org.gradle.wrapper.GradleWrapperMain \"$@\"\n"
  },
  {
    "path": "wa/workloads/googlephotos/uiauto/gradlew.bat",
    "content": "@if \"%DEBUG%\" == \"\" @echo off\r\n@rem ##########################################################################\r\n@rem\r\n@rem  Gradle startup script for Windows\r\n@rem\r\n@rem ##########################################################################\r\n\r\n@rem Set local scope for the variables with windows NT shell\r\nif \"%OS%\"==\"Windows_NT\" setlocal\r\n\r\n@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.\r\nset DEFAULT_JVM_OPTS=\r\n\r\nset DIRNAME=%~dp0\r\nif \"%DIRNAME%\" == \"\" set DIRNAME=.\r\nset APP_BASE_NAME=%~n0\r\nset APP_HOME=%DIRNAME%\r\n\r\n@rem Find java.exe\r\nif defined JAVA_HOME goto findJavaFromJavaHome\r\n\r\nset JAVA_EXE=java.exe\r\n%JAVA_EXE% -version >NUL 2>&1\r\nif \"%ERRORLEVEL%\" == \"0\" goto init\r\n\r\necho.\r\necho ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.\r\necho.\r\necho Please set the JAVA_HOME variable in your environment to match the\r\necho location of your Java installation.\r\n\r\ngoto fail\r\n\r\n:findJavaFromJavaHome\r\nset JAVA_HOME=%JAVA_HOME:\"=%\r\nset JAVA_EXE=%JAVA_HOME%/bin/java.exe\r\n\r\nif exist \"%JAVA_EXE%\" goto init\r\n\r\necho.\r\necho ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%\r\necho.\r\necho Please set the JAVA_HOME variable in your environment to match the\r\necho location of your Java installation.\r\n\r\ngoto fail\r\n\r\n:init\r\n@rem Get command-line arguments, handling Windowz variants\r\n\r\nif not \"%OS%\" == \"Windows_NT\" goto win9xME_args\r\nif \"%@eval[2+2]\" == \"4\" goto 4NT_args\r\n\r\n:win9xME_args\r\n@rem Slurp the command line arguments.\r\nset CMD_LINE_ARGS=\r\nset _SKIP=2\r\n\r\n:win9xME_args_slurp\r\nif \"x%~1\" == \"x\" goto execute\r\n\r\nset CMD_LINE_ARGS=%*\r\ngoto execute\r\n\r\n:4NT_args\r\n@rem Get arguments from the 4NT Shell from JP Software\r\nset CMD_LINE_ARGS=%$\r\n\r\n:execute\r\n@rem Setup the command line\r\n\r\nset 
CLASSPATH=%APP_HOME%\\gradle\\wrapper\\gradle-wrapper.jar\r\n\r\n@rem Execute Gradle\r\n\"%JAVA_EXE%\" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% \"-Dorg.gradle.appname=%APP_BASE_NAME%\" -classpath \"%CLASSPATH%\" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%\r\n\r\n:end\r\n@rem End local scope for the variables with windows NT shell\r\nif \"%ERRORLEVEL%\"==\"0\" goto mainEnd\r\n\r\n:fail\r\nrem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of\r\nrem the _cmd.exe /c_ return code!\r\nif  not \"\" == \"%GRADLE_EXIT_CONSOLE%\" exit 1\r\nexit /b 1\r\n\r\n:mainEnd\r\nif \"%OS%\"==\"Windows_NT\" endlocal\r\n\r\n:omega\r\n"
  },
  {
    "path": "wa/workloads/googlephotos/uiauto/settings.gradle",
    "content": "include ':app'\n"
  },
  {
    "path": "wa/workloads/googleplaybooks/__init__.py",
    "content": "#    Copyright 2014-2017 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\nfrom wa import ApkUiautoWorkload, Parameter\n\n\nclass Googleplaybooks(ApkUiautoWorkload):\n\n    name = 'googleplaybooks'\n    package_names = ['com.google.android.apps.books']\n\n    description = '''\n    A workload to perform standard productivity tasks with googleplaybooks.\n    This workload performs various tasks, such as searching for a book title\n    online, browsing through a book, adding and removing notes, word searching,\n    and querying information about the book.\n\n    Test description:\n    1. Open Google Play Books application\n    2. Dismisses sync operation (if applicable)\n    3. Searches for a book title\n    4. Adds books to library if not already present\n    5. Opens 'My Library' contents\n    6. Opens selected book\n    7. Gestures are performed to swipe between pages and pinch zoom in and out of a page\n    8. Selects a specified chapter based on page number from the navigation view\n    9. Selects a word in the centre of screen and adds a test note to the page\n    10. Removes the test note from the page (clean up)\n    11. Searches for the number of occurrences of a common word throughout the book\n    12. Switches page styles from 'Day' to 'Night' to 'Sepia' and back to 'Day'\n    13. 
Uses the 'About this book' facility on the currently selected book\n\n    NOTE: This workload requires a network connection (ideally, wifi) to run,\n          a Google account to be setup on the device, and payment details for the account.\n          Free books require payment details to have been setup otherwise it fails.\n          Tip: Install the 'Google Opinion Rewards' app to bypass the need to enter valid\n          card/bank detail.\n\n    Known working APK version: 3.15.5\n    '''\n\n    parameters = [\n        Parameter('search_book_title', kind=str, default='Nikola Tesla: Imagination and the Man That Invented the 20th Century',\n                  description=\"\"\"\n                  The book title to search for within Google Play Books archive.\n                  The book must either be already in the account's library, or free to purchase.\n                  \"\"\"),\n        Parameter('library_book_title', kind=str, default='Nikola Tesla',\n                  description=\"\"\"\n                  The book title to search for within My Library.\n                  The Library name can differ (usually shorter) to the Store name.\n                  If left blank, the ``search_book_title`` will be used.\n                  \"\"\"),\n        Parameter('select_chapter_page_number', kind=int, default=4,\n                  description=\"\"\"\n                  The Page Number to search for within a selected book's Chapter list.\n                  Note: Accepts integers only.\n                  \"\"\"),\n        Parameter('search_word', kind=str, default='the',\n                  description=\"\"\"\n                  The word to search for within a selected book.\n                  Note: Accepts single words only.\n                  \"\"\"),\n        Parameter('account', kind=str, mandatory=False,\n                  description=\"\"\"\n                  If you are running this workload on a device which has more than one\n                  Google account setup, 
then this parameter is used to select which account\n                  to select when prompted.\n                  The account requires the book to have already been purchased or payment details\n                  already associated with the account.\n                  If omitted, the first account in the list will be selected if prompted.\n                  \"\"\"),\n    ]\n\n    # This workload relies on the internet so check that there is a working\n    # internet connection\n    requires_network = True\n\n    def init_resources(self, context):\n        super(Googleplaybooks, self).init_resources(context)\n        self.gui.uiauto_params['search_book_title'] = self.search_book_title\n        # If library_book_title is blank, set it to the same as search_book_title\n        if not self.library_book_title:  # pylint: disable=access-member-before-definition\n            self.library_book_title = self.search_book_title  # pylint: disable=attribute-defined-outside-init\n        self.gui.uiauto_params['library_book_title'] = self.library_book_title\n        self.gui.uiauto_params['chapter_page_number'] = self.select_chapter_page_number\n        self.gui.uiauto_params['search_word'] = self.search_word\n        self.gui.uiauto_params['account'] = self.account\n"
  },
  {
    "path": "wa/workloads/googleplaybooks/uiauto/app/build.gradle",
    "content": "apply plugin: 'com.android.application'\n\ndef packageName = \"com.arm.wa.uiauto.googleplaybooks\"\n\nandroid {\n    compileSdkVersion 28\n    buildToolsVersion \"28.0.3\"\n    defaultConfig {\n        applicationId \"${packageName}\"\n        minSdkVersion 18\n        targetSdkVersion 28\n        testInstrumentationRunner \"android.support.test.runner.AndroidJUnitRunner\"\n    }\n    buildTypes {\n        applicationVariants.all { variant ->\n            variant.outputs.each { output ->\n                output.outputFileName = \"${packageName}.apk\"\n            }\n        }\n    }\n}\n\ndependencies {\n    implementation fileTree(dir: 'libs', include: ['*.jar'])\n    implementation 'com.android.support.test:runner:0.5'\n    implementation 'com.android.support.test:rules:0.5'\n    implementation 'com.android.support.test.uiautomator:uiautomator-v18:2.1.2'\n    implementation(name: 'uiauto', ext:'aar')\n}\n\nrepositories {\n    flatDir {\n        dirs 'libs'\n    }\n}\n"
  },
  {
    "path": "wa/workloads/googleplaybooks/uiauto/app/src/main/AndroidManifest.xml",
    "content": "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n<manifest xmlns:android=\"http://schemas.android.com/apk/res/android\"\n    package=\"com.arm.wa.uiauto.googleplaybooks\"\n    android:versionCode=\"1\"\n    android:versionName=\"1.0\">\n\n\n    <instrumentation\n        android:name=\"android.support.test.runner.AndroidJUnitRunner\"\n        android:targetPackage=\"${applicationId}\"/>\n\n</manifest>\n\n"
  },
  {
    "path": "wa/workloads/googleplaybooks/uiauto/app/src/main/java/com/arm/wa/uiauto/googleplaybooks/UiAutomation.java",
    "content": "/*    Copyright 2014-2018 ARM Limited\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\npackage com.arm.wa.uiauto.googleplaybooks;\n\nimport android.os.Bundle;\nimport android.support.test.runner.AndroidJUnit4;\nimport android.support.test.uiautomator.UiObject;\nimport android.support.test.uiautomator.UiObject2;\nimport android.support.test.uiautomator.UiObjectNotFoundException;\nimport android.support.test.uiautomator.UiSelector;\nimport android.support.test.uiautomator.UiWatcher;\nimport android.support.test.uiautomator.By;\nimport android.util.Log;\n\nimport com.arm.wa.uiauto.UxPerfUiAutomation.GestureTestParams;\nimport com.arm.wa.uiauto.UxPerfUiAutomation.GestureType;\nimport com.arm.wa.uiauto.BaseUiAutomation;\nimport com.arm.wa.uiauto.ApplaunchInterface;\nimport com.arm.wa.uiauto.ActionLogger;\nimport com.arm.wa.uiauto.UiAutoUtils;\n\nimport org.junit.Before;\nimport org.junit.Test;\nimport org.junit.runner.RunWith;\n\nimport java.util.Iterator;\nimport java.util.LinkedHashMap;\nimport java.util.Map;\nimport java.util.Map.Entry;\nimport java.util.concurrent.TimeUnit;\n\nimport static com.arm.wa.uiauto.BaseUiAutomation.FindByCriteria.BY_DESC;\nimport static com.arm.wa.uiauto.BaseUiAutomation.FindByCriteria.BY_ID;\nimport static com.arm.wa.uiauto.BaseUiAutomation.FindByCriteria.BY_TEXT;\n\n\n@RunWith(AndroidJUnit4.class)\npublic class UiAutomation extends BaseUiAutomation implements ApplaunchInterface {\n\n    private int 
viewTimeoutSecs = 10;\n    private long viewTimeout =  TimeUnit.SECONDS.toMillis(viewTimeoutSecs);\n\n    protected Bundle parameters;\n    protected String packageID;\n\n    protected String searchBookTitle;\n    protected String libraryBookTitle;\n    protected int chapterPageNumber;\n    protected String searchWord;\n    protected String noteText;\n\n    @Before\n    public void initialize() {\n        this.uiAutoTimeout = TimeUnit.SECONDS.toMillis(8);\n\n        parameters = getParams();\n        packageID = getPackageID(parameters);\n\n        searchBookTitle = parameters.getString(\"search_book_title\");\n        libraryBookTitle = parameters.getString(\"library_book_title\");\n        chapterPageNumber = parameters.getInt(\"chapter_page_number\");\n        searchWord = parameters.getString(\"search_word\");\n        noteText = \"This is a test note\";\n    }\n\n    @Test\n    public void setup() throws Exception {\n        setScreenOrientation(ScreenOrientation.NATURAL);\n        runApplicationSetup();\n\n        searchForBook(searchBookTitle);\n        addToLibrary();\n        openMyLibrary();\n\n        UiWatcher pageSyncPopUpWatcher = createPopUpWatcher();\n        registerWatcher(\"pageSyncPopUp\", pageSyncPopUpWatcher);\n        runWatchers();\n    }\n    @Test\n    public void runWorkload() throws Exception {\n        openBook(libraryBookTitle);\n        selectChapter(chapterPageNumber);\n        gesturesTest();\n        addNote(noteText);\n        removeNote();\n        searchForWord(searchWord);\n        switchPageStyles();\n        aboutBook(libraryBookTitle);\n        pressBack();\n    }\n\n    @Test\n    public void teardown() throws Exception {\n        removeWatcher(\"pageSyncPopUp\");\n        unsetScreenOrientation();\n    }\n\n    // Get application parameters and clear the initial run dialogues of the application launch.\n    public void runApplicationSetup() throws Exception {\n        String account = parameters.getString(\"account\");\n   
     chooseAccount(account);\n        clearFirstRunDialogues();\n        dismissSendBooksAsGiftsDialog();\n        dismissSync();\n    }\n\n    // Returns the launch command for the application.\n    public String getLaunchCommand() {\n        String launch_command;\n        launch_command = UiAutoUtils.createLaunchCommand(parameters);\n        return launch_command;\n    }\n\n    // Pass the workload parameters, used for applaunch\n    public void setWorkloadParameters(Bundle workload_parameters) {\n        parameters = workload_parameters;\n        packageID = getPackageID(parameters);\n    }\n\n    // Sets the UiObject that marks the end of the application launch.\n    public UiObject getLaunchEndObject() {\n        UiObject launchEndObject = mDevice.findObject(new UiSelector()\n                                         .className(\"android.widget.TextView\")\n                                         .textContains(\"Library\"));\n        return launchEndObject;\n    }\n\n    // If the device has more than one account setup, a prompt appears\n    // In this case, select the first account in the list, unless `account`\n    // has been specified as a parameter, otherwise select `account`.\n    private void chooseAccount(String account) throws Exception {\n        UiObject accountPopup =\n            mDevice.findObject(new UiSelector().textContains(\"Choose an account\")\n                                               .className(\"android.widget.TextView\"));\n        if (accountPopup.exists()) {\n            if (\"None\".equals(account)) {\n                // If no account has been specified, pick the first entry in the list\n                UiObject list =\n                    mDevice.findObject(new UiSelector().className(\"android.widget.ListView\"));\n                UiObject first = list.getChild(new UiSelector().index(0));\n                if (!first.exists()) {\n                    // Some devices are not zero indexed. 
If 0 doesnt exist, pick 1\n                    first = list.getChild(new UiSelector().index(1));\n                }\n                first.click();\n            } else {\n                // Account specified, select that\n                clickUiObject(BY_TEXT, account, \"android.widget.CheckedTextView\");\n            }\n            // Click OK to proceed\n            UiObject ok =\n                mDevice.findObject(new UiSelector().textContains(\"OK\")\n                                                   .className(\"android.widget.Button\")\n                                                   .enabled(true));\n            ok.clickAndWaitForNewWindow();\n        }\n    }\n\n    // If there is no sample book in My library we are prompted to choose a\n    // book the first time application is run. Try to skip the screen or\n    // pick a random sample book.\n    private void clearFirstRunDialogues() throws Exception {\n        UiObject startButton =\n            mDevice.findObject(new UiSelector().resourceId(packageID + \"start_button\"));\n        // First try and skip the sample book selection\n        if (startButton.exists()) {\n            startButton.click();\n        }\n\n        UiObject endButton =\n            mDevice.findObject(new UiSelector().resourceId(packageID + \"end_button\"));\n        // Click next button if it exists\n        if (endButton.exists()) {\n            endButton.click();\n\n            // Select a random sample book to add to My library\n            sleep(1);\n            tapDisplayCentre();\n            sleep(1);\n\n            // Click done button (uses same resource-id)\n            endButton.click();\n        }\n    }\n\n    private void dismissSendBooksAsGiftsDialog() throws Exception {\n        UiObject gotIt =\n            mDevice.findObject(new UiSelector().textContains(\"GOT IT\"));\n        if (gotIt.exists()) {\n            gotIt.click();\n        }\n    }\n\n    private void dismissSync() throws Exception {\n        
UiObject keepSyncOff =\n            mDevice.findObject(new UiSelector().textContains(\"Keep sync off\")\n                                               .className(\"android.widget.Button\"));\n        if (keepSyncOff.exists()) {\n            keepSyncOff.click();\n        }\n    }\n\n    // Searches for a \"free\" or \"purchased\" book title in Google play\n    private void searchForBook(final String bookTitle) throws Exception {\n        UiObject search =\n            mDevice.findObject(new UiSelector().resourceId(packageID + \"menu_search\"));\n        if (!search.exists()) {\n            search =\n                mDevice.findObject(new UiSelector().resourceId(packageID + \"search_box_idle_text\"));\n        }\n        search.click();\n\n        UiObject searchText =\n            mDevice.findObject(new UiSelector().textContains(\"Search\")\n                                               .className(\"android.widget.EditText\"));\n        searchText.setText(bookTitle);\n        pressEnter();\n\n        UiObject resultList =\n            mDevice.findObject(new UiSelector().resourceId(\"com.android.vending:id/search_results_list\"));\n        if (!resultList.waitForExists(viewTimeout)) {\n            resultList =\n            mDevice.findObject(new UiSelector().scrollable(true));\n            if (!resultList.waitForExists(viewTimeout)) {\n                throw new UiObjectNotFoundException(\"Could not find search results list.\");\n            }\n        }\n\n        // Create a selector so that we can search for siblings of the desired\n        // book that contains a \"free\" or \"purchased\" book identifier\n        // For some reason regex matching seems to be failing so explicitly check for both.\n        UiObject labelPurchased =\n            mDevice.findObject(new UiSelector().descriptionContains(String.format(\"Book: %s\", bookTitle))\n                                               .descriptionContains(\"Purchased\"));\n        UiObject labelFree =\n          
  mDevice.findObject(new UiSelector().descriptionContains(String.format(\"Book: %s\", bookTitle))\n                                            .descriptionContains(\"Free\"));\n\n        final int maxSearchTime = 30;\n        int searchTime = maxSearchTime;\n\n        while (!(labelPurchased.exists() || labelFree.exists())) {\n            if (searchTime > 0) {\n                uiDeviceSwipeDown(100);\n                sleep(1);\n                searchTime--;\n            } else {\n                throw new UiObjectNotFoundException(\n                        \"Exceeded maximum search time (\" + maxSearchTime  +\n                        \" seconds) to find book \\\"\" + bookTitle + \"\\\"\");\n            }\n        }\n\n        // Click on either the first \"free\" or \"purchased\" book found that\n        // matches the book title\n        if (labelPurchased.exists()) {\n            labelPurchased.click();\n        }\n        else {\n            labelFree.click();\n        }\n    }\n\n    private void addToLibrary() throws Exception {\n        UiObject add =\n            mDevice.findObject(new UiSelector().textContains(\"ADD TO LIBRARY\")\n                                               .className(\"android.widget.Button\"));\n        if (add.exists()) {\n            // add to My Library and opens book by default\n            add.click();\n            clickUiObject(BY_TEXT, \"BUY\", \"android.widget.Button\", true);\n        } else {\n            // opens book\n            clickUiObject(BY_TEXT, \"READ\", \"android.widget.Button\");\n        }\n\n        waitForPage();\n\n        UiObject navigationButton =\n            mDevice.findObject(new UiSelector().description(\"Navigate up\"));\n\n        // Return to main app window\n        pressBack();\n\n        // On some devices screen ordering is not preserved so check for\n        // navigation button to determine current screen\n        if (navigationButton.exists()) {\n            pressBack();\n            
pressBack();\n        }\n    }\n\n    private void openMyLibrary() throws Exception {\n        String testTag = \"open_library\";\n        ActionLogger logger = new ActionLogger(testTag, parameters);\n\n        logger.start();\n        //clickUiObject(BY_DESC, \"Show navigation drawer\");\n        // To correctly find the UiObject we need to specify the index also here\n        UiObject myLibrary =\n            mDevice.findObject(new UiSelector().className(\"android.widget.TextView\")\n                                               .textMatches(\".*[lL]ibrary\")\n                                               .index(3));\n        if (!myLibrary.exists()) {\n            myLibrary =\n                mDevice.findObject(new UiSelector().resourceId(packageID + \"jump_text\"));\n        }\n        if (!myLibrary.exists()) {\n            myLibrary =\n                mDevice.findObject(new UiSelector().resourceId(packageID + \"bottom_my_library\"));\n        }\n\t\tmyLibrary.clickAndWaitForNewWindow(uiAutoTimeout);\n\n        // Switch to books tab on newer versions\n        UiObject books_tab =\n            mDevice.findObject(new UiSelector().className(\"android.widget.TextView\")\n                                               .textMatches(\"BOOKS\"));\n        if (books_tab.exists()){\n            books_tab.click();\n        }\n\t\tlogger.stop();\n\t}\n\n    private void openBook(final String bookTitle) throws Exception {\n        String testTag = \"open_book\";\n        ActionLogger logger = new ActionLogger(testTag, parameters);\n\n        long maxWaitTimeSeconds = 120;\n        long maxWaitTime = TimeUnit.SECONDS.toMillis(maxWaitTimeSeconds);\n\n        UiSelector bookSelector =\n            new UiSelector().text(bookTitle)\n                            .className(\"android.widget.TextView\");\n        UiObject book = mDevice.findObject(bookSelector);\n        // Check that books are sorted by time added to library. 
This way we\n        // can assume any newly downloaded books will be visible on the first\n        // screen.\n        mDevice.findObject(By.res(packageID + \"menu_sort\")).click();\n        clickUiObject(BY_TEXT, \"Recent\", \"android.widget.TextView\");\n        // When the book is first added to library it may not appear in\n        // cardsGrid until it has been fully downloaded. Wait for fully\n        // downloaded books\n        UiObject downloadComplete =\n            mDevice.findObject(new UiSelector().fromParent(bookSelector)\n                                               .description(\"100% downloaded\"));\n        if (!downloadComplete.waitForExists(maxWaitTime)) {\n                throw new UiObjectNotFoundException(\n                        \"Exceeded maximum wait time (\" + maxWaitTimeSeconds  +\n                        \" seconds) to download book \\\"\" + bookTitle + \"\\\"\");\n        }\n\n        logger.start();\n        book.click();\n        waitForPage();\n        logger.stop();\n    }\n\n    // Creates a watcher for when a pop up warning appears when pages are out\n    // of sync across multiple devices.\n    private UiWatcher createPopUpWatcher() throws Exception {\n        UiWatcher pageSyncPopUpWatcher = new UiWatcher() {\n            @Override\n            public boolean checkForCondition() {\n                UiObject popUpDialogue =\n                    mDevice.findObject(new UiSelector().textStartsWith(\"You're on page\")\n                                                       .resourceId(\"android:id/message\"));\n                // Don't sync and stay on the current page\n                if (popUpDialogue.exists()) {\n                    try {\n                        UiObject stayOnPage =\n                            mDevice.findObject(new UiSelector().text(\"Yes\")\n                                                               .className(\"android.widget.Button\"));\n                        stayOnPage.click();\n                  
  } catch (UiObjectNotFoundException e) {\n                        e.printStackTrace();\n                    }\n                    return popUpDialogue.waitUntilGone(viewTimeout);\n                }\n                return false;\n            }\n        };\n        return pageSyncPopUpWatcher;\n    }\n\n    private void selectChapter(final int chapterPageNumber) throws Exception {\n        getDropdownMenu();\n\n        UiObject contents = getUiObjectByResourceId(packageID + \"menu_reader_toc\");\n        contents.clickAndWaitForNewWindow(uiAutoTimeout);\n        UiObject toChapterView = getUiObjectByResourceId(packageID + \"toc_list_view\",\n                                                         \"android.widget.ExpandableListView\");\n        // Navigate to top of chapter view\n        searchPage(toChapterView, 1, Direction.UP, 10);\n        // Search for chapter page number\n        UiObject page = searchPage(toChapterView, chapterPageNumber, Direction.DOWN, 10);\n        // Go to the page\n        page.clickAndWaitForNewWindow(viewTimeout);\n\n        waitForPage();\n    }\n\n    private void gesturesTest() throws Exception {\n        String testTag = \"gesture\";\n\n        // Perform a range of swipe and pinch gestures while browsing through the book\n        LinkedHashMap<String, GestureTestParams> testParams = new LinkedHashMap<String, GestureTestParams>();\n        testParams.put(\"swipe_left\", new GestureTestParams(GestureType.UIDEVICE_SWIPE, Direction.LEFT, 20));\n        testParams.put(\"swipe_right\", new GestureTestParams(GestureType.UIDEVICE_SWIPE, Direction.RIGHT, 20));\n        testParams.put(\"pinch_out\", new GestureTestParams(GestureType.PINCH, PinchType.OUT, 100, 50));\n        testParams.put(\"pinch_in\", new GestureTestParams(GestureType.PINCH, PinchType.IN, 100, 50));\n\n        Iterator<Entry<String, GestureTestParams>> it = testParams.entrySet().iterator();\n\n        while (it.hasNext()) {\n            Map.Entry<String, 
GestureTestParams> pair = it.next();\n            GestureType type = pair.getValue().gestureType;\n            Direction dir = pair.getValue().gestureDirection;\n            PinchType pinch = pair.getValue().pinchType;\n            int steps = pair.getValue().steps;\n            int percent = pair.getValue().percent;\n\n            String runName = String.format(testTag + \"_\" + pair.getKey());\n            ActionLogger logger = new ActionLogger(runName, parameters);\n\n            UiObject pageView = waitForPage();\n\n            logger.start();\n\n            switch (type) {\n                case UIDEVICE_SWIPE:\n                    uiDeviceSwipe(dir, steps);\n                    break;\n                case PINCH:\n                    uiObjectVertPinch(pageView, pinch, steps, percent);\n                    break;\n                default:\n                    break;\n            }\n\n            logger.stop();\n        }\n\n        waitForPage();\n    }\n\n    private void addNote(final String text) throws Exception {\n        String testTag = \"note_add\";\n        ActionLogger logger = new ActionLogger(testTag, parameters);\n\n        hideDropDownMenu();\n\n        UiObject clickable = mDevice.findObject(new UiSelector().longClickable(true));\n        if (!clickable.exists()){\n            clickable = mDevice.findObject(new UiSelector().resourceIdMatches(\".*/main_page\"));\n        }\n        if (!clickable.exists()){\n            clickable = mDevice.findObject(new UiSelector().resourceIdMatches(\".*/reader\"));\n        }\n\n        logger.start();\n\n        uiObjectPerformLongClick(clickable, 100);\n\n        UiObject addNoteButton =\n            mDevice.findObject(new UiSelector().resourceId(packageID + \"add_note_button\"));\n        addNoteButton.click();\n\n        UiObject noteEditText = getUiObjectByResourceId(packageID + \"note_edit_text\",\n                                                        \"android.widget.EditText\");\n        
noteEditText.setText(text);\n\n        clickUiObject(BY_ID, packageID + \"note_menu_button\", \"android.widget.ImageButton\");\n        clickUiObject(BY_TEXT, \"Save\", \"android.widget.TextView\");\n\n        logger.stop();\n\n        waitForPage();\n    }\n\n    private void removeNote() throws Exception {\n        String testTag = \"note_remove\";\n        ActionLogger logger = new ActionLogger(testTag, parameters);\n\n        UiObject clickable = mDevice.findObject(new UiSelector().longClickable(true));\n        if (!clickable.exists()){\n            clickable = mDevice.findObject(new UiSelector().resourceIdMatches(\".*/main_page\"));\n        }\n        if (!clickable.exists()){\n            clickable = mDevice.findObject(new UiSelector().resourceIdMatches(\".*/reader\"));\n        }\n\n        logger.start();\n\n        uiObjectPerformLongClick(clickable, 100);\n\n        UiObject removeButton =\n            mDevice.findObject(new UiSelector().resourceId(packageID + \"remove_highlight_button\"));\n        removeButton.click();\n\n        clickUiObject(BY_TEXT, \"Remove\", \"android.widget.Button\");\n\n        logger.stop();\n\n        waitForPage();\n    }\n\n    private void searchForWord(final String text) throws Exception {\n        String testTag = \"search_word\";\n        ActionLogger logger = new ActionLogger(testTag, parameters);\n\n        // Allow extra time for search queries involing high freqency words\n        final long searchTimeout =  TimeUnit.SECONDS.toMillis(20);\n\n        getDropdownMenu();\n\n        UiObject search =\n            mDevice.findObject(new UiSelector().resourceId(packageID + \"menu_search\"));\n        search.click();\n\n        UiObject searchText =\n            mDevice.findObject(new UiSelector().resourceId(packageID + \"search_src_text\"));\n\n        logger.start();\n\n        searchText.setText(text);\n        pressEnter();\n\n        UiObject resultList =\n            mDevice.findObject(new 
UiSelector().resourceId(packageID + \"search_results_list\"));\n        if (!resultList.waitForExists(searchTimeout)) {\n            throw new UiObjectNotFoundException(\"Could not find \\\"search results list view\\\".\");\n        }\n\n        UiObject searchWeb =\n            mDevice.findObject(new UiSelector().textMatches(\"Search web|SEARCH WEB\")\n                                         .className(\"android.widget.TextView\"));\n        if (!searchWeb.waitForExists(searchTimeout)) {\n            throw new UiObjectNotFoundException(\"Could not find \\\"Search web view\\\".\");\n        }\n\n        logger.stop();\n\n        pressBack();\n    }\n\n    private void switchPageStyles() throws Exception {\n        String testTag = \"style\";\n\n        getDropdownMenu();\n\n        clickUiObject(BY_ID, packageID + \"menu_reader_settings\", \"android.widget.TextView\");\n\n        // Check for lighting option button on newer versions\n        UiObject lightingOptionsButton =\n            mDevice.findObject(new UiSelector().resourceId(packageID + \"lighting_options_button\"));\n        if (lightingOptionsButton.exists()) {\n            lightingOptionsButton.click();\n        }\n\n        String[] styles = {\"Night\", \"Sepia\", \"Day\"};\n        for (String style : styles) {\n            try {\n                ActionLogger logger = new ActionLogger(testTag + \"_\" + style, parameters);\n                UiObject pageStyle =\n                    mDevice.findObject(new UiSelector().description(style));\n\n                logger.start();\n                pageStyle.clickAndWaitForNewWindow(viewTimeout);\n                logger.stop();\n\n            } catch (UiObjectNotFoundException e) {\n                // On some devices the lighting options menu disappears\n                // between clicks. 
Searching for the menu again would affect\n                // the logger timings so log a message and continue\n                Log.e(\"GooglePlayBooks\", \"Could not find pageStyle \\\"\" + style + \"\\\"\");\n            }\n        }\n\n        sleep(2);\n        tapDisplayCentre(); // exit reader settings dialog\n        waitForPage();\n    }\n\n    private void aboutBook(final String bookTitle) throws Exception {\n        String testTag = \"open_about\";\n        ActionLogger logger = new ActionLogger(testTag, parameters);\n\n        getDropdownMenu();\n\n        clickUiObject(BY_DESC, \"More options\", \"android.widget.ImageView\");\n\n        UiObject bookInfo = getUiObjectByText(\"About this book\", \"android.widget.TextView\");\n\n        logger.start();\n\n        bookInfo.clickAndWaitForNewWindow(uiAutoTimeout);\n\n        UiObject detailsPanel =\n            mDevice.findObject(new UiSelector().textContains(bookTitle));\n        waitObject(detailsPanel, viewTimeoutSecs);\n\n        logger.stop();\n\n        pressBack();\n    }\n\n    // Helper for waiting on a page between actions\n    private UiObject waitForPage() throws Exception {\n        UiObject activityReader =\n            mDevice.findObject(new UiSelector().resourceId(packageID + \"activity_reader\")\n                                               .childSelector(new UiSelector()\n                                               .focusable(true)));\n        // On some devices the object in the view hierarchy is found before it\n        // becomes visible on the screen. 
Therefore add pause instead.\n        sleep(3);\n\n        dismissNightLight();\n\n        if (!activityReader.waitForExists(viewTimeout)) {\n            throw new UiObjectNotFoundException(\"Could not find \\\"activity reader view\\\".\");\n        }\n\n        return activityReader;\n    }\n\n    // Helper for accessing the drop down menu\n    private void getDropdownMenu() throws Exception {\n        UiObject actionBar =\n            mDevice.findObject(new UiSelector().resourceId(packageID + \"action_bar\"));\n        if (!actionBar.exists()) {\n            tapDisplayCentre();\n            sleep(1); // Allow previous views to settle\n        }\n\n        UiObject card =\n            mDevice.findObject(new UiSelector().resourceId(packageID + \"cards\")\n                                               .className(\"android.view.ViewGroup\"));\n        if (card.exists()) {\n            // On rare occasions tapping a certain word that appears in the centre\n            // of the display will bring up a card to describe the word.\n            // (Such as a place will bring a map of its location)\n            // In this situation, tap centre to go back, and try again\n            // at a different set of coordinates\n            int x = (int)(getDisplayCentreWidth() * 0.8);\n            int y = (int)(getDisplayCentreHeight() * 0.8);\n            while (card.exists()) {\n                tapDisplay(x, y);\n                sleep(1);\n            }\n\n            tapDisplay(x, y);\n            sleep(1); // Allow previous views to settle\n        }\n\n        if (!actionBar.exists()) {\n            throw new UiObjectNotFoundException(\"Could not find \\\"action bar\\\".\");\n        }\n    }\n\n    private void hideDropDownMenu() throws Exception {\n        UiObject actionBar =\n            mDevice.findObject(new UiSelector().resourceId(packageID + \"action_bar\"));\n        if (actionBar.exists()) {\n            tapDisplayCentre();\n            sleep(1); // Allow previous 
views to settle\n        }\n\n        if (actionBar.exists()) {\n            throw new UiObjectNotFoundException(\"Could not close \\\"action bar\\\".\");\n        }\n    }\n\n    private UiObject searchPage(final UiObject view, final int pagenum, final Direction updown,\n                                final int attempts) throws Exception {\n        if (attempts <= 0) {\n            throw new UiObjectNotFoundException(\"Could not find \\\"page number\\\" after several attempts.\");\n        }\n\n        UiObject page =\n            mDevice.findObject(new UiSelector().description(String.format(\"page \" + Integer.toString(pagenum)))\n                                         .className(\"android.widget.TextView\"));\n        if (!page.exists()) {\n            // Scroll up by swiping down\n            if (updown == Direction.UP) {\n                view.swipeDown(200);\n            // Default case is to scroll down (swipe up)\n            } else {\n                view.swipeUp(200);\n            }\n            page = searchPage(view, pagenum, updown, attempts - 1);\n        }\n        return page;\n    }\n\n    private void dismissNightLight() throws Exception {\n        UiObject night =\n            mDevice.findObject(new UiSelector().text(\"Night Light makes reading easy\"));\n        if (night.exists()) {\n            clickUiObject(BY_TEXT, \"DISMISS\");\n        }\n    }\n}\n"
  },
  {
    "path": "wa/workloads/googleplaybooks/uiauto/build.gradle",
    "content": "// Top-level build file where you can add configuration options common to all sub-projects/modules.\n\nbuildscript {\n    repositories {\n        jcenter()\n        google()\n    }\n    dependencies {\n        classpath 'com.android.tools.build:gradle:7.2.1'\n\n        // NOTE: Do not place your application dependencies here; they belong\n        // in the individual module build.gradle files\n    }\n}\n\nallprojects {\n    repositories {\n        jcenter()\n        google()\n    }\n}\n\ntask clean(type: Delete) {\n    delete rootProject.buildDir\n}\n"
  },
  {
    "path": "wa/workloads/googleplaybooks/uiauto/build.sh",
    "content": "#!/bin/bash\n#    Copyright 2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\n\n# CD into build dir if possible - allows building from any directory\nscript_path='.'\nif `readlink -f $0 &>/dev/null`; then\n    script_path=`readlink -f $0 2>/dev/null`\nfi\nscript_dir=`dirname $script_path`\ncd $script_dir\n\n# Ensure gradlew exists before starting\nif [[ ! -f gradlew ]]; then\n    echo 'gradlew file not found! Check that you are in the right directory.'\n    exit 9\nfi\n\n# Copy base class library from wa dist\nlibs_dir=app/libs\nmkdir -p $libs_dir\nbase_class=`python3 -c \"import os, wa; print(os.path.join(os.path.dirname(wa.__file__), 'framework', 'uiauto', 'uiauto.aar'))\"`\ncp $base_class $libs_dir\n\n# Build and return appropriate exit code if failed\n# gradle build\n./gradlew clean :app:assembleDebug\nexit_code=$?\nif [[ $exit_code -ne 0 ]]; then\n    echo \"ERROR: 'gradle build' exited with code $exit_code\"\n    exit $exit_code\nfi\n\n# If successful move APK file to workload folder (overwrite previous)\npackage=com.arm.wa.uiauto.googleplaybooks\nrm -f ../$package\nif [[ -f app/build/outputs/apk/debug/$package.apk ]]; then\n    cp app/build/outputs/apk/debug/$package.apk ../$package.apk\nelse\n    echo 'ERROR: UiAutomator apk could not be found!'\n    exit 9\nfi\n"
  },
  {
    "path": "wa/workloads/googleplaybooks/uiauto/gradle/wrapper/gradle-wrapper.properties",
    "content": "#Wed May 03 15:42:44 BST 2017\ndistributionBase=GRADLE_USER_HOME\ndistributionPath=wrapper/dists\nzipStoreBase=GRADLE_USER_HOME\nzipStorePath=wrapper/dists\ndistributionUrl=https\\://services.gradle.org/distributions/gradle-7.3.3-all.zip\n"
  },
  {
    "path": "wa/workloads/googleplaybooks/uiauto/gradlew",
    "content": "#!/usr/bin/env bash\n\n##############################################################################\n##\n##  Gradle start up script for UN*X\n##\n##############################################################################\n\n# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.\nDEFAULT_JVM_OPTS=\"\"\n\nAPP_NAME=\"Gradle\"\nAPP_BASE_NAME=`basename \"$0\"`\n\n# Use the maximum available, or set MAX_FD != -1 to use that value.\nMAX_FD=\"maximum\"\n\nwarn ( ) {\n    echo \"$*\"\n}\n\ndie ( ) {\n    echo\n    echo \"$*\"\n    echo\n    exit 1\n}\n\n# OS specific support (must be 'true' or 'false').\ncygwin=false\nmsys=false\ndarwin=false\ncase \"`uname`\" in\n  CYGWIN* )\n    cygwin=true\n    ;;\n  Darwin* )\n    darwin=true\n    ;;\n  MINGW* )\n    msys=true\n    ;;\nesac\n\n# Attempt to set APP_HOME\n# Resolve links: $0 may be a link\nPRG=\"$0\"\n# Need this for relative symlinks.\nwhile [ -h \"$PRG\" ] ; do\n    ls=`ls -ld \"$PRG\"`\n    link=`expr \"$ls\" : '.*-> \\(.*\\)$'`\n    if expr \"$link\" : '/.*' > /dev/null; then\n        PRG=\"$link\"\n    else\n        PRG=`dirname \"$PRG\"`\"/$link\"\n    fi\ndone\nSAVED=\"`pwd`\"\ncd \"`dirname \\\"$PRG\\\"`/\" >/dev/null\nAPP_HOME=\"`pwd -P`\"\ncd \"$SAVED\" >/dev/null\n\nCLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar\n\n# Determine the Java command to use to start the JVM.\nif [ -n \"$JAVA_HOME\" ] ; then\n    if [ -x \"$JAVA_HOME/jre/sh/java\" ] ; then\n        # IBM's JDK on AIX uses strange locations for the executables\n        JAVACMD=\"$JAVA_HOME/jre/sh/java\"\n    else\n        JAVACMD=\"$JAVA_HOME/bin/java\"\n    fi\n    if [ ! 
-x \"$JAVACMD\" ] ; then\n        die \"ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME\n\nPlease set the JAVA_HOME variable in your environment to match the\nlocation of your Java installation.\"\n    fi\nelse\n    JAVACMD=\"java\"\n    which java >/dev/null 2>&1 || die \"ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.\n\nPlease set the JAVA_HOME variable in your environment to match the\nlocation of your Java installation.\"\nfi\n\n# Increase the maximum file descriptors if we can.\nif [ \"$cygwin\" = \"false\" -a \"$darwin\" = \"false\" ] ; then\n    MAX_FD_LIMIT=`ulimit -H -n`\n    if [ $? -eq 0 ] ; then\n        if [ \"$MAX_FD\" = \"maximum\" -o \"$MAX_FD\" = \"max\" ] ; then\n            MAX_FD=\"$MAX_FD_LIMIT\"\n        fi\n        ulimit -n $MAX_FD\n        if [ $? -ne 0 ] ; then\n            warn \"Could not set maximum file descriptor limit: $MAX_FD\"\n        fi\n    else\n        warn \"Could not query maximum file descriptor limit: $MAX_FD_LIMIT\"\n    fi\nfi\n\n# For Darwin, add options to specify how the application appears in the dock\nif $darwin; then\n    GRADLE_OPTS=\"$GRADLE_OPTS \\\"-Xdock:name=$APP_NAME\\\" \\\"-Xdock:icon=$APP_HOME/media/gradle.icns\\\"\"\nfi\n\n# For Cygwin, switch paths to Windows format before running java\nif $cygwin ; then\n    APP_HOME=`cygpath --path --mixed \"$APP_HOME\"`\n    CLASSPATH=`cygpath --path --mixed \"$CLASSPATH\"`\n    JAVACMD=`cygpath --unix \"$JAVACMD\"`\n\n    # We build the pattern for arguments to be converted via cygpath\n    ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`\n    SEP=\"\"\n    for dir in $ROOTDIRSRAW ; do\n        ROOTDIRS=\"$ROOTDIRS$SEP$dir\"\n        SEP=\"|\"\n    done\n    OURCYGPATTERN=\"(^($ROOTDIRS))\"\n    # Add a user-defined pattern to the cygpath arguments\n    if [ \"$GRADLE_CYGPATTERN\" != \"\" ] ; then\n        OURCYGPATTERN=\"$OURCYGPATTERN|($GRADLE_CYGPATTERN)\"\n    fi\n    # Now convert the arguments - 
kludge to limit ourselves to /bin/sh\n    i=0\n    for arg in \"$@\" ; do\n        CHECK=`echo \"$arg\"|egrep -c \"$OURCYGPATTERN\" -`\n        CHECK2=`echo \"$arg\"|egrep -c \"^-\"`                                 ### Determine if an option\n\n        if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then                    ### Added a condition\n            eval `echo args$i`=`cygpath --path --ignore --mixed \"$arg\"`\n        else\n            eval `echo args$i`=\"\\\"$arg\\\"\"\n        fi\n        i=$((i+1))\n    done\n    case $i in\n        (0) set -- ;;\n        (1) set -- \"$args0\" ;;\n        (2) set -- \"$args0\" \"$args1\" ;;\n        (3) set -- \"$args0\" \"$args1\" \"$args2\" ;;\n        (4) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" ;;\n        (5) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" ;;\n        (6) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" \"$args5\" ;;\n        (7) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" \"$args5\" \"$args6\" ;;\n        (8) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" \"$args5\" \"$args6\" \"$args7\" ;;\n        (9) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" \"$args5\" \"$args6\" \"$args7\" \"$args8\" ;;\n    esac\nfi\n\n# Split up the JVM_OPTS And GRADLE_OPTS values into an array, following the shell quoting and substitution rules\nfunction splitJvmOpts() {\n    JVM_OPTS=(\"$@\")\n}\neval splitJvmOpts $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS\nJVM_OPTS[${#JVM_OPTS[*]}]=\"-Dorg.gradle.appname=$APP_BASE_NAME\"\n\nexec \"$JAVACMD\" \"${JVM_OPTS[@]}\" -classpath \"$CLASSPATH\" org.gradle.wrapper.GradleWrapperMain \"$@\"\n"
  },
  {
    "path": "wa/workloads/googleplaybooks/uiauto/gradlew.bat",
    "content": "@if \"%DEBUG%\" == \"\" @echo off\r\n@rem ##########################################################################\r\n@rem\r\n@rem  Gradle startup script for Windows\r\n@rem\r\n@rem ##########################################################################\r\n\r\n@rem Set local scope for the variables with windows NT shell\r\nif \"%OS%\"==\"Windows_NT\" setlocal\r\n\r\n@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.\r\nset DEFAULT_JVM_OPTS=\r\n\r\nset DIRNAME=%~dp0\r\nif \"%DIRNAME%\" == \"\" set DIRNAME=.\r\nset APP_BASE_NAME=%~n0\r\nset APP_HOME=%DIRNAME%\r\n\r\n@rem Find java.exe\r\nif defined JAVA_HOME goto findJavaFromJavaHome\r\n\r\nset JAVA_EXE=java.exe\r\n%JAVA_EXE% -version >NUL 2>&1\r\nif \"%ERRORLEVEL%\" == \"0\" goto init\r\n\r\necho.\r\necho ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.\r\necho.\r\necho Please set the JAVA_HOME variable in your environment to match the\r\necho location of your Java installation.\r\n\r\ngoto fail\r\n\r\n:findJavaFromJavaHome\r\nset JAVA_HOME=%JAVA_HOME:\"=%\r\nset JAVA_EXE=%JAVA_HOME%/bin/java.exe\r\n\r\nif exist \"%JAVA_EXE%\" goto init\r\n\r\necho.\r\necho ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%\r\necho.\r\necho Please set the JAVA_HOME variable in your environment to match the\r\necho location of your Java installation.\r\n\r\ngoto fail\r\n\r\n:init\r\n@rem Get command-line arguments, handling Windowz variants\r\n\r\nif not \"%OS%\" == \"Windows_NT\" goto win9xME_args\r\nif \"%@eval[2+2]\" == \"4\" goto 4NT_args\r\n\r\n:win9xME_args\r\n@rem Slurp the command line arguments.\r\nset CMD_LINE_ARGS=\r\nset _SKIP=2\r\n\r\n:win9xME_args_slurp\r\nif \"x%~1\" == \"x\" goto execute\r\n\r\nset CMD_LINE_ARGS=%*\r\ngoto execute\r\n\r\n:4NT_args\r\n@rem Get arguments from the 4NT Shell from JP Software\r\nset CMD_LINE_ARGS=%$\r\n\r\n:execute\r\n@rem Setup the command line\r\n\r\nset 
CLASSPATH=%APP_HOME%\\gradle\\wrapper\\gradle-wrapper.jar\r\n\r\n@rem Execute Gradle\r\n\"%JAVA_EXE%\" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% \"-Dorg.gradle.appname=%APP_BASE_NAME%\" -classpath \"%CLASSPATH%\" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%\r\n\r\n:end\r\n@rem End local scope for the variables with windows NT shell\r\nif \"%ERRORLEVEL%\"==\"0\" goto mainEnd\r\n\r\n:fail\r\nrem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of\r\nrem the _cmd.exe /c_ return code!\r\nif  not \"\" == \"%GRADLE_EXIT_CONSOLE%\" exit 1\r\nexit /b 1\r\n\r\n:mainEnd\r\nif \"%OS%\"==\"Windows_NT\" endlocal\r\n\r\n:omega\r\n"
  },
  {
    "path": "wa/workloads/googleplaybooks/uiauto/settings.gradle",
    "content": "include ':app'\n"
  },
  {
    "path": "wa/workloads/googleslides/__init__.py",
    "content": "#    Copyright 2014-2017 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\nimport os\n\nfrom wa import ApkUiautoWorkload, Parameter\nfrom wa.framework.exception import ValidationError\n\n\nclass GoogleSlides(ApkUiautoWorkload):\n\n    name = 'googleslides'\n    package_names = ['com.google.android.apps.docs.editors.slides']\n\n    description = '''\n    A workload to perform standard productivity tasks with Google Slides. The workload carries\n    out various tasks, such as creating a new presentation, adding text, images, and shapes,\n    as well as basic editing and playing a slideshow.\n    This workload should be able to run without a network connection.\n\n    There are two main scenarios:\n      1. create test: a presentation is created in-app and some editing done on it,\n      2. load test: a pre-existing PowerPoint file is copied onto the device for testing.\n\n    --- create ---\n    Create a new file in the application and perform basic editing on it. This test also\n    requires an image file specified by the param ``test_image`` to be copied onto the device.\n\n    Test description:\n\n    1. Start the app and skip the welcome screen. Dismiss the work offline banner if present.\n    2. Go to the app settings page and enables PowerPoint compatibility mode. This allows\n       PowerPoint files to be created inside Google Slides.\n    3. 
Create a new PowerPoint presentation in the app (PPT compatibility mode) with a title\n       slide and save it to device storage.\n    4. Insert another slide and to it insert the pushed image by picking it from the gallery.\n    5. Insert a final slide and add a shape to it. Resize and drag the shape to modify it.\n    6. Finally, navigate back to the documents list.\n\n    --- load ---\n    Copy a PowerPoint presentation onto the device to test slide navigation. The PowerPoint\n    file to be copied is given by ``test_file``.\n\n    Test description:\n\n    1. From the documents list (following the create test), open the specified PowerPoint\n       by navigating into device storage and wait for it to be loaded.\n    2. A navigation test is performed while the file is in editing mode (i.e. not slideshow).\n       swiping forward to the next slide until ``slide_count`` swipes are performed.\n    3. While still in editing mode, the same action is done in the reverse direction back to\n       the first slide.\n    4. Enter presentation mode by selecting to play the slideshow.\n    5. Swipe forward to play the slideshow, for a maximum number of ``slide_count`` swipes.\n    6. Finally, repeat the previous step in the reverse direction while still in presentation\n       mode, navigating back to the first slide.\n\n    NOTE: There are known issues with the reliability of this workload on some targets.\n    It MAY NOT ALWAYS WORK on your device. 
If you do run into problems, it might help to\n    set ``do_text_entry`` parameter to ``False``.\n\n    Known working APK version: 1.20.442.04.40\n    '''\n\n    parameters = [\n        Parameter('test_image', kind=str, default='uxperf_1600x1200.jpg',\n                  description='''\n                  An image to be copied onto the device that will be embedded in the\n                  PowerPoint file as part of the test.\n                  '''),\n        Parameter('test_file', kind=str, default='uxperf_test_doc.pptx',\n                  description='''\n                  If specified, the workload will copy the PowerPoint file to be used for\n                  testing onto the device. Otherwise, a file will be created inside the app.\n                  '''),\n        Parameter('slide_count', kind=int, default=5,\n                  description='''\n                  Number of slides in aforementioned local file. Determines number of\n                  swipe actions when playing slide show.\n                  '''),\n        Parameter('do_text_entry', kind=bool, default=True,\n                  description='''\n                  If set to ``True``, will attempt to enter text in the first slide as part\n                  of the test. 
Currently seems to be problematic on some devices, most\n                  notably Samsung devices.\n                  ''')\n    ]\n\n    # Created file will be saved with this name\n    new_doc_name = \"WORKLOAD AUTOMATION\"\n\n    def __init__(self, target, **kwargs):\n        super(GoogleSlides, self).__init__(target, **kwargs)\n        self.run_timeout = 600\n        self.deployable_assets = [self.test_image, self.test_file]\n\n    def init_resources(self, context):\n        super(GoogleSlides, self).init_resources(context)\n        # Allows for getting working directory regardless if path ends with a '/'\n        work_dir = self.target.working_directory\n        work_dir = work_dir if work_dir[-1] != os.sep else work_dir[:-1]\n        self.gui.uiauto_params['workdir_name'] = self.target.path.basename(work_dir)\n        self.gui.uiauto_params['test_file'] = self.test_file\n        self.gui.uiauto_params['slide_count'] = self.slide_count\n        self.gui.uiauto_params['do_text_entry'] = self.do_text_entry\n        self.gui.uiauto_params['new_doc_name'] = self.new_doc_name\n        # Only accept certain image formats\n        if os.path.splitext(self.test_image.lower())[1] not in ['.jpg', '.jpeg', '.png']:\n            raise ValidationError('{} must be a JPEG or PNG file'.format(self.test_image))\n        # Only accept certain presentation formats\n        if os.path.splitext(self.test_file.lower())[1] not in ['.pptx']:\n            raise ValidationError('{} must be a PPTX file'.format(self.test_file))\n"
  },
  {
    "path": "wa/workloads/googleslides/uiauto/app/build.gradle",
    "content": "apply plugin: 'com.android.application'\n\ndef packageName = \"com.arm.wa.uiauto.googleslides\"\n\nandroid {\n    compileSdkVersion 28\n    buildToolsVersion \"28.0.3\"\n    defaultConfig {\n        applicationId \"${packageName}\"\n        minSdkVersion 18\n        targetSdkVersion 28\n        testInstrumentationRunner \"android.support.test.runner.AndroidJUnitRunner\"\n    }\n    buildTypes {\n        applicationVariants.all { variant ->\n            variant.outputs.each { output ->\n                output.outputFileName = \"${packageName}.apk\"\n            }\n        }\n    }\n}\n\ndependencies {\n    implementation fileTree(dir: 'libs', include: ['*.jar'])\n    implementation 'com.android.support.test:runner:0.5'\n    implementation 'com.android.support.test:rules:0.5'\n    // Using older version of uitautomator as the latest version can't find canvas elements.\n    implementation 'com.android.support.test.uiautomator:uiautomator-v18:2.1.1'\n    implementation(name: 'uiauto', ext:'aar')\n}\n\nrepositories {\n    flatDir {\n        dirs 'libs'\n    }\n}\n"
  },
  {
    "path": "wa/workloads/googleslides/uiauto/app/src/main/AndroidManifest.xml",
    "content": "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n<manifest xmlns:android=\"http://schemas.android.com/apk/res/android\"\n    package=\"com.arm.wa.uiauto.googleslides\"\n    android:versionCode=\"1\"\n    android:versionName=\"1.0\">\n\n    <instrumentation\n        android:name=\"android.support.test.runner.AndroidJUnitRunner\"\n        android:targetPackage=\"${applicationId}\"/>\n\n</manifest>\n\n"
  },
  {
    "path": "wa/workloads/googleslides/uiauto/app/src/main/java/com/arm/wa/uiauto/googleslides/UiAutomation.java",
    "content": "/*    Copyright 2014-2018 ARM Limited\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\npackage com.arm.wa.uiauto.googleslides;\n\nimport android.graphics.Rect;\nimport android.os.Bundle;\nimport android.os.SystemClock;\nimport android.support.test.runner.AndroidJUnit4;\nimport android.support.test.uiautomator.Configurator;\nimport android.support.test.uiautomator.UiObject;\nimport android.support.test.uiautomator.UiObject2;\nimport android.support.test.uiautomator.UiScrollable;\nimport android.support.test.uiautomator.UiSelector;\nimport android.support.test.uiautomator.By;\n\nimport com.arm.wa.uiauto.BaseUiAutomation;\nimport com.arm.wa.uiauto.UxPerfUiAutomation;\nimport com.arm.wa.uiauto.ActionLogger;\n\nimport org.junit.Before;\nimport org.junit.Test;\nimport org.junit.runner.RunWith;\n\nimport static com.arm.wa.uiauto.BaseUiAutomation.FindByCriteria.BY_DESC;\nimport static com.arm.wa.uiauto.BaseUiAutomation.FindByCriteria.BY_ID;\nimport static com.arm.wa.uiauto.BaseUiAutomation.FindByCriteria.BY_TEXT;\n\n\n@RunWith(AndroidJUnit4.class)\npublic class UiAutomation extends BaseUiAutomation {\n\n    protected Bundle parameters;\n    protected String packageName;\n    protected String packageID;\n\n    protected String newDocumentName;\n    protected String pushedDocumentName;\n    protected int slideCount;\n    protected boolean doTextEntry;\n    protected String workingDirectoryName;\n\n    public static final int 
WAIT_TIMEOUT_1SEC = 1000;\n    public static final int SLIDE_WAIT_TIME_MS = 200;\n    public static final int DEFAULT_SWIPE_STEPS = 10;\n\n    @Before\n    public void initialize() {\n        parameters = getParams();\n        packageID = getPackageID(parameters);\n\n        newDocumentName = parameters.getString(\"new_doc_name\");\n        pushedDocumentName = parameters.getString(\"test_file\");\n        slideCount = parameters.getInt(\"slide_count\");\n        doTextEntry = parameters.getBoolean(\"do_text_entry\");\n        workingDirectoryName = parameters.getString(\"workdir_name\");\n    }\n\n    @Test\n    public void setup() throws Exception {\n        setScreenOrientation(ScreenOrientation.NATURAL);\n        changeAckTimeout(100);\n\n        skipWelcomeScreen();\n        sleep(1);\n        dismissUpdateDialog();\n        sleep(1);\n        dismissWorkOfflineBanner();\n        sleep(1);\n        enablePowerpointCompat();\n        sleep(1);\n    }\n\n    @Test\n    public void runWorkload() throws Exception {\n        testEditNewSlidesDocument(newDocumentName, workingDirectoryName, doTextEntry);\n        openDocument(pushedDocumentName, workingDirectoryName);\n        waitForProgress(WAIT_TIMEOUT_1SEC*30);\n        testSlideshowFromStorage(slideCount);\n    }\n\n    @Test\n    public void teardown() throws Exception {\n        unsetScreenOrientation();\n    }\n\n    public void dismissWorkOfflineBanner() throws Exception {\n        UiObject banner =\n                mDevice.findObject(new UiSelector().textContains(\"Work offline\"));\n        if (banner.waitForExists(WAIT_TIMEOUT_1SEC)) {\n            clickUiObject(BY_TEXT, \"Got it\", \"android.widget.Button\");\n        }\n    }\n\n    public void dismissUpdateDialog() throws Exception {\n        UiObject update =\n                mDevice.findObject(new UiSelector().textContains(\"App update recommended\"));\n        if (update.waitForExists(WAIT_TIMEOUT_1SEC)) {\n            clickUiObject(BY_TEXT, 
\"Dismiss\");\n        }\n    }\n\n    public void enterTextInSlide(String viewName, String textToEnter) throws Exception {\n        UiObject view =\n                mDevice.findObject(new UiSelector().descriptionMatches(\".*[Cc]anvas.*\")\n                                        .childSelector(new UiSelector()\n                                        .descriptionMatches(viewName)));\n        view.click();\n        mDevice.pressEnter();\n        view.legacySetText(textToEnter);\n\n        tapOpenArea();\n        // On some devices, keyboard pops up when entering text, and takes a noticeable\n        // amount of time (few milliseconds) to disappear after clicking Done.\n        // In these cases, trying to find a view immediately after entering text leads\n        // to an exception, so a short wait-time is added for stability.\n        SystemClock.sleep(SLIDE_WAIT_TIME_MS);\n    }\n\n    public void insertSlide(String slideLayout) throws Exception {\n        UiObject add_slide =\n                mDevice.findObject(new UiSelector().descriptionContains(\"Add slide\"));\n\n        // If we can't see the add slide button the keyboard might still be visible.\n        if (!add_slide.exists()) {\n            mDevice.pressBack();\n        }\n        add_slide.waitForExists(WAIT_TIMEOUT_1SEC);\n        add_slide.click();\n\n        UiObject slide_layout = mDevice.findObject(new UiSelector().textContains(slideLayout));\n\n        if (!slide_layout.exists()){\n            tapOpenArea();\n            UiObject done_button = mDevice.findObject(new UiSelector().resourceId(\"android:id/action_mode_close_button\"));\n            if (done_button.exists()){\n                done_button.click();\n            }\n            add_slide.click();\n        }\n        slide_layout.click();\n\n    }\n\n    public void insertImage(String workingDirectoryName) throws Exception {\n        UiObject insertButton = mDevice.findObject(new UiSelector().descriptionContains(\"Insert\"));\n        if 
(insertButton.exists()) {\n            insertButton.click();\n        } else {\n            clickUiObject(BY_DESC, \"More options\");\n            clickUiObject(BY_TEXT, \"Insert\");\n        }\n        clickUiObject(BY_TEXT, \"Image\", true);\n        clickUiObject(BY_TEXT, \"From photos\");\n\n        UiObject imagesFolder = mDevice.findObject(new UiSelector().className(\"android.widget.TextView\").textContains(\"Images\"));\n        UiObject moreOptions = mDevice.findObject(new UiSelector().descriptionMatches(\"More [Oo]ptions\"));\n        // On some devices the images tab is missing so we need to select the local storage.\n        UiObject localDevice = mDevice.findObject(new UiSelector().textMatches(\".*[GM]B free\"));\n        if (!imagesFolder.waitForExists(WAIT_TIMEOUT_1SEC*10)) {\n            showRoots();\n        }\n        if (imagesFolder.exists()) {\n            imagesFolder.click();\n        } else if (moreOptions.exists()){\n            // The local storage can be hidden by default so we need to enable showing it.\n            moreOptions.click();\n            moreOptions.click();\n            UiObject internal_storage = mDevice.findObject(new UiSelector().textContains(\"Show internal storage\"));\n            if (internal_storage.exists()){\n                internal_storage.click();\n            }\n            mDevice.pressBack();\n            showRoots();\n        }\n        else if (localDevice.exists()){\n            localDevice.click();\n        }\n\n        UiObject folderEntry = mDevice.findObject(new UiSelector().textContains(workingDirectoryName));\n        UiScrollable list = new UiScrollable(new UiSelector().scrollable(true));\n        if (!folderEntry.exists() && list.waitForExists(WAIT_TIMEOUT_1SEC)) {\n            list.scrollIntoView(folderEntry);\n        } else {\n            folderEntry.waitForExists(WAIT_TIMEOUT_1SEC*10);\n        }\n        folderEntry.clickAndWaitForNewWindow();\n\n        UiObject picture = mDevice.findObject(new 
UiSelector().resourceId(\"com.android.documentsui:id/details\"));\n        if (!picture.exists()) {\n            UiObject pictureAlternate = mDevice.findObject(new UiSelector().resourceId(\"com.android.documentsui:id/date\").enabled(true));\n            pictureAlternate.click();\n        } else {\n            picture.click();\n        }\n        UiObject done_button = mDevice.findObject(new UiSelector().resourceId(\"android:id/action_mode_close_button\"));\n        if (done_button.exists()){\n            done_button.click();\n        }\n    }\n\n    public void insertShape(String shapeName) throws Exception {\n        String testTag = \"shape_insert\";\n        ActionLogger logger = new ActionLogger(testTag, parameters);\n\n        UiObject insertButton =\n                mDevice.findObject(new UiSelector().descriptionContains(\"Insert\"));\n        logger.start();\n        if (insertButton.exists()) {\n            insertButton.click();\n        } else {\n            clickUiObject(BY_DESC, \"More options\");\n            clickUiObject(BY_TEXT, \"Insert\");\n        }\n        clickUiObject(BY_TEXT, \"Shape\");\n        clickUiObject(BY_DESC, shapeName);\n        logger.stop();\n    }\n\n    public void modifyShape(String shapeName) throws Exception {\n        String testTag = \"shape_resize\";\n        ActionLogger logger = new ActionLogger(testTag, parameters);\n\n        UiObject resizeHandle =\n                mDevice.findObject(new UiSelector().descriptionMatches(\".*Bottom[- ]right resize.*\"));\n        Rect bounds = resizeHandle.getVisibleBounds();\n        int newX = bounds.left - 40;\n        int newY = bounds.bottom - 40;\n        logger.start();\n        resizeHandle.dragTo(newX, newY, 40);\n        logger.stop();\n\n        testTag = \"shape_drag\";\n        logger = new ActionLogger(testTag, parameters);\n\n        UiObject shapeSelector =\n                mDevice.findObject(new UiSelector().resourceId(packageID + \"main_canvas\")\n                     
   .childSelector(new UiSelector()\n                                .descriptionContains(shapeName)));\n        logger.start();\n        shapeSelector.dragTo(newX, newY, 40);\n        logger.stop();\n    }\n\n    public void openDocument(String docName, String workingDirectoryName) throws Exception {\n        String testTag = \"document_open\";\n        ActionLogger logger = new ActionLogger(testTag, parameters);\n\n        clickUiObject(BY_DESC, \"Open presentation\");\n        clickUiObject(BY_TEXT, \"Device storage\", true);\n\n        // Allow access to internal storage\n        UiObject optionBtn =\n            mDevice.findObject(new UiSelector().descriptionContains(\"More options\"));\n        if (optionBtn.waitForExists(WAIT_TIMEOUT_1SEC)) {\n            optionBtn.click();\n            UiObject showInternalBtn =\n                mDevice.findObject(new UiSelector().textContains(\"Show internal storage\"));\n            // Show internal storage, otherwise already shown so exit menu.\n            if (showInternalBtn.exists()) {\n                showInternalBtn.click();\n            }\n            else {\n                mDevice.pressBack();\n            }\n        }\n        UiObject workingDirectory = mDevice.findObject(new UiSelector().text(workingDirectoryName));\n        UiObject folderEntry = mDevice.findObject(new UiSelector().textContains(workingDirectoryName));\n\n        showRoots();\n        UiObject localDevice = mDevice.findObject(new UiSelector().textMatches(\".*[GM]B free\"));\n        localDevice.click();\n        UiScrollable list = new UiScrollable(new UiSelector().scrollable(true));\n        if (!folderEntry.exists() && list.waitForExists(WAIT_TIMEOUT_1SEC)) {\n            list.scrollIntoView(folderEntry);\n        } else {\n            folderEntry.waitForExists(WAIT_TIMEOUT_1SEC);\n        }\n        clickUiObject(BY_TEXT, workingDirectoryName);\n\n        UiScrollable fileList =\n                new UiScrollable(new 
UiSelector().className(\"android.support.v7.widget.RecyclerView\"));\n        // Older versions of android seem to use a different layout\n        if (!fileList.waitForExists(WAIT_TIMEOUT_1SEC)) {\n            fileList =\n                new UiScrollable(new UiSelector().resourceId(\"com.android.documentsui:id/list\"));\n        }\n        fileList.scrollIntoView(new UiSelector().textContains(docName));\n\n        logger.start();\n        clickUiObject(BY_TEXT, docName);\n        UiObject open =\n            mDevice.findObject(new UiSelector().text(\"Open\"));\n        if (open.exists()) {\n            open.click();\n        }\n        logger.stop();\n    }\n\n    public void newDocument() throws Exception {\n        String testTag = \"document_new\";\n        ActionLogger logger = new ActionLogger(testTag, parameters);\n        logger.start();\n        clickUiObject(BY_DESC, \"New presentation\");\n        clickUiObject(BY_TEXT, \"New PowerPoint\", true);\n        logger.stop();\n        dismissUpdateDialog();\n    }\n\n     public void saveDocument(String docName) throws Exception {\n       String testTag = \"document_save\";\n       ActionLogger logger = new ActionLogger(testTag, parameters);\n\n       UiObject saveActionButton =\n           mDevice.findObject(new UiSelector().textMatches(\"[Ss]ave|SAVE|\"));\n       UiObject unsavedIndicator =\n           mDevice.findObject(new UiSelector().textContains(\"Unsaved changes\"));\n       logger.start();\n       if (saveActionButton.waitForExists(WAIT_TIMEOUT_1SEC)) {\n           saveActionButton.click();\n       } else if (unsavedIndicator.waitForExists(WAIT_TIMEOUT_1SEC)) {\n           unsavedIndicator.click();\n       }\n       clickUiObject(BY_TEXT, \"Device\");\n       UiObject save = clickUiObject(BY_TEXT, \"Save\", \"android.widget.Button\");\n\n       // Save in Downloads if present, otherwise assume a sensible default location\n       UiObject downloadsDir =\n            mDevice.findObject(new 
UiSelector().textContains(\"Downloads\"));\n        if (downloadsDir.waitForExists(WAIT_TIMEOUT_1SEC * 5)) {\n            downloadsDir.click();\n        }\n\n       if (save.waitForExists(WAIT_TIMEOUT_1SEC)) {\n           save.click();\n       }\n       if (saveActionButton.waitForExists(WAIT_TIMEOUT_1SEC)) {\n           saveActionButton.click();\n       }\n       logger.stop();\n\n       // Overwrite if prompted\n       // Should not happen under normal circumstances. But ensures test doesn't stop\n       // if a previous iteration failed prematurely and was unable to delete the file.\n       // Note that this file isn't removed during workload teardown as deleting it is\n       // part of the UiAutomator test case.\n       UiObject overwriteView =\n           mDevice.findObject(new UiSelector().textContains(\"already exists\"));\n       if (overwriteView.waitForExists(WAIT_TIMEOUT_1SEC)) {\n           clickUiObject(BY_TEXT, \"Overwrite\");\n       }\n   }\n\n    public void deleteDocument(String docName) throws Exception {\n        String testTag = \"document_delete\";\n        ActionLogger logger = new ActionLogger(testTag, parameters);\n\n        // Switch to Device file tab if present\n        UiObject deviceTab =\n            mDevice.findObject(new UiSelector().textContains(\"Device files\"));\n        if (deviceTab.waitForExists(WAIT_TIMEOUT_1SEC)){\n            deviceTab.click();\n        }\n\n        UiObject doc =\n            mDevice.findObject(new UiSelector().textContains(\"WORKLOAD\"));\n        UiObject moreActions =\n            doc.getFromParent(new UiSelector().descriptionContains(\"More actions\"));\n\n        logger.start();\n        moreActions.click();\n\n        UiObject deleteButton =\n                mDevice.findObject(new UiSelector().textMatches(\".*([Dd]elete|[Rr]emove).*\"));\n        if (deleteButton.waitForExists(WAIT_TIMEOUT_1SEC)) {\n            deleteButton.click();\n        } else {\n            // Delete button not found, try to 
scroll the view\n            UiScrollable scrollable =\n                    new UiScrollable(new UiSelector().scrollable(true)\n                            .childSelector(new UiSelector()\n                                    .textMatches(\".*(Add people|Save to Drive).*\")));\n            if (scrollable.exists()) {\n                scrollable.scrollIntoView(deleteButton);\n            } else {\n                UiObject content =\n                    mDevice.findObject(new UiSelector().resourceIdMatches(packageID + \"(content|menu_recycler_view)\"));\n                int attemptsLeft = 10; // try a maximum of 10 swipe attempts\n                while (!deleteButton.exists() && attemptsLeft > 0) {\n                    content.swipeUp(DEFAULT_SWIPE_STEPS);\n                    attemptsLeft--;\n                }\n            }\n            deleteButton.click();\n        }\n        UiObject delete =\n                    mDevice.findObject(new UiSelector().textMatches(\"DELETE|[Dd]elete\"));\n        if (delete.exists()){\n            delete.click();\n        }\n        delete = mDevice.findObject(new UiSelector().textMatches(\"MOVE TO BIN\"));\n        if (delete.exists()){\n            delete.click();\n        }\n        logger.stop();\n    }\n\n    protected void skipWelcomeScreen() throws Exception {\n        UiObject skip =\n            mDevice.findObject(new UiSelector().textMatches(\"Skip|SKIP\"));\n        if (skip.exists()) {\n            skip.click();\n        }\n    }\n\n    protected void enablePowerpointCompat() throws Exception {\n        String testTag = \"enable_pptmode\";\n        ActionLogger logger = new ActionLogger(testTag, parameters);\n        logger.start();\n\n        // Work around to open navigation drawer via swipe.\n        uiDeviceSwipeHorizontal(0, getDisplayCentreWidth(), getDisplayCentreHeight() / 2, 10);\n\n        clickUiObject(BY_TEXT, \"Settings\");\n        clickUiObject(BY_TEXT, \"Create PowerPoint\");\n        mDevice.pressBack();\n 
       logger.stop();\n    }\n\n    protected void testEditNewSlidesDocument(String docName, String workingDirectoryName, boolean doTextEntry) throws Exception {\n        // Init\n        newDocument();\n        waitForProgress(WAIT_TIMEOUT_1SEC * 30);\n\n        // Slide 1 - Text\n        if (doTextEntry) {\n            enterTextInSlide(\".*[Tt]itle.*\", docName);\n            windowApplication();\n            // Save\n            saveDocument(docName);\n            sleep(1);\n        }\n\n        // Slide 2 - Image\n        insertSlide(\"Title only\");\n        insertImage(workingDirectoryName);\n        sleep(1);\n\n        // If text wasn't entered in first slide, save prompt will appear here\n        if (!doTextEntry) {\n            // Save\n            saveDocument(docName);\n            sleep(1);\n        }\n\n        // Slide 3 - Shape\n        insertSlide(\"Title slide\");\n        String shapeName = \"Rounded rectangle\";\n        insertShape(shapeName);\n        modifyShape(shapeName);\n        mDevice.pressBack();\n        UiObject today =\n            mDevice.findObject(new UiSelector().text(\"Today\"));\n        if (!today.exists()){\n            mDevice.pressBack();\n        }\n        sleep(1);\n\n        // Tidy up\n        dismissWorkOfflineBanner(); // if it appears on the homescreen\n\n        // Note: Currently disabled because it fails on Samsung devices\n        deleteDocument(docName);\n    }\n\n    protected void testSlideshowFromStorage(int slideCount) throws Exception {\n        String testTag = \"slideshow\";\n        // Begin Slide show test\n\n        // Note: Using coordinates slightly offset from the slide edges avoids accidentally\n        // selecting any shapes or text boxes inside the slides while swiping, which may\n        // cause the view to switch into edit mode and fail the test\n        UiObject slideCanvas =\n                mDevice.findObject(new UiSelector().resourceId(packageID + \"main_canvas\"));\n        Rect 
canvasBounds = slideCanvas.getVisibleBounds();\n        int leftEdge = canvasBounds.left + 10;\n        int rightEdge = canvasBounds.right - 10;\n        int topEdge = (canvasBounds.top + canvasBounds.bottom) * 1/3 ;\n        int bottomEdge = (canvasBounds.top + canvasBounds.bottom) * 2/3 ;\n\n        int yCoordinate = (canvasBounds.top + canvasBounds.bottom) / 2;\n        int xCoordinate = (canvasBounds.left + canvasBounds.right) / 2;\n        int slideIndex = 0;\n\n        // scroll forward in edit mode\n        ActionLogger logger = new ActionLogger(testTag + \"_editforward\", parameters);\n        logger.start();\n        while (slideIndex++ < slideCount) {\n            uiDeviceSwipeVertical(topEdge, bottomEdge, xCoordinate, DEFAULT_SWIPE_STEPS);\n            uiDeviceSwipeHorizontal(rightEdge, leftEdge, yCoordinate, DEFAULT_SWIPE_STEPS);\n            waitForProgress(WAIT_TIMEOUT_1SEC*5);\n        }\n        logger.stop();\n        sleep(1);\n\n        // scroll backward in edit mode\n        logger = new ActionLogger(testTag + \"_editbackward\", parameters);\n        logger.start();\n        while (slideIndex-- > 0) {\n            uiDeviceSwipeVertical(bottomEdge, topEdge, xCoordinate, DEFAULT_SWIPE_STEPS);\n            uiDeviceSwipeHorizontal(leftEdge, rightEdge, yCoordinate, DEFAULT_SWIPE_STEPS);\n            waitForProgress(WAIT_TIMEOUT_1SEC*5);\n        }\n        logger.stop();\n        sleep(1);\n\n        // run slideshow\n        UiObject startBtn =\n            mDevice.findObject(new UiSelector().descriptionContains(\"Start slideshow\"));\n        if (!startBtn.exists()) {\n            tapDisplayCentre();\n        }\n\n        logger = new ActionLogger(testTag + \"_run\", parameters);\n        logger.start();\n        clickUiObject(BY_DESC, \"Start slideshow\", true);\n        UiObject onDevice =\n                mDevice.findObject(new UiSelector().textContains(\"this device\"));\n        if (onDevice.waitForExists(WAIT_TIMEOUT_1SEC)) {\n            
onDevice.clickAndWaitForNewWindow();\n            waitForProgress(WAIT_TIMEOUT_1SEC*30);\n            UiObject presentation =\n                    mDevice.findObject(new UiSelector().descriptionContains(\"Presentation Viewer\"));\n            presentation.waitForExists(WAIT_TIMEOUT_1SEC*30);\n        }\n        logger.stop();\n        sleep(1);\n\n        slideIndex = 0;\n\n        // scroll forward in slideshow mode\n        logger = new ActionLogger(testTag + \"_playforward\", parameters);\n        logger.start();\n        while (slideIndex++ < slideCount) {\n            uiDeviceSwipeHorizontal(rightEdge, leftEdge, yCoordinate, DEFAULT_SWIPE_STEPS);\n            waitForProgress(WAIT_TIMEOUT_1SEC*5);\n        }\n        logger.stop();\n        sleep(1);\n\n        // scroll backward in slideshow mode\n        logger = new ActionLogger(testTag + \"_playbackward\", parameters);\n        logger.start();\n        while (slideIndex-- > 0) {\n            uiDeviceSwipeHorizontal(leftEdge, rightEdge, yCoordinate, DEFAULT_SWIPE_STEPS);\n            waitForProgress(WAIT_TIMEOUT_1SEC*5);\n        }\n        logger.stop();\n        sleep(1);\n\n        mDevice.pressBack();\n        mDevice.pressBack();\n    }\n\n    protected boolean waitForProgress(int timeout) throws Exception {\n        UiObject progress = mDevice.findObject(new UiSelector().className(\"android.widget.ProgressBar\"));\n        if (progress.waitForExists(WAIT_TIMEOUT_1SEC)) {\n            return progress.waitUntilGone(timeout);\n        } else {\n            return false;\n        }\n    }\n\n    private long changeAckTimeout(long newTimeout) {\n        Configurator config = Configurator.getInstance();\n        long oldTimeout = config.getActionAcknowledgmentTimeout();\n        config.setActionAcknowledgmentTimeout(newTimeout);\n        return oldTimeout;\n    }\n\n    private void tapOpenArea() throws Exception {\n        UiObject openArea = getUiObjectByResourceId(packageID + \"punch_view_pager\");\n      
  Rect bounds = openArea.getVisibleBounds();\n        // 10px from top of view, 10px from the right edge\n        tapDisplay(bounds.right - 10, bounds.top + 10);\n    }\n\n    public void windowApplication() throws Exception {\n        UiObject window =\n                mDevice.findObject(new UiSelector().resourceId(\"android:id/restore_window\"));\n        if (window.waitForExists(WAIT_TIMEOUT_1SEC)){\n            window.click();\n        }\n    }\n\n    private void showRoots() throws Exception {\n        UiObject rootMenu =\n            mDevice.findObject(new UiSelector().descriptionContains(\"Show root\"));\n        rootMenu.click();\n    }\n}\n"
  },
  {
    "path": "wa/workloads/googleslides/uiauto/build.gradle",
    "content": "// Top-level build file where you can add configuration options common to all sub-projects/modules.\n\nbuildscript {\n    repositories {\n        jcenter()\n        google()\n    }\n    dependencies {\n        classpath 'com.android.tools.build:gradle:7.2.1'\n\n        // NOTE: Do not place your application dependencies here; they belong\n        // in the individual module build.gradle files\n    }\n}\n\nallprojects {\n    repositories {\n        jcenter()\n        google()\n    }\n}\n\ntask clean(type: Delete) {\n    delete rootProject.buildDir\n}\n"
  },
  {
    "path": "wa/workloads/googleslides/uiauto/build.sh",
    "content": "#!/bin/bash\n#    Copyright 2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\n\n# CD into build dir if possible - allows building from any directory\nscript_path='.'\nif `readlink -f $0 &>/dev/null`; then\n    script_path=`readlink -f $0 2>/dev/null`\nfi\nscript_dir=`dirname $script_path`\ncd $script_dir\n\n# Ensure gradlew exists before starting\nif [[ ! -f gradlew ]]; then\n    echo 'gradlew file not found! Check that you are in the right directory.'\n    exit 9\nfi\n\n# Copy base classes from wa dist\nlibs_dir=app/libs\nbase_class=`python3 -c \"import os, wa; print(os.path.join(os.path.dirname(wa.__file__), 'framework', 'uiauto', 'uiauto.aar'))\"`\nmkdir -p $libs_dir\ncp $base_class $libs_dir\n\n# Build and return appropriate exit code if failed\n# ant build\n./gradlew clean :app:assembleDebug\nexit_code=$?\nif [[ $exit_code -ne 0 ]]; then\n    echo \"ERROR: 'gradle build' exited with code $exit_code\"\n    exit $exit_code\nfi\n\n# If successful move APK file to workload folder (overwrite previous)\npackage=com.arm.wa.uiauto.googleslides\nrm -f ../$package\nif [[ -f app/build/outputs/apk/debug/$package.apk ]]; then\n    cp app/build/outputs/apk/debug/$package.apk ../$package.apk\nelse\n    echo 'ERROR: UiAutomator apk could not be found!'\n    exit 9\nfi\n"
  },
  {
    "path": "wa/workloads/googleslides/uiauto/gradle/wrapper/gradle-wrapper.properties",
    "content": "#Wed May 03 15:42:44 BST 2017\ndistributionBase=GRADLE_USER_HOME\ndistributionPath=wrapper/dists\nzipStoreBase=GRADLE_USER_HOME\nzipStorePath=wrapper/dists\ndistributionUrl=https\\://services.gradle.org/distributions/gradle-7.3.3-all.zip\n"
  },
  {
    "path": "wa/workloads/googleslides/uiauto/gradlew",
    "content": "#!/usr/bin/env bash\n\n##############################################################################\n##\n##  Gradle start up script for UN*X\n##\n##############################################################################\n\n# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.\nDEFAULT_JVM_OPTS=\"\"\n\nAPP_NAME=\"Gradle\"\nAPP_BASE_NAME=`basename \"$0\"`\n\n# Use the maximum available, or set MAX_FD != -1 to use that value.\nMAX_FD=\"maximum\"\n\nwarn ( ) {\n    echo \"$*\"\n}\n\ndie ( ) {\n    echo\n    echo \"$*\"\n    echo\n    exit 1\n}\n\n# OS specific support (must be 'true' or 'false').\ncygwin=false\nmsys=false\ndarwin=false\ncase \"`uname`\" in\n  CYGWIN* )\n    cygwin=true\n    ;;\n  Darwin* )\n    darwin=true\n    ;;\n  MINGW* )\n    msys=true\n    ;;\nesac\n\n# Attempt to set APP_HOME\n# Resolve links: $0 may be a link\nPRG=\"$0\"\n# Need this for relative symlinks.\nwhile [ -h \"$PRG\" ] ; do\n    ls=`ls -ld \"$PRG\"`\n    link=`expr \"$ls\" : '.*-> \\(.*\\)$'`\n    if expr \"$link\" : '/.*' > /dev/null; then\n        PRG=\"$link\"\n    else\n        PRG=`dirname \"$PRG\"`\"/$link\"\n    fi\ndone\nSAVED=\"`pwd`\"\ncd \"`dirname \\\"$PRG\\\"`/\" >/dev/null\nAPP_HOME=\"`pwd -P`\"\ncd \"$SAVED\" >/dev/null\n\nCLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar\n\n# Determine the Java command to use to start the JVM.\nif [ -n \"$JAVA_HOME\" ] ; then\n    if [ -x \"$JAVA_HOME/jre/sh/java\" ] ; then\n        # IBM's JDK on AIX uses strange locations for the executables\n        JAVACMD=\"$JAVA_HOME/jre/sh/java\"\n    else\n        JAVACMD=\"$JAVA_HOME/bin/java\"\n    fi\n    if [ ! 
-x \"$JAVACMD\" ] ; then\n        die \"ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME\n\nPlease set the JAVA_HOME variable in your environment to match the\nlocation of your Java installation.\"\n    fi\nelse\n    JAVACMD=\"java\"\n    which java >/dev/null 2>&1 || die \"ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.\n\nPlease set the JAVA_HOME variable in your environment to match the\nlocation of your Java installation.\"\nfi\n\n# Increase the maximum file descriptors if we can.\nif [ \"$cygwin\" = \"false\" -a \"$darwin\" = \"false\" ] ; then\n    MAX_FD_LIMIT=`ulimit -H -n`\n    if [ $? -eq 0 ] ; then\n        if [ \"$MAX_FD\" = \"maximum\" -o \"$MAX_FD\" = \"max\" ] ; then\n            MAX_FD=\"$MAX_FD_LIMIT\"\n        fi\n        ulimit -n $MAX_FD\n        if [ $? -ne 0 ] ; then\n            warn \"Could not set maximum file descriptor limit: $MAX_FD\"\n        fi\n    else\n        warn \"Could not query maximum file descriptor limit: $MAX_FD_LIMIT\"\n    fi\nfi\n\n# For Darwin, add options to specify how the application appears in the dock\nif $darwin; then\n    GRADLE_OPTS=\"$GRADLE_OPTS \\\"-Xdock:name=$APP_NAME\\\" \\\"-Xdock:icon=$APP_HOME/media/gradle.icns\\\"\"\nfi\n\n# For Cygwin, switch paths to Windows format before running java\nif $cygwin ; then\n    APP_HOME=`cygpath --path --mixed \"$APP_HOME\"`\n    CLASSPATH=`cygpath --path --mixed \"$CLASSPATH\"`\n    JAVACMD=`cygpath --unix \"$JAVACMD\"`\n\n    # We build the pattern for arguments to be converted via cygpath\n    ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`\n    SEP=\"\"\n    for dir in $ROOTDIRSRAW ; do\n        ROOTDIRS=\"$ROOTDIRS$SEP$dir\"\n        SEP=\"|\"\n    done\n    OURCYGPATTERN=\"(^($ROOTDIRS))\"\n    # Add a user-defined pattern to the cygpath arguments\n    if [ \"$GRADLE_CYGPATTERN\" != \"\" ] ; then\n        OURCYGPATTERN=\"$OURCYGPATTERN|($GRADLE_CYGPATTERN)\"\n    fi\n    # Now convert the arguments - 
kludge to limit ourselves to /bin/sh\n    i=0\n    for arg in \"$@\" ; do\n        CHECK=`echo \"$arg\"|egrep -c \"$OURCYGPATTERN\" -`\n        CHECK2=`echo \"$arg\"|egrep -c \"^-\"`                                 ### Determine if an option\n\n        if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then                    ### Added a condition\n            eval `echo args$i`=`cygpath --path --ignore --mixed \"$arg\"`\n        else\n            eval `echo args$i`=\"\\\"$arg\\\"\"\n        fi\n        i=$((i+1))\n    done\n    case $i in\n        (0) set -- ;;\n        (1) set -- \"$args0\" ;;\n        (2) set -- \"$args0\" \"$args1\" ;;\n        (3) set -- \"$args0\" \"$args1\" \"$args2\" ;;\n        (4) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" ;;\n        (5) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" ;;\n        (6) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" \"$args5\" ;;\n        (7) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" \"$args5\" \"$args6\" ;;\n        (8) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" \"$args5\" \"$args6\" \"$args7\" ;;\n        (9) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" \"$args5\" \"$args6\" \"$args7\" \"$args8\" ;;\n    esac\nfi\n\n# Split up the JVM_OPTS And GRADLE_OPTS values into an array, following the shell quoting and substitution rules\nfunction splitJvmOpts() {\n    JVM_OPTS=(\"$@\")\n}\neval splitJvmOpts $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS\nJVM_OPTS[${#JVM_OPTS[*]}]=\"-Dorg.gradle.appname=$APP_BASE_NAME\"\n\nexec \"$JAVACMD\" \"${JVM_OPTS[@]}\" -classpath \"$CLASSPATH\" org.gradle.wrapper.GradleWrapperMain \"$@\"\n"
  },
  {
    "path": "wa/workloads/googleslides/uiauto/gradlew.bat",
    "content": "@if \"%DEBUG%\" == \"\" @echo off\r\n@rem ##########################################################################\r\n@rem\r\n@rem  Gradle startup script for Windows\r\n@rem\r\n@rem ##########################################################################\r\n\r\n@rem Set local scope for the variables with windows NT shell\r\nif \"%OS%\"==\"Windows_NT\" setlocal\r\n\r\n@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.\r\nset DEFAULT_JVM_OPTS=\r\n\r\nset DIRNAME=%~dp0\r\nif \"%DIRNAME%\" == \"\" set DIRNAME=.\r\nset APP_BASE_NAME=%~n0\r\nset APP_HOME=%DIRNAME%\r\n\r\n@rem Find java.exe\r\nif defined JAVA_HOME goto findJavaFromJavaHome\r\n\r\nset JAVA_EXE=java.exe\r\n%JAVA_EXE% -version >NUL 2>&1\r\nif \"%ERRORLEVEL%\" == \"0\" goto init\r\n\r\necho.\r\necho ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.\r\necho.\r\necho Please set the JAVA_HOME variable in your environment to match the\r\necho location of your Java installation.\r\n\r\ngoto fail\r\n\r\n:findJavaFromJavaHome\r\nset JAVA_HOME=%JAVA_HOME:\"=%\r\nset JAVA_EXE=%JAVA_HOME%/bin/java.exe\r\n\r\nif exist \"%JAVA_EXE%\" goto init\r\n\r\necho.\r\necho ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%\r\necho.\r\necho Please set the JAVA_HOME variable in your environment to match the\r\necho location of your Java installation.\r\n\r\ngoto fail\r\n\r\n:init\r\n@rem Get command-line arguments, handling Windowz variants\r\n\r\nif not \"%OS%\" == \"Windows_NT\" goto win9xME_args\r\nif \"%@eval[2+2]\" == \"4\" goto 4NT_args\r\n\r\n:win9xME_args\r\n@rem Slurp the command line arguments.\r\nset CMD_LINE_ARGS=\r\nset _SKIP=2\r\n\r\n:win9xME_args_slurp\r\nif \"x%~1\" == \"x\" goto execute\r\n\r\nset CMD_LINE_ARGS=%*\r\ngoto execute\r\n\r\n:4NT_args\r\n@rem Get arguments from the 4NT Shell from JP Software\r\nset CMD_LINE_ARGS=%$\r\n\r\n:execute\r\n@rem Setup the command line\r\n\r\nset 
CLASSPATH=%APP_HOME%\\gradle\\wrapper\\gradle-wrapper.jar\r\n\r\n@rem Execute Gradle\r\n\"%JAVA_EXE%\" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% \"-Dorg.gradle.appname=%APP_BASE_NAME%\" -classpath \"%CLASSPATH%\" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%\r\n\r\n:end\r\n@rem End local scope for the variables with windows NT shell\r\nif \"%ERRORLEVEL%\"==\"0\" goto mainEnd\r\n\r\n:fail\r\nrem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of\r\nrem the _cmd.exe /c_ return code!\r\nif  not \"\" == \"%GRADLE_EXIT_CONSOLE%\" exit 1\r\nexit /b 1\r\n\r\n:mainEnd\r\nif \"%OS%\"==\"Windows_NT\" endlocal\r\n\r\n:omega\r\n"
  },
  {
    "path": "wa/workloads/googleslides/uiauto/settings.gradle",
    "content": "include ':app'\n"
  },
  {
    "path": "wa/workloads/hackbench/__init__.py",
    "content": "#    Copyright 2013-2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n# pylint: disable=W0201, C0103\n\nimport os\nimport re\n\nfrom wa import Workload, Parameter, Executable\nfrom wa.utils.exec_control import once\n\n\ntimeout_buffer = 10\n\nhackbench_results_txt = 'hackbench_results.txt'\n\nregex_map = {\"total_groups\": (re.compile(r'(\\d+) groups'), \"groups\"),\n             \"total_fd\": (re.compile(r'(\\d+) file descriptors'), \"file_descriptors\"),\n             \"total_messages\": (re.compile(r'(\\d+) messages'), \"messages\"),\n             \"total_bytes\": (re.compile(r'(\\d+) bytes'), \"bytes\"),\n             \"test_time\": (re.compile(r'Time: (\\d+.*)'), \"seconds\")\n             }\n\n\nclass Hackbench(Workload):\n\n    name = 'hackbench'\n    description = \"\"\"\n    Hackbench runs a series of tests for the Linux scheduler.\n\n    For details, go to:\n    https://github.com/linux-test-project/ltp/\n\n    \"\"\"\n\n    parameters = [\n        Parameter('timeout', kind=int, default=30, aliases=['duration'],\n                  description='Expected test duration in seconds.'),\n        Parameter('datasize', kind=int, default=100, description='Message size in bytes.'),\n        Parameter('groups', kind=int, default=10, description='Number of groups.'),\n        Parameter('loops', kind=int, default=100, description='Number of loops.'),\n        Parameter('fds', kind=int, default=40, description='Number of file 
descriptors.'),\n        Parameter('extra_params', kind=str, default='',\n                  description='''\n                  Extra parameters to pass in. See the hackbench man page\n                  or type `hackbench --help` for list of options.\n                  '''),\n    ]\n\n    binary_name = 'hackbench'\n\n    @once\n    def initialize(self, context):\n        host_binary = context.get_resource(Executable(self, self.target.abi, self.binary_name))\n        Hackbench.target_binary = self.target.install(host_binary)\n\n    def setup(self, context):\n        self.target_output_file = self.target.get_workpath(hackbench_results_txt)\n        self.run_timeout = self.timeout + timeout_buffer\n        command_format = '{} -s {} -g {} -l {} {} > {}'\n        self.command = command_format.format(self.target_binary, self.datasize, self.groups,\n                                             self.loops, self.extra_params, self.target_output_file)\n\n    def run(self, context):\n        self.target.execute(self.command, timeout=self.run_timeout)\n\n    def extract_results(self, context):\n        host_output_file = os.path.join(context.output_directory, hackbench_results_txt)\n        self.target.pull(self.target_output_file, host_output_file)\n        context.add_artifact('hackbench-results', host_output_file, kind='raw')\n\n    def update_output(self, context):\n        results_file = context.get_artifact_path('hackbench-results')\n        with open(results_file) as fh:\n            for line in fh:\n                for label, (regex, units) in regex_map.items():\n                    match = regex.search(line)\n                    if match:\n                        context.add_metric(label, float(match.group(1)), units)\n\n    def teardown(self, context):\n        if self.cleanup_assets:\n            self.target.execute('rm -f {}'.format(self.target_output_file))\n\n    @once\n    def finalize(self, context):\n        if self.uninstall:\n            
self.target.uninstall(self.binary_name)\n"
  },
  {
    "path": "wa/workloads/hackbench/src/LICENSE",
    "content": "hackbench is licensed under GPLv2.\n\nSource for these binaries can be obtained here:\n\nhttp://git.kernel.org/cgit/linux/kernel/git/clrkwllms/rt-tests.git\n"
  },
  {
    "path": "wa/workloads/homescreen/__init__.py",
    "content": "#    Copyright 2013-2017 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\n# pylint: disable=E1101\n\nfrom wa import Workload, Parameter\n\n\nclass HomeScreen(Workload):\n\n    name = 'homescreen'\n    description = \"\"\"\n    A workload that goes to the home screen and idles for the\n    specified duration.\n\n    \"\"\"\n    supported_platforms = ['android']\n\n    parameters = [\n        Parameter('duration', kind=int, default=20,\n                  description='Specifies the duration, in seconds, of this workload.'),\n    ]\n\n    def setup(self, context):\n        self.target.clear_logcat()\n        self.target.execute('input keyevent 3')  # press the home key\n\n    def run(self, context):\n        self.target.sleep(self.duration)\n"
  },
  {
    "path": "wa/workloads/honorofkings/__init__.py",
    "content": "#    Copyright 2025 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\nimport os\n\nfrom wa import ApkReventWorkload, Parameter\n\n\nclass HoK(ApkReventWorkload):\n    name = 'honorofkings'\n    uninstall = False\n    clear_data_on_reset = False  # Don't clear assets on exit\n    requires_network = True  # The game requires network connection\n    description = (\n        'Launch a match replay in Honor of Kings.\\n\\n'\n        'The game must already have a user logged in and the plugins downloaded.'\n    )\n    package_names = [\n        'com.levelinfinite.sgameGlobal',\n        'com.tencent.tmgp.sgame',\n    ]\n\n    parameters = [\n        Parameter(\n            'activity',\n            kind=str,\n            default='.SGameGlobalActivity',\n            description='Activity name of Honor of Kings game.',\n        ),\n        Parameter(\n            'replay_file',\n            kind=str,\n            default='replay.abc',\n            description='Honor of Kings Replay file name.',\n        ),\n    ]\n\n    def setup(self, context):\n        upload_dir = self.target.path.join(\n            self.target.external_storage_app_dir,\n            self.apk.apk_info.package,\n            'files',\n            'Replay'\n        )\n        replay_file = os.path.join(self.dependencies_directory, self.replay_file)\n        self.logger.debug('Uploading \"%s\" to \"%s\"...', replay_file, upload_dir)\n        self.target.push(replay_file, 
upload_dir)\n\n        super().setup(context)\n"
  },
  {
    "path": "wa/workloads/hwuitest/__init__.py",
    "content": "#    Copyright 2013-2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\n# pylint: disable=E1101,W0201\n\nimport os\nimport re\nfrom collections import defaultdict\n\nfrom wa import Workload, Parameter, Executable\nfrom wa.utils.exec_control import once\nfrom wa.utils.types import caseless_string\n\n\nBINARY = \"hwuitest\"\nIGNORED_METRICS = [\"Stats since\", \"Total frames rendered\"]\n\n\nclass HWUITest(Workload):\n\n    name = 'hwuitest'\n    description = \"\"\"\n    Tests UI rendering latency on Android devices.\n\n    The binary for this workload is built as part of AOSP's\n    frameworks/base/libs/hwui component.\n    \"\"\"\n    supported_platforms = ['android']\n\n    parameters = [\n        Parameter('test', kind=caseless_string, default=\"shadowgrid\",\n                  allowed_values=[\"shadowgrid\", \"rectgrid\", \"oval\"],\n                  description=\"\"\"\n                  The test to run:\n\n                     - ``'shadowgrid'``: creates a grid of rounded rects that\n                       cast shadows, high CPU & GPU load\n                     - ``'rectgrid'``: creates a grid of 1x1 rects\n                     - ``'oval'``: draws 1 oval\n                  \"\"\"),\n        Parameter('loops', kind=int, default=3,\n                  description=\"The number of test iterations.\"),\n        Parameter('frames', kind=int, default=150,\n                  description=\"The number of frames to run the test 
over.\"),\n    ]\n\n    def __init__(self, target, *args, **kwargs):\n        super(HWUITest, self).__init__(target, *args, **kwargs)\n        HWUITest.target_exe = None\n\n    @once\n    def initialize(self, context):\n        host_exe = context.get_resource(Executable(self,\n                                                   self.target.abi,\n                                                   BINARY))\n        HWUITest.target_exe = self.target.install(host_exe)\n\n    def run(self, context):\n        self.output = None\n        self.output = self.target.execute(\"{} {} {} {}\".format(self.target_exe,\n                                                               self.test.lower(),\n                                                               self.loops,\n                                                               self.frames))\n\n    def extract_results(self, context):\n        if not self.output:\n            return\n        outfile = os.path.join(context.output_directory, 'hwuitest.output')\n        with open(outfile, 'w') as wfh:\n            wfh.write(self.output)\n        context.add_artifact('hwuitest', outfile, kind='raw')\n\n    def update_output(self, context):\n        normal = re.compile(r'(?P<value>\\d*)(?P<unit>\\w*)')\n        with_pct = re.compile(r'(?P<value>\\d*) \\((?P<percent>.*)%\\)')\n        count = 0\n        for line in self.output.splitlines():\n            #Filters out \"Success!\" and blank lines\n            try:\n                metric, value_string = [p.strip() for p in line.split(':', 1)]\n            except ValueError:\n                continue\n\n            # Filters out unwanted lines\n            if metric in IGNORED_METRICS:\n                continue\n\n            if metric == \"Janky frames\":\n                count += 1\n                match = with_pct.match(value_string).groupdict()\n                context.add_metric(metric,\n                                   match['value'],\n                                   
None,\n                                   classifiers={\"loop\": count,\n                                                \"frames\": self.frames})\n                context.add_metric(metric + \"_pct\",\n                                   match['percent'],\n                                   \"%\",\n                                   classifiers={\"loop\": count,\n                                                \"frames\": self.frames})\n            else:\n                match = normal.match(value_string).groupdict()\n                context.add_metric(metric,\n                                   match['value'],\n                                   match['unit'],\n                                   classifiers={\"loop\": count,\n                                                \"frames\": self.frames})\n\n    @once\n    def finalize(self, context):\n        if self.target_exe and self.uninstall:\n            self.target.uninstall(self.target_exe)\n"
  },
  {
    "path": "wa/workloads/idle.py",
    "content": "#    Copyright 2014-2017 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\n# pylint: disable=E1101\n\nfrom wa import Workload, Parameter, WorkloadError, ConfigError\n\n\nclass IdleWorkload(Workload):\n\n    name = 'idle'\n    description = \"\"\"\n    Do nothing for the specified duration.\n\n    On android devices, this may optionally stop the Android run time, if\n    ``stop_android`` is set to ``True``.\n\n    .. note:: This workload requires the device to be rooted.\n\n    \"\"\"\n\n    parameters = [\n        Parameter('duration', kind=int, default=20,\n                  description='''\n                  Specifies the duration, in seconds, of this workload.\n                  '''),\n        Parameter('screen_off', kind=bool, default=False,\n                  description='''\n                  Ensure that the screen is off before idling.\n\n                  .. 
note:: Make sure screen lock is disabled on the target!\n\n                  '''),\n        Parameter('stop_android', kind=bool, default=False,\n                  description='''\n                  Specifies whether the Android run time should be stopped.\n                  (Can be set only for Android devices).\n                  '''),\n    ]\n\n    def initialize(self, context):\n        self.old_screen_state = None\n        if self.target.os == 'android':\n            if self.stop_android and not self.target.is_rooted:\n                msg = 'Idle workload requires the device to be rooted in order to stop Android.'\n                raise WorkloadError(msg)\n        else:\n            if self.stop_android or self.screen_off:\n                msg = 'stop_android/screen_off can only be set for Android devices'\n                raise ConfigError(msg)\n\n    def setup(self, context):\n        if self.target.os == 'android':\n            self.old_screen_state = self.target.is_screen_on()\n            self.target.ensure_screen_is_on()\n            self.target.homescreen()\n            if self.screen_off:\n                self.target.ensure_screen_is_off()\n\n    def run(self, context):\n        self.logger.debug('idling...')\n        if self.stop_android:\n            timeout = self.duration + 10\n            self.target.execute('stop && sleep {} && start'.format(self.duration),\n                                timeout=timeout, as_root=True)\n        else:\n            self.target.sleep(self.duration)\n\n    def teardown(self, context):\n        if self.stop_android:\n            self.logger.debug('Waiting for Android restart to complete...')\n            # Wait for the boot animation to start and then to finish.\n            while self.target.getprop('init.svc.bootanim') == 'stopped':\n                self.target.sleep(0.2)\n            while self.target.getprop('init.svc.bootanim') == 'running':\n                self.target.sleep(1)\n        if self.screen_off and 
self.old_screen_state:\n            self.target.ensure_screen_is_on()\n        elif (self.target.os == 'android'\n                and not self.screen_off and not self.old_screen_state):\n            self.target.ensure_screen_is_off()\n"
  },
  {
    "path": "wa/workloads/jankbench/__init__.py",
    "content": "#    Copyright 2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\n# pylint: disable=E1101,W0201,E0203\n\n\nimport os\nimport re\nimport select\nimport json\nimport threading\nimport sqlite3\nimport subprocess\nimport sys\nfrom copy import copy\n\nimport pandas as pd\n\nfrom wa import ApkWorkload, Parameter, WorkloadError, ConfigError\nfrom wa.utils.types import list_or_string, numeric\n\n\nDELAY = 2\n\n\nclass Jankbench(ApkWorkload):\n\n    name = 'jankbench'\n    description = \"\"\"\n    Internal Google benchmark for evaluating jank on Android.\n\n    \"\"\"\n    package_names = ['com.android.benchmark']\n    activity = '.app.RunLocalBenchmarksActivity'\n\n    results_db_file = 'BenchmarkResults'\n\n    iteration_regex = re.compile(r'System.out: iteration: (?P<iteration>[0-9]+)')\n    metrics_regex = re.compile(\n        r'System.out: Mean: (?P<mean>[0-9\\.]+)\\s+JankP: (?P<junk_p>[0-9\\.]+)\\s+'\n        r'StdDev: (?P<std_dev>[0-9\\.]+)\\s+Count Bad: (?P<count_bad>[0-9]+)\\s+'\n        r'Count Jank: (?P<count_junk>[0-9]+)'\n    )\n\n    valid_test_ids = [\n        # Order matters -- the index of the id must match what is expected by\n        # the App.\n        'list_view',\n        'image_list_view',\n        'shadow_grid',\n        'low_hitrate_text',\n        'high_hitrate_text',\n        'edit_text',\n        'overdraw_test',\n    ]\n\n    parameters = [\n        Parameter('test_ids', kind=list_or_string,\n             
     allowed_values=valid_test_ids,\n                  description='ID of the jankbench test to be run.'),\n        Parameter('loops', kind=int, default=1, constraint=lambda x: x > 0, aliases=['reps'],\n                  description='''\n                  Specifies the number of times the benchmark will be run in a \"tight loop\",\n                  i.e. without performing setup/teardown in between.\n                  '''),\n        Parameter('pull_results_db', kind=bool,\n                  description='''\n                  Specifies whether an sqlite database with detailed results should be pulled\n                  from benchmark app's data. This requires the device to be rooted.\n\n                  This defaults to ``True`` for rooted devices and ``False`` otherwise.\n                  '''),\n        Parameter('timeout', kind=int, default=10 * 60, aliases=['run_timeout'],\n                  description=\"\"\"\n                  Time out for workload execution. The workload will be killed if it hasn't completed\n                  within this period.\n                  \"\"\"),\n    ]\n\n    def setup(self, context):\n        super(Jankbench, self).setup(context)\n\n        if self.pull_results_db is None:\n            self.pull_results_db = self.target.is_rooted\n        elif self.pull_results_db and not self.target.is_rooted:\n            raise ConfigError('pull_results_db set for an unrooted device')\n\n        if not self.target.is_container:\n            self.target.ensure_screen_is_on()\n\n        self.command = self._build_command()\n        self.monitor = JankbenchRunMonitor(self.target)\n        self.monitor.start()\n\n    def run(self, context):\n        result = self.target.execute(self.command, timeout=self.timeout)\n        if 'FAILURE' in result:\n            raise WorkloadError(result)\n        else:\n            self.logger.debug(result)\n        self.target.sleep(DELAY)\n        self.monitor.wait_for_run_end(self.timeout)\n\n    def 
extract_results(self, context):\n        self.monitor.stop()\n        if self.pull_results_db:\n            target_file = self.target.path.join(self.target.package_data_directory,\n                                                self.package, 'databases', self.results_db_file)\n            host_file = os.path.join(context.output_directory, self.results_db_file)\n            self.target.pull(target_file, host_file, as_root=True)\n            context.add_artifact('jankbench-results', host_file, 'data')\n\n    def update_output(self, context):  # NOQA\n        super(Jankbench, self).update_output(context)\n        if self.pull_results_db:\n            self.extract_metrics_from_db(context)\n        else:\n            self.extract_metrics_from_logcat(context)\n\n    def extract_metrics_from_db(self, context):  # pylint: disable=no-self-use\n        dbfile = context.get_artifact_path('jankbench-results')\n        with sqlite3.connect(dbfile) as conn:\n            df = pd.read_sql('select name, iteration, total_duration, jank_frame from ui_results', conn)\n            g = df.groupby(['name', 'iteration'])\n            janks = g.jank_frame.sum()\n            janks_pc = janks / g.jank_frame.count() * 100\n            results = pd.concat([\n                g.total_duration.mean(),\n                g.total_duration.std(),\n                janks,\n                janks_pc,\n            ], axis=1)\n            results.columns = ['mean', 'std_dev', 'count_jank', 'jank_p']\n\n            for test_name, rep in results.index:\n                test_results = results.loc[test_name, rep]\n                for metric, value in test_results.items():\n                    context.add_metric(metric, value, units=None, lower_is_better=True,\n                                       classifiers={'test_name': test_name, 'rep': rep})\n\n    def extract_metrics_from_logcat(self, context):\n        metric_names = ['mean', 'junk_p', 'std_dev', 'count_bad', 'count_junk']\n        logcat_file = 
context.get_artifact_path('logcat')\n        with open(logcat_file, errors='replace') as fh:\n            run_tests = copy(self.test_ids or self.valid_test_ids)\n            current_iter = None\n            current_test = None\n            for line in fh:\n\n                match = self.iteration_regex.search(line)\n                if match:\n                    if current_iter is not None:\n                        msg = 'Did not see results for iteration {} of {}'\n                        self.logger.warning(msg.format(current_iter, current_test))\n                    current_iter = int(match.group('iteration'))\n                    if current_iter == 0:\n                        try:\n                            current_test = run_tests.pop(0)\n                        except IndexError:\n                            self.logger.warning('Encountered an iteration for an unknown test.')\n                            current_test = 'unknown'\n                    continue\n\n                match = self.metrics_regex.search(line)\n                if match:\n                    if current_iter is None:\n                        self.logger.warning('Encountered unexpected metrics (no iteration)')\n                        continue\n\n                    for name in metric_names:\n                        value = numeric(match.group(name))\n                        context.add_metric(name, value, units=None, lower_is_better=True,\n                                           classifiers={'test_id': current_test, 'rep': current_iter})\n\n                    current_iter = None\n\n    def _build_command(self):\n        command_params = []\n        if self.test_ids:\n            test_idxs = [str(self.valid_test_ids.index(i)) for i in self.test_ids]\n            command_params.append('--eia com.android.benchmark.EXTRA_ENABLED_BENCHMARK_IDS {}'.format(','.join(test_idxs)))\n        if self.loops:\n            command_params.append('--ei com.android.benchmark.EXTRA_RUN_COUNT 
{}'.format(self.loops))\n        return 'am start -W -S -n {}/{} {}'.format(self.package,\n                                                   self.activity,\n                                                   ' '.join(command_params))\n\n\nclass JankbenchRunMonitor(threading.Thread):\n\n    regex = re.compile(r'I BENCH\\s+:\\s+BenchmarkDone!')\n\n    def __init__(self, device):\n        super(JankbenchRunMonitor, self).__init__()\n        self.target = device\n        self.daemon = True\n        self.run_ended = threading.Event()\n        self.stop_event = threading.Event()\n        self.target.clear_logcat()\n        if self.target.adb_name:\n            self.command = ['adb', '-s', self.target.adb_name, 'logcat']\n        else:\n            self.command = ['adb', 'logcat']\n\n    def run(self):\n        proc = subprocess.Popen(self.command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)\n        while not self.stop_event.is_set():\n            if self.run_ended.is_set():\n                self.target.sleep(DELAY)\n            else:\n                ready, _, _ = select.select([proc.stdout, proc.stderr], [], [], 2)\n                if ready:\n                    line = ready[0].readline()\n                    line = line.decode(sys.stdout.encoding, 'replace')\n                    if self.regex.search(line):\n                        self.run_ended.set()\n        proc.terminate()\n\n    def stop(self):\n        self.stop_event.set()\n        self.join()\n\n    def wait_for_run_end(self, timeout):\n        self.run_ended.wait(timeout)\n        self.run_ended.clear()\n"
  },
  {
    "path": "wa/workloads/lmbench/__init__.py",
    "content": "#    Copyright 2015-2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\n# pylint: disable=E1101,W0201\n\nimport os\n\nfrom wa import Workload, Parameter, Executable\nfrom wa.utils.types import list_or_integer, list_or_string\nfrom wa.utils.types import cpu_mask\n\n\nclass Lmbench(Workload):\n\n    name = 'lmbench'\n\n    # Define supported tests. Each requires a _setup_{name} routine below\n    test_names = ['lat_mem_rd', 'bw_mem']\n\n    description = \"\"\"\n    Run a subtest from lmbench, a suite of portable ANSI/C microbenchmarks for\n    UNIX/POSIX.\n    In general, lmbench measures two key features: latency and bandwidth. This\n    workload supports a subset of lmbench tests.  lat_mem_rd can be used to\n    measure latencies to memory (including caches). bw_mem can be used to\n    measure bandwidth to/from memory over a range of operations.\n    Further details, and source code are available from:\n\n        http://sourceforge.net/projects/lmbench/.\n\n    See lmbench/bin/README for license details.\n    \"\"\"\n\n    parameters = [\n        Parameter('test', default='lat_mem_rd', allowed_values=test_names,\n                  description='''\n                  Specifies an lmbench test to run.\n                  '''),\n        Parameter('stride', kind=list_or_integer, default=[128],\n                  description='''\n                  Stride for lat_mem_rd test. 
Workload will iterate over one or\n                  more integer values.\n                  '''),\n        Parameter('thrash', kind=bool, default=True,\n                  description='Sets -t flag for lat_mem_rd_test'),\n        Parameter('size', kind=list_or_string, default=\"4m\",\n                  description='Data set size for lat_mem_rd bw_mem tests.'),\n        Parameter('mem_category', kind=list_or_string,\n                  default=('rd', 'wr', 'cp', 'frd', 'fwr', 'fcp', 'bzero', 'bcopy'),\n                  description='List of memory categories for bw_mem test.'),\n        Parameter('parallelism', kind=int, default=None,\n                  description='Parallelism flag for tests that accept it.'),\n        Parameter('warmup', kind=int, default=None,\n                  description='Warmup flag for tests that accept it.'),\n        Parameter('repetitions', kind=int, default=None,\n                  description='Repetitions flag for tests that accept it.'),\n        Parameter('force_abi', kind=str, default=None,\n                  description='''\n                  Override device abi with this value. Can be used to force\n                  arm32 on 64-bit devices.\n                  '''),\n        Parameter('run_timeout', kind=int, default=900,\n                  description=\"\"\"\n                  Timeout for execution of the test.\n                  \"\"\"),\n        Parameter('loops', kind=int, default=1, constraint=lambda x: x > 0,\n                  description=\"\"\"\n                  Specifies the number of times the benchmark will be run in a\n                  \"tight loop\", i.e. without performing setup/teardown\n                  in between. 
This parameter is distinct from \"repetitions\", as\n                  the latter takes place within the benchmark and produces a\n                  single result.\n                  \"\"\"),\n        Parameter('cpus', kind=cpu_mask, default=0, aliases=['taskset_mask'],\n                  description=\"\"\"\n                  Specifies the CPU mask the benchmark process will be pinned to.\n                  \"\"\"),\n    ]\n\n    def setup(self, context):\n\n        abi = self.target.abi\n        if self.force_abi:\n            abi = self.force_abi\n\n        # self.test has been pre-validated, so this _should_ only fail if there's an abi mismatch\n        host_exe = context.resolver.get(Executable(self, abi, self.test))\n        self.target_exe = self.target.install(host_exe)\n        self.commands = []\n\n        setup_test = getattr(self, '_setup_{}'.format(self.test))\n        setup_test()\n\n    def run(self, context):\n        for _ in range(self.loops):\n            for command in self.commands:\n                self.target.execute(command, timeout=self.run_timeout)\n\n    def extract_results(self, context):\n        results_path = self.target.get_workpath(\"lmbench.output\")\n        result_file = self.target.list_directory(results_path)[-1]\n        self.result_file = result_file.rstrip()\n        result = self.target.path.join(results_path, result_file)\n        self.target.pull(result, context.output_directory)\n        context.add_artifact('lmbench-result', \"lmbench.output\", kind='raw')\n\n    def teardown(self, context):\n        if self.uninstall:\n            self.target.uninstall(self.test)\n\n    #\n    # Test setup routines\n    #\n    def _setup_lat_mem_rd(self):\n        target_file = self.target.get_workpath('lmbench.output')\n        self.target.execute('rm -f {}'.format(target_file))\n\n        command_stub = self._setup_common()\n        if self.thrash:\n            command_stub = '{} -t'.format(command_stub)\n\n        for size in 
self.size:\n            command = '{} {}'.format(command_stub, size)\n            for stride in self.stride:\n                self.commands.append('{} {} >> {} 2>&1'.format(command, stride, target_file))\n\n    def _setup_bw_mem(self):\n        target_file = self.target.get_workpath('lmbench.output')\n        self.target.execute('rm -f {}'.format(target_file))\n\n        command_stub = self._setup_common()\n\n        for size in self.size:\n            command = '{} {}'.format(command_stub, size)\n            for category in self.mem_category:\n                self.commands.append('{} {} >> {} 2>&1'.format(command, category, target_file))\n\n    def _setup_common(self):\n        parts = []\n        if self.cpus:\n            parts.append('{} taskset {} {}'.format(self.target.busybox,\n                                                   self.cpus.mask(),\n                                                   self.target_exe))\n        else:\n            parts.append(self.target_exe)\n        if self.parallelism is not None:\n            parts.append('-P {}'.format(self.parallelism))\n        if self.warmup is not None:\n            parts.append('-W {}'.format(self.warmup))\n        if self.repetitions is not None:\n            parts.append('-N {}'.format(self.repetitions))\n        return ' '.join(parts) + ' '\n"
  },
  {
    "path": "wa/workloads/lmbench/bin/COPYING",
    "content": "\t\t    GNU GENERAL PUBLIC LICENSE\n\t\t       Version 2, June 1991\n\n Copyright (C) 1989, 1991 Free Software Foundation, Inc.\n                          675 Mass Ave, Cambridge, MA 02139, USA\n Everyone is permitted to copy and distribute verbatim copies\n of this license document, but changing it is not allowed.\n\n\t\t\t    Preamble\n\n  The licenses for most software are designed to take away your\nfreedom to share and change it.  By contrast, the GNU General Public\nLicense is intended to guarantee your freedom to share and change free\nsoftware--to make sure the software is free for all its users.  This\nGeneral Public License applies to most of the Free Software\nFoundation's software and to any other program whose authors commit to\nusing it.  (Some other Free Software Foundation software is covered by\nthe GNU Library General Public License instead.)  You can apply it to\nyour programs, too.\n\n  When we speak of free software, we are referring to freedom, not\nprice.  Our General Public Licenses are designed to make sure that you\nhave the freedom to distribute copies of free software (and charge for\nthis service if you wish), that you receive source code or can get it\nif you want it, that you can change the software or use pieces of it\nin new free programs; and that you know you can do these things.\n\n  To protect your rights, we need to make restrictions that forbid\nanyone to deny you these rights or to ask you to surrender the rights.\nThese restrictions translate to certain responsibilities for you if you\ndistribute copies of the software, or if you modify it.\n\n  For example, if you distribute copies of such a program, whether\ngratis or for a fee, you must give the recipients all the rights that\nyou have.  You must make sure that they, too, receive or can get the\nsource code.  
And you must show them these terms so they know their\nrights.\n\n  We protect your rights with two steps: (1) copyright the software, and\n(2) offer you this license which gives you legal permission to copy,\ndistribute and/or modify the software.\n\n  Also, for each author's protection and ours, we want to make certain\nthat everyone understands that there is no warranty for this free\nsoftware.  If the software is modified by someone else and passed on, we\nwant its recipients to know that what they have is not the original, so\nthat any problems introduced by others will not reflect on the original\nauthors' reputations.\n\n  Finally, any free program is threatened constantly by software\npatents.  We wish to avoid the danger that redistributors of a free\nprogram will individually obtain patent licenses, in effect making the\nprogram proprietary.  To prevent this, we have made it clear that any\npatent must be licensed for everyone's free use or not licensed at all.\n\n  The precise terms and conditions for copying, distribution and\nmodification follow.\n\f\n\t\t    GNU GENERAL PUBLIC LICENSE\n   TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION\n\n  0. This License applies to any program or other work which contains\na notice placed by the copyright holder saying it may be distributed\nunder the terms of this General Public License.  The \"Program\", below,\nrefers to any such program or work, and a \"work based on the Program\"\nmeans either the Program or any derivative work under copyright law:\nthat is to say, a work containing the Program or a portion of it,\neither verbatim or with modifications and/or translated into another\nlanguage.  (Hereinafter, translation is included without limitation in\nthe term \"modification\".)  Each licensee is addressed as \"you\".\n\nActivities other than copying, distribution and modification are not\ncovered by this License; they are outside its scope.  
The act of\nrunning the Program is not restricted, and the output from the Program\nis covered only if its contents constitute a work based on the\nProgram (independent of having been made by running the Program).\nWhether that is true depends on what the Program does.\n\n  1. You may copy and distribute verbatim copies of the Program's\nsource code as you receive it, in any medium, provided that you\nconspicuously and appropriately publish on each copy an appropriate\ncopyright notice and disclaimer of warranty; keep intact all the\nnotices that refer to this License and to the absence of any warranty;\nand give any other recipients of the Program a copy of this License\nalong with the Program.\n\nYou may charge a fee for the physical act of transferring a copy, and\nyou may at your option offer warranty protection in exchange for a fee.\n\n  2. You may modify your copy or copies of the Program or any portion\nof it, thus forming a work based on the Program, and copy and\ndistribute such modifications or work under the terms of Section 1\nabove, provided that you also meet all of these conditions:\n\n    a) You must cause the modified files to carry prominent notices\n    stating that you changed the files and the date of any change.\n\n    b) You must cause any work that you distribute or publish, that in\n    whole or in part contains or is derived from the Program or any\n    part thereof, to be licensed as a whole at no charge to all third\n    parties under the terms of this License.\n\n    c) If the modified program normally reads commands interactively\n    when run, you must cause it, when started running for such\n    interactive use in the most ordinary way, to print or display an\n    announcement including an appropriate copyright notice and a\n    notice that there is no warranty (or else, saying that you provide\n    a warranty) and that users may redistribute the program under\n    these conditions, and telling the user how to view a copy of this\n  
  License.  (Exception: if the Program itself is interactive but\n    does not normally print such an announcement, your work based on\n    the Program is not required to print an announcement.)\n\f\nThese requirements apply to the modified work as a whole.  If\nidentifiable sections of that work are not derived from the Program,\nand can be reasonably considered independent and separate works in\nthemselves, then this License, and its terms, do not apply to those\nsections when you distribute them as separate works.  But when you\ndistribute the same sections as part of a whole which is a work based\non the Program, the distribution of the whole must be on the terms of\nthis License, whose permissions for other licensees extend to the\nentire whole, and thus to each and every part regardless of who wrote it.\n\nThus, it is not the intent of this section to claim rights or contest\nyour rights to work written entirely by you; rather, the intent is to\nexercise the right to control the distribution of derivative or\ncollective works based on the Program.\n\nIn addition, mere aggregation of another work not based on the Program\nwith the Program (or with a work based on the Program) on a volume of\na storage or distribution medium does not bring the other work under\nthe scope of this License.\n\n  3. 
You may copy and distribute the Program (or a work based on it,\nunder Section 2) in object code or executable form under the terms of\nSections 1 and 2 above provided that you also do one of the following:\n\n    a) Accompany it with the complete corresponding machine-readable\n    source code, which must be distributed under the terms of Sections\n    1 and 2 above on a medium customarily used for software interchange; or,\n\n    b) Accompany it with a written offer, valid for at least three\n    years, to give any third party, for a charge no more than your\n    cost of physically performing source distribution, a complete\n    machine-readable copy of the corresponding source code, to be\n    distributed under the terms of Sections 1 and 2 above on a medium\n    customarily used for software interchange; or,\n\n    c) Accompany it with the information you received as to the offer\n    to distribute corresponding source code.  (This alternative is\n    allowed only for noncommercial distribution and only if you\n    received the program in object code or executable form with such\n    an offer, in accord with Subsection b above.)\n\nThe source code for a work means the preferred form of the work for\nmaking modifications to it.  For an executable work, complete source\ncode means all the source code for all modules it contains, plus any\nassociated interface definition files, plus the scripts used to\ncontrol compilation and installation of the executable.  
However, as a\nspecial exception, the source code distributed need not include\nanything that is normally distributed (in either source or binary\nform) with the major components (compiler, kernel, and so on) of the\noperating system on which the executable runs, unless that component\nitself accompanies the executable.\n\nIf distribution of executable or object code is made by offering\naccess to copy from a designated place, then offering equivalent\naccess to copy the source code from the same place counts as\ndistribution of the source code, even though third parties are not\ncompelled to copy the source along with the object code.\n\f\n  4. You may not copy, modify, sublicense, or distribute the Program\nexcept as expressly provided under this License.  Any attempt\notherwise to copy, modify, sublicense or distribute the Program is\nvoid, and will automatically terminate your rights under this License.\nHowever, parties who have received copies, or rights, from you under\nthis License will not have their licenses terminated so long as such\nparties remain in full compliance.\n\n  5. You are not required to accept this License, since you have not\nsigned it.  However, nothing else grants you permission to modify or\ndistribute the Program or its derivative works.  These actions are\nprohibited by law if you do not accept this License.  Therefore, by\nmodifying or distributing the Program (or any work based on the\nProgram), you indicate your acceptance of this License to do so, and\nall its terms and conditions for copying, distributing or modifying\nthe Program or works based on it.\n\n  6. Each time you redistribute the Program (or any work based on the\nProgram), the recipient automatically receives a license from the\noriginal licensor to copy, distribute or modify the Program subject to\nthese terms and conditions.  
You may not impose any further\nrestrictions on the recipients' exercise of the rights granted herein.\nYou are not responsible for enforcing compliance by third parties to\nthis License.\n\n  7. If, as a consequence of a court judgment or allegation of patent\ninfringement or for any other reason (not limited to patent issues),\nconditions are imposed on you (whether by court order, agreement or\notherwise) that contradict the conditions of this License, they do not\nexcuse you from the conditions of this License.  If you cannot\ndistribute so as to satisfy simultaneously your obligations under this\nLicense and any other pertinent obligations, then as a consequence you\nmay not distribute the Program at all.  For example, if a patent\nlicense would not permit royalty-free redistribution of the Program by\nall those who receive copies directly or indirectly through you, then\nthe only way you could satisfy both it and this License would be to\nrefrain entirely from distribution of the Program.\n\nIf any portion of this section is held invalid or unenforceable under\nany particular circumstance, the balance of the section is intended to\napply and the section as a whole is intended to apply in other\ncircumstances.\n\nIt is not the purpose of this section to induce you to infringe any\npatents or other property right claims or to contest validity of any\nsuch claims; this section has the sole purpose of protecting the\nintegrity of the free software distribution system, which is\nimplemented by public license practices.  Many people have made\ngenerous contributions to the wide range of software distributed\nthrough that system in reliance on consistent application of that\nsystem; it is up to the author/donor to decide if he or she is willing\nto distribute software through any other system and a licensee cannot\nimpose that choice.\n\nThis section is intended to make thoroughly clear what is believed to\nbe a consequence of the rest of this License.\n\f\n  8. 
If the distribution and/or use of the Program is restricted in\ncertain countries either by patents or by copyrighted interfaces, the\noriginal copyright holder who places the Program under this License\nmay add an explicit geographical distribution limitation excluding\nthose countries, so that distribution is permitted only in or among\ncountries not thus excluded.  In such case, this License incorporates\nthe limitation as if written in the body of this License.\n\n  9. The Free Software Foundation may publish revised and/or new versions\nof the General Public License from time to time.  Such new versions will\nbe similar in spirit to the present version, but may differ in detail to\naddress new problems or concerns.\n\nEach version is given a distinguishing version number.  If the Program\nspecifies a version number of this License which applies to it and \"any\nlater version\", you have the option of following the terms and conditions\neither of that version or of any later version published by the Free\nSoftware Foundation.  If the Program does not specify a version number of\nthis License, you may choose any version ever published by the Free Software\nFoundation.\n\n  10. If you wish to incorporate parts of the Program into other free\nprograms whose distribution conditions are different, write to the author\nto ask for permission.  For software which is copyrighted by the Free\nSoftware Foundation, write to the Free Software Foundation; we sometimes\nmake exceptions for this.  Our decision will be guided by the two goals\nof preserving the free status of all derivatives of our free software and\nof promoting the sharing and reuse of software generally.\n\n\t\t\t    NO WARRANTY\n\n  11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY\nFOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW.  
EXCEPT WHEN\nOTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES\nPROVIDE THE PROGRAM \"AS IS\" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED\nOR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF\nMERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.  THE ENTIRE RISK AS\nTO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU.  SHOULD THE\nPROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING,\nREPAIR OR CORRECTION.\n\n  12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING\nWILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR\nREDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES,\nINCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING\nOUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED\nTO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY\nYOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER\nPROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE\nPOSSIBILITY OF SUCH DAMAGES.\n\n\t\t     END OF TERMS AND CONDITIONS\n\f\n\tAppendix: How to Apply These Terms to Your New Programs\n\n  If you develop a new program, and you want it to be of the greatest\npossible use to the public, the best way to achieve this is to make it\nfree software which everyone can redistribute and change under these terms.\n\n  To do so, attach the following notices to the program.  
It is safest\nto attach them to the start of each source file to most effectively\nconvey the exclusion of warranty; and each file should have at least\nthe \"copyright\" line and a pointer to where the full notice is found.\n\n    <one line to give the program's name and a brief idea of what it does.>\n    Copyright (C) 19yy  <name of author>\n\n    This program is free software; you can redistribute it and/or modify\n    it under the terms of the GNU General Public License as published by\n    the Free Software Foundation; either version 2 of the License, or\n    (at your option) any later version.\n\n    This program is distributed in the hope that it will be useful,\n    but WITHOUT ANY WARRANTY; without even the implied warranty of\n    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n    GNU General Public License for more details.\n\n    You should have received a copy of the GNU General Public License\n    along with this program; if not, write to the Free Software\n    Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.\n\nAlso add information on how to contact you by electronic and paper mail.\n\nIf the program is interactive, make it output a short notice like this\nwhen it starts in an interactive mode:\n\n    Gnomovision version 69, Copyright (C) 19yy name of author\n    Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'.\n    This is free software, and you are welcome to redistribute it\n    under certain conditions; type `show c' for details.\n\nThe hypothetical commands `show w' and `show c' should show the appropriate\nparts of the General Public License.  Of course, the commands you use may\nbe called something other than `show w' and `show c'; they could even be\nmouse-clicks or menu items--whatever suits your program.\n\nYou should also get your employer (if you work as a programmer) or your\nschool, if any, to sign a \"copyright disclaimer\" for the program, if\nnecessary.  
Here is a sample; alter the names:\n\n  Yoyodyne, Inc., hereby disclaims all copyright interest in the program\n  `Gnomovision' (which makes passes at compilers) written by James Hacker.\n\n  <signature of Ty Coon>, 1 April 1989\n  Ty Coon, President of Vice\n\nThis General Public License does not permit incorporating your program into\nproprietary programs.  If your program is a subroutine library, you may\nconsider it more useful to permit linking proprietary applications with the\nlibrary.  If this is what you want to do, use the GNU Library General\nPublic License instead of this License.\n"
  },
  {
    "path": "wa/workloads/lmbench/bin/COPYING-2",
    "content": "%M% %I% %E%\n\nThe set of programs and documentation known as \"lmbench\" are distributed\nunder the Free Software Foundation's General Public License with the\nfollowing additional restrictions (which override any conflicting\nrestrictions in the GPL):\n\n1. You may not distribute results in any public forum, in any publication,\n   or in any other way if you have modified the benchmarks.  \n\n2. You may not distribute the results for a fee of any kind.  This includes\n   web sites which generate revenue from advertising.\n\nIf you have modifications or enhancements that you wish included in\nfuture versions, please mail those to me, Larry McVoy, at lm@bitmover.com.\n\n=========================================================================\n\nRationale for the publication restrictions:\n\nIn summary:\n\n    a) LMbench is designed to measure enough of an OS that if you do well in\n       all catagories, you've covered latency and bandwidth in networking,\n       disks, file systems, VM systems, and memory systems.\n    b) Multiple times in the past people have wanted to report partial results.\n       Without exception, they were doing so to show a skewed view of whatever\n       it was they were measuring (for example, one OS fit small processes into\n       segments and used the segment register to switch them, getting good \n       results, but did not want to report large process context switches \n       because those didn't look as good).\n    c) We insist that if you formally report LMbench results, you have to\n       report all of them and make the raw results file easily available.\n       Reporting all of them means in that same publication, a pointer\n       does not count.  
Formally, in this context, means in a paper,\n       on a web site, etc., but does not mean the exchange of results\n       between OS developers who are tuning a particular subsystem.\n\nWe have a lot of history with benchmarking and feel strongly that there\nis little to be gained and a lot to be lost if we allowed the results\nto be published in isolation, without the complete story being told.\n\nThere has been a lot of discussion about this, with people not liking this\nrestriction, more or less on the freedom principle as far as I can tell.\nWe're not swayed by that, our position is that we are doing the right\nthing for the OS community and will stick to our guns on this one.\n\nIt would be a different matter if there were 3 other competing\nbenchmarking systems out there that did what LMbench does and didn't have\nthe same reporting rules.  There aren't and as long as that is the case,\nI see no reason to change my mind and lots of reasons not to do so.  I'm\nsorry if I'm a pain in the ass on this topic, but I'm doing the right\nthing for you and the sooner people realize that the sooner we can get on\nto real work.\n\nOperating system design is a largely an art of balancing tradeoffs.\nIn many cases improving one part of the system has negative effects\non other parts of the system.  The art is choosing which parts to\noptimize and which to not optimize.  Just like in computer architecture,\nyou can optimize the common instructions (RISC) or the uncommon\ninstructions (CISC), but in either case there is usually a cost to\npay (in RISC uncommon instructions are more expensive than common\ninstructions, and in CISC common instructions are more expensive\nthan required).  The art lies in knowing which operations are \nimportant and optmizing those while minimizing the impact on the\nrest of the system.  
\n\nSince lmbench gives a good overview of many important system features,\nusers may see the performance of the system as a whole, and can\nsee where tradeoffs may have been made.  This is the driving force\nbehind the publication restriction: any idiot can optimize certain\nsubsystems while completely destroying overall system performance.\nIf said idiot publishes *only* the numbers relating to the optimized\nsubsystem, then the costs of the optimization are hidden and readers\nwill mistakenly believe that the optimization is a good idea.  By\nincluding the publication restriction readers would be able to\ndetect that the optimization improved the subsystem performance\nwhile damaging the rest of the system performance and would be able\nto make an informed decision as to the merits of the optimization.\n\nNote that these restrictions only apply to *publications*.  We\nintend and encourage lmbench's use during design, development,\nand tweaking of systems and applications.  If you are tuning the\nlinux or BSD TCP stack, then by all means, use the networking\nbenchmarks to evaluate the performance effects of various \nmodifications; Swap results with other developers; use the\nnetworking numbers in isolation.  The restrictions only kick\nin when you go to *publish* the results.  If you sped up the\nTCP stack by a factor of 2 and want to publish a paper with the\nvarious tweaks or algorithms used to accomplish this goal, then\nyou can publish the networking numbers to show the improvement.\nHowever, the paper *must* also include the rest of the standard\nlmbench numbers to show how your tweaks may (or may not) have\nimpacted the rest of the system.  The full set of numbers may\nbe included in an appendix, but they *must* be included in the\npaper.\n\nThis helps protect the community from adopting flawed technologies\nbased on incomplete data.  
It also helps protect the community from\nmisleading marketing which tries to sell systems based on partial\n(skewed) lmbench performance results.  \n\nWe have seen many cases in the past where partial or misleading\nbenchmark results have caused great harm to the community, and\nwe want to ensure that our benchmark is not used to perpetrate\nfurther harm and support false or misleading claims.\n\n\n"
  },
  {
    "path": "wa/workloads/lmbench/bin/README",
    "content": "This directory contains a subset of lmbench tests supported by Workload Automation.\n\nThe binaries are provided under the terms of the GNU General Public License, Version 2, \nconsistent with lmbench's additional restrictions. \nRefer to COPYING and COPYING-2 files for details.\n\nThe binaries were built from lmbench-3.0-a9 source code, available publicly from\n http://sourceforge.net/projects/lmbench/, specifically\n http://sourceforge.net/projects/lmbench/files/development/\n\nThe binaries provided here are built and statically linked with the stable \nLinaro GNU 4.9 toolchain from November 2014.\nSource available from: \n   http://releases.linaro.org/14.11/components/toolchain/gcc-linaro/4.9\nBinaries available from:\n   https://releases.linaro.org/14.11/components/toolchain/binaries/aarch64-linux-gnu\n   https://releases.linaro.org/14.11/components/toolchain/binaries/arm-linux-gnueabihf\n"
  },
  {
    "path": "wa/workloads/manual/__init__.py",
    "content": "#    Copyright 2013-2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n# pylint: disable=E1101,W0201,E0203\nimport os\n\nfrom wa import Workload, Parameter, ConfigError\nfrom wa.utils.misc import getch\n\n\nclass ManualWorkload(Workload):\n\n    name = 'manual'\n    description = \"\"\"\n    Yields control to the user, either for a fixed period or based on user\n    input, to perform custom operations on the device, which workload\n    automation does not know of.\n\n    \"\"\"\n    default_duration = 30\n\n    parameters = [\n        Parameter('duration', kind=int, default=None,\n                  description=\"\"\"\n                  Control of the devices is yielded for the duration (in\n                  seconds) specified.  If not specified, ``user_triggered`` is\n                  assumed.\n                  \"\"\"),\n        Parameter('user_triggered', kind=bool, default=None,\n                  description=\"\"\"\n                  If ``True``, WA will wait for user input after starting the\n                  workload; otherwise fixed duration is expected. Defaults to\n                  ``True`` if ``duration`` is not specified, and ``False``\n                  otherwise.\n                  \"\"\"),\n        Parameter('view', default='SurfaceView',\n                  description=\"\"\"\n                  Specifies the View of the workload. 
This enables instruments\n                  that require a View to be specified, such as the ``fps``\n                  instrument.  This is required for using \"SurfaceFlinger\" to\n                  collect FPS statistics and is primarily used on devices pre\n                  API level 23.\n                  \"\"\"),\n        Parameter('package',\n                  description=\"\"\"\n                  Specifies the package name of the workload. This enables\n                  instruments that require a Package to be specified, such as\n                  the ``fps`` instrument. This allows for \"gfxinfo\" to be used\n                  and is the preferred method of collection for FPS statistics\n                  on devices API level 23+.\n                  \"\"\"),\n    ]\n\n    def validate(self):\n        if self.duration is None:\n            if self.user_triggered is None:\n                self.user_triggered = True\n            elif self.user_triggered is False:\n                self.duration = self.default_duration\n        if self.user_triggered and self.duration:\n            message = 'Manual Workload can either specify duration or be user triggered, but not both'\n            raise ConfigError(message)\n        if not self.user_triggered and not self.duration:\n            raise ConfigError('Either user_triggered must be ``True`` or duration must be > 0.')\n\n    def setup(self, context):\n        self.logger.info('Any setup required by your workload should be done now.')\n        self.logger.info('As soon as you are done hit any key and wait for the message')\n        self.logger.info('\"START NOW!\" to begin your manual workload.')\n        self.logger.info('')\n        self.logger.info('hit any key to finalize your setup...')\n        getch()\n\n    def run(self, context):\n        self.logger.info('START NOW!')\n        if self.duration:\n            self.target.sleep(self.duration)\n        elif self.user_triggered:\n            
self.logger.info('')\n            self.logger.info('hit any key to end your workload execution...')\n            getch()\n        else:\n            raise ConfigError('Illegal parameters for manual workload')\n        self.logger.info('DONE!')\n"
  },
  {
    "path": "wa/workloads/meabo/__init__.py",
    "content": "#    Copyright 2016-2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport os\nimport re\nimport sys\n\nfrom wa import Workload, Parameter, Executable, ConfigError, WorkloadError\nfrom wa.utils.exec_control import once\nfrom wa.utils.types import list_of_ints\n\n\nphase_start_regex = re.compile(r\"Starting phase\\s+(?P<phase>\\d+)\")\ncounter_value_regex = re.compile(r\"Thread\\s+(?P<thread>\\d+)\\s+(?P<name>\\w+)\\svalue\\s+\\=\\s+(?P<value>\\d+)\")\nduration_regex = re.compile(r\"Phase\\s+(?P<phase>\\d+)[\\s\\w\\(\\)]+\\:\\s+(?P<duration>\\d+)\")\n\n\nclass Meabo(Workload):\n\n    name = 'meabo'\n    description = '''\n    A multi-phased multi-purpose micro-benchmark. The micro-benchmark is\n    composed of 10 phases that perform various generic calculations (from\n    memory to compute intensive).\n\n    It is a highly configurable tool which can be used for energy efficiency\n    studies, ARM big.LITTLE Linux scheduler analysis and DVFS studies. 
It can\n    be used for other benchmarking as well.\n\n    All floating-point calculations are double-precision.\n\n    |   Phase 1: Floating-point & integer computations with good data locality\n    |   Phase 2: Vector multiplication & addition, 1 level of indirection in 1\n    |            source vector\n    |   Phase 3: Vector scalar addition and reductions\n    |   Phase 4: Vector addition\n    |   Phase 5: Vector addition, 1 level of indirection in both source vectors\n    |   Phase 6: Sparse matrix-vector multiplication\n    |   Phase 7: Linked-list traversal\n    |   Phase 8: Electrostatic force calculations\n    |   Phase 9: Palindrome calculations\n    |   Phase 10: Random memory accesses\n\n    For more details and benchmark source, see:\n\n        https://github.com/ARM-software/meabo\n\n    .. note:: current implementation of automation relies on the executable to\n              be either statically linked or for all necessary dependencies to be\n              installed on the target.\n\n    '''\n\n    parameters = [\n        Parameter(\n            'array_size',\n            kind=int,\n            description='''\n            Size of arrays used in Phases 1, 2, 3, 4 and 5.\n            ''',\n            constraint=lambda x: x > 0,\n            default=1048576,\n        ),\n        Parameter(\n            'num_rows',\n            kind=int,\n            aliases=['nrow'],\n            description='''\n            Number of rows for the sparse matrix used in Phase 6.\n            ''',\n            constraint=lambda x: x > 0,\n            default=16384,\n        ),\n        Parameter(\n            'num_cols',\n            kind=int,\n            aliases=['ncol'],\n            description='''\n            Number of columns for the sparse matrix used in Phase 6.\n            ''',\n            constraint=lambda x: x > 0,\n            default=16384,\n        ),\n        Parameter(\n            'loops',\n            kind=int,\n            
aliases=['num_iterations'],\n            description='''\n            Number of iterations that core loop is executed.\n            ''',\n            constraint=lambda x: x > 0,\n            default=1000,\n        ),\n        Parameter(\n            'block_size',\n            kind=int,\n            description='''\n            Block size used in Phase 1.\n            ''',\n            constraint=lambda x: x > 0,\n            default=8,\n        ),\n        Parameter(\n            'num_cpus',\n            kind=int,\n            description='''\n            Number of total CPUs that the application can bind threads to.\n            ''',\n            constraint=lambda x: x > 0,\n            default=6,\n        ),\n        Parameter(\n            'per_phase_cpu_ids',\n            kind=list_of_ints,\n            description='''\n            Sets which cores each phase is run on.\n            ''',\n            constraint=lambda x: all(v >= -1 for v in x),\n            default=[-1] * 10,\n        ),\n        Parameter(\n            'num_hwcntrs',\n            kind=int,\n            description='''\n            Only available when using PAPI. 
Controls how many hardware counters\n            PAPI will get access to.\n            ''',\n            constraint=lambda x: x >= 0,\n            default=7,\n        ),\n        Parameter(\n            'run_phases',\n            kind=list_of_ints,\n            description='''\n            Controls which phases to run.\n            ''',\n            constraint=lambda x: all(0 < v <= 10 for v in x),\n            default=list(range(1, 11)),\n        ),\n        Parameter(\n            'threads',\n            kind=int,\n            aliases=['num_threads'],\n            description='''\n            Controls how many threads the application will be using.\n            ''',\n            constraint=lambda x: x >= 0,\n            default=0,\n        ),\n        Parameter(\n            'bind_to_cpu_set',\n            kind=int,\n            description='''\n            Controls whether threads will be bound to a core set, or each\n            individual thread will be bound to a specific core within the core\n            set.\n            ''',\n            constraint=lambda x: 0 <= x <= 1,\n            default=1,\n        ),\n        Parameter(\n            'llist_size',\n            kind=int,\n            description='''\n            Size of the linked list available for each thread.\n            ''',\n            constraint=lambda x: x > 0,\n            default=16777216,\n        ),\n        Parameter(\n            'num_particles',\n            kind=int,\n            description='''\n            Number of particles used in Phase 8.\n            ''',\n            constraint=lambda x: x > 0,\n            default=1048576,\n        ),\n        Parameter(\n            'num_palindromes',\n            kind=int,\n            description='''\n            Number of palindromes used in Phase 9.\n            ''',\n            constraint=lambda x: x > 0,\n            default=1024,\n        ),\n        Parameter(\n            'num_randomloc',\n            kind=int,\n            
description='''\n            Number of random memory locations accessed in Phase 10.\n            ''',\n            constraint=lambda x: x > 0,\n            default=2097152,\n        ),\n        Parameter(\n            'timeout',\n            kind=int,\n            description=\"\"\"\n            Timeout for execution of the test.\n            \"\"\",\n            aliases=['run_timeout'],\n            constraint=lambda x: x > 0,\n            default=60 * 45,\n        ),\n    ]\n\n    options = [\n        ('-s', 'array_size'),\n        ('-B', 'bind_to_cpu_set'),\n        ('-b', 'block_size'),\n        ('-l', 'llist_size'),\n        ('-c', 'num_col'),\n        ('-r', 'num_row'),\n        ('-C', 'num_cpus'),\n        ('-H', 'num_hwcntrs'),\n        ('-i', 'loops'),\n        ('-x', 'num_palindromes'),\n        ('-p', 'num_particles'),\n        ('-R', 'num_randomloc'),\n        ('-T', 'threads'),\n    ]\n\n    def validate(self):\n        if len(self.run_phases) != len(self.per_phase_cpu_ids):\n            msg = \"Number of phases doesn't match the number of CPU mappings\"\n            raise ConfigError(msg)\n\n    def initialize(self, context):\n        self._install_executable(context)\n        self._build_command()\n\n    def setup(self, context):\n        self.output = None\n\n    def run(self, context):\n        self.output = self.target.execute(self.command,\n                                          timeout=self.timeout)\n\n    def update_output(self, context):\n        if self.output is None:\n            self.logger.warning('Did not collect output')\n            return\n\n        outfile = os.path.join(context.output_directory, 'meabo-output.txt')\n        with open(outfile, 'wb') as wfh:\n            wfh.write(self.output.encode('utf-8'))\n        context.add_artifact('meabo-output', outfile, kind='raw')\n\n        cur_phase = 0\n        for line in self.output.split('\\n'):\n            line = line.strip()\n\n            match = 
phase_start_regex.search(line)\n            if match:\n                cur_phase = match.group('phase')\n\n            match = counter_value_regex.search(line)\n            if match:\n                if cur_phase == 0:\n                    msg = 'Matched thread performance counters outside of phase!'\n                    raise WorkloadError(msg)\n                name = 'phase_{}_thread_{}_{}'.format(cur_phase,\n                                                      match.group('thread'),\n                                                      match.group('name'))\n                context.add_metric(name, int(match.group('value')))\n\n            match = duration_regex.search(line)\n            if match:\n                context.add_metric(\"phase_{}_duration\".format(match.group('phase')),\n                                   int(match.group('duration')), units=\"ns\")\n\n    def finalize(self, context):\n        if self.uninstall:\n            self._uninstall_executable()\n\n    def _build_command(self):\n        self.command = self.target_exe\n\n        # We need to calculate the phase mask\n        phase_mask = 0\n        for phase in self.run_phases:\n            phase_mask |= 1 << (phase - 1)\n\n        self.command += ' -P {:d}'.format(phase_mask)\n\n        # Set the CPU ids for each phase we are running\n        for phase, cpu_id in zip(self.run_phases, self.per_phase_cpu_ids):\n            self.command += ' -{0:1d} {1:d}'.format(phase, cpu_id)\n\n        # We need to append extra arguments to the command based on the\n        # parameters passed in from the agenda.\n        for option, param_name in self.options:\n            param_value = getattr(self, param_name, None)\n            if param_value is not None:\n                self.command += ' {} {}'.format(option, param_value)\n\n    @once\n    def _install_executable(self, context):\n        resource = Executable(self, self.target.abi, 'meabo')\n        host_exe = context.get_resource(resource)\n        
Meabo.target_exe = self.target.install(host_exe)\n\n    @once\n    def _uninstall_executable(self):\n        self.target.uninstall(self.target_exe)\n"
  },
  {
    "path": "wa/workloads/memcpy/__init__.py",
    "content": "#    Copyright 2013-2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\n# pylint: disable=E1101,W0201\n\nimport os\nimport re\n\nfrom wa import Workload, Parameter, Executable\nfrom wa.utils.exec_control import once\nfrom wa.utils.types import cpu_mask\n\n\nTHIS_DIR = os.path.dirname(__file__)\n\n\nRESULT_REGEX = re.compile('Total time: ([\\d.]+) s.*Bandwidth: ([\\d.]+) MB/s', re.S)\n\n\nclass Memcpy(Workload):\n\n    name = 'memcpy'\n    description = \"\"\"\n    Runs memcpy in a loop.\n\n    This will run memcpy in a loop for a specified number of times on a buffer\n    of a specified size. Additionally, the affinity of the test can be set to\n    one or more specific cores.\n\n    This workload is single-threaded. 
It generates no scores or metrics by\n    itself.\n\n    \"\"\"\n\n    parameters = [\n        Parameter('buffer_size', kind=int, default=1024 * 1024 * 5,\n                  description='''\n                  Specifies the size, in bytes, of the buffer to be copied.\n                  '''),\n        Parameter('loops', kind=int, default=1000, aliases=['iterations'],\n                  description='''\n                  Specifies the number of iterations that will be performed.\n                  '''),\n        Parameter('cpus', kind=cpu_mask, default=0,\n                  description='''\n                  The cpus for which the affinity of the test\n                  process should be set, specified as a mask, as a list of\n                  cpus or a sysfs-style string. If not specified, all available\n                  cores will be used.\n                  '''),\n    ]\n\n    @once\n    def initialize(self, context):\n        self.binary_name = 'memcpy'\n        resource = Executable(self, self.target.abi, self.binary_name)\n        host_binary = context.get_resource(resource)\n        Memcpy.target_exe = self.target.install_if_needed(host_binary)\n\n    def setup(self, context):\n        self.command = '{} -i {} -s {}'.format(Memcpy.target_exe, self.loops, self.buffer_size)\n        for c in self.cpus.list():\n            self.command += ' -c {}'.format(c)\n        self.result = None\n\n    def run(self, context):\n        self.result = self.target.execute(self.command, timeout=300)\n\n    def extract_results(self, context):\n        if self.result:\n            match = RESULT_REGEX.search(self.result)\n            context.add_metric('time', float(match.group(1)), 'seconds', lower_is_better=True)\n            context.add_metric('bandwidth', float(match.group(2)), 'MB/s')\n\n    @once\n    def finalize(self, context):\n        if self.uninstall:\n            self.target.uninstall('memcpy')\n"
  },
  {
    "path": "wa/workloads/memcpy/src/build.sh",
    "content": "#    Copyright 2013-2017 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\n${CROSS_COMPILE}gcc -static memcopy.c -o memcopy\n"
  },
  {
    "path": "wa/workloads/memcpy/src/memcopy.c",
    "content": "/*    Copyright 2013-2017 ARM Limited\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n*/\n\n\n#define _GNU_SOURCE\n#include <stdlib.h>\n#include <stdio.h>\n#include <string.h>\n#include <sched.h>\n#include <unistd.h>\n#include <sys/syscall.h>\n#include <pthread.h>\n#include <time.h>\n\nconst int MAX_CPUS = 8;\nconst int DEFAULT_ITERATIONS = 1000;\nconst int DEFAULT_BUFFER_SIZE = 1024 * 1024 * 5;\n\nint set_affinity(size_t cpus_size, int* cpus)\n{\n\tint i;\n\tint mask = 0;\n\n\tfor(i = 0; i < cpus_size; ++i)\n\t{\n\t\tmask |= 1 << cpus[i];\n\t}\n\t\n\treturn syscall(__NR_sched_setaffinity, 0, sizeof(mask), &mask);\n}\n\nint main(int argc, char** argv)\n{\n\tint cpus[MAX_CPUS];\n\tint next_cpu = 0;\n\tint iterations = DEFAULT_ITERATIONS;\n\tint buffer_size = DEFAULT_BUFFER_SIZE;\n\t\n\tint c;\n\twhile ((c = getopt(argc, argv, \"i:c:s:\")) != -1)\n\t\tswitch (c)\n\t\t{\n\t\tcase 'c':\n\t\t\tcpus[next_cpu++] = atoi(optarg);\n\t\t\tif (next_cpu == MAX_CPUS)\n\t\t\t{\n\t\t\t\tfprintf(stderr, \"Max CPUs exceeded.\");\n\t\t\t\tabort();\n\t\t\t}\n\t\t\tbreak;\n\t\tcase 'i':\n\t\t\titerations = atoi(optarg);\n\t\t\tbreak;\n\t\tcase 's':\n\t\t\tbuffer_size = atoi(optarg);\n\t\t\tbreak;\n\t\tdefault:\n\t\t\tabort();\n\t\t\tbreak;\n\t\t}\n\n\tint ret;\n\tif (next_cpu != 0)\n\t\tif (ret = set_affinity(next_cpu, cpus))\n\t\t{\n\t\t\tfprintf(stderr, \"sched_setaffinity returnred %i.\", ret);\n\t\t\tabort();\n\t\t}\n\t\n\tchar* source  = 
malloc(buffer_size);\n\tchar* dest = malloc(buffer_size);\n\n\tstruct timespec before, after;\n\tif (clock_gettime(CLOCK_MONOTONIC, &before))\n\t{\n\t \tfprintf(stderr, \"Could not get start time.\");\n\t\tabort();\n\t}\n\n\tint i;\n\tfor (i = 0; i < iterations; ++i)\n\t{\n\t\tmemcpy(dest, source, buffer_size);\n\t}\n\n\tif (clock_gettime(CLOCK_MONOTONIC, &after))\n\t{\n\t \tfprintf(stderr, \"Could not get end time.\");\n\t\tabort();\n\t}\n\n\tfree(dest);\n\tfree(source);\n\n\tlong delta_sec =  (long)(after.tv_sec - before.tv_sec);\n\tlong delta_nsec = after.tv_nsec - before.tv_nsec;\n\tdouble delta = (double)delta_sec + delta_nsec / 1e9;\n\tprintf(\"Total time: %f s\\n\", delta);\n\tprintf(\"Bandwidth: %f MB/s\\n\", buffer_size / delta * iterations / 1e6);\n\n\treturn 0;\n}\n"
  },
  {
    "path": "wa/workloads/mongoperf/__init__.py",
    "content": "#    Copyright 2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\nimport json\nimport os\nfrom collections import defaultdict\n\nimport pandas as pd\n\nfrom wa import Workload, Parameter, ConfigError, TargetError, WorkloadError\nfrom wa.utils.exec_control import once\n\n\nclass Mongoperf(Workload):\n\n    name = 'mongoperf'\n    description = \"\"\"\n    A utility to check disk I/O performance independently of MongoDB.\n\n    It times tests of random disk I/O and presents the results. You can use\n    mongoperf for any case apart from MongoDB. The mmf true mode is completely\n    generic.\n\n    .. note:: ``mongoperf`` seems to ramp up threads in powers of two over a\n              period of tens of seconds (there doesn't appear to be a way to\n              change that). 
Bear this in mind when setting the ``duration``.\n    \"\"\"\n\n    parameters = [\n        Parameter('duration', kind=int, default=300,\n                  description=\"\"\"\n                  Duration of of the workload.\n                  \"\"\"),\n        Parameter('threads', kind=int, default=16,\n                  description=\"\"\"\n                  Defines the number of threads mongoperf will use in the test.\n                  To saturate the system storage system you will need\n                  multiple threads.\n                  \"\"\"),\n        Parameter('file_size_mb', kind=int, default=1,\n                  description=\"\"\"\n                  Test file size in MB.\n                  \"\"\"),\n        Parameter('sleep_micros', kind=int, default=0,\n                  description=\"\"\"\n                  mongoperf will pause for this number of microseconds  divided\n                  by the the number of threads between each operation.\n                  \"\"\"),\n        Parameter('mmf', kind=bool, default=True,\n                  description=\"\"\"\n                  When ``True``,  use memory mapped files for the tests.\n                  Generally:\n\n                  - when mmf is ``False``, mongoperf tests direct, physical, I/O,\n                    without caching. Use a large file size to test heavy random\n                    I/O load and to avoid I/O coalescing.\n                  - when mmf is ``True``, mongoperf runs tests of the caching\n                    system, and can use normal file system cache. Use mmf in\n                    this mode to test file system cache behavior with memory\n                    mapped files.\n                  \"\"\"),\n        Parameter('read', kind=bool, default=True,\n                  aliases=['r'],\n                  description=\"\"\"\n                  When ``True``,  perform reads as part of the test. 
Either\n                  ``read`` or ``write`` must be ``True``.\n                  \"\"\"),\n        Parameter('write', kind=bool, default=True,\n                  aliases=['w'],\n                  description=\"\"\"\n                  When ``True``,  perform writes as part of the test. Either\n                  ``read`` or ``write`` must be ``True``.\n                  \"\"\"),\n        Parameter('rec_size_kb', kind=int, default=4,\n                  description=\"\"\"\n                  The size of each write operation\n                  \"\"\"),\n        Parameter('sync_delay', kind=int, default=0,\n                  description=\"\"\"\n                  Seconds between disk flushes. Only use this if ``mmf`` is set\n                  to ``True``.\n                  \"\"\"),\n    ]\n\n    def validate(self):\n        if not self.read and not self.write:\n            raise ConfigError('Either \"read\" or \"write\" must be True.')\n        if not self.mmf and self.sync_delay:\n            raise ConfigError('sync_delay can only be set if mmf is True')\n\n    @once\n    def initialize(self, context):\n        try:\n            self.target.execute('mongoperf -h')\n        except TargetError:\n            raise WorkloadError('Mongoperf must be installed and in $PATH on the target.')\n\n    def setup(self, context):\n        config = {}\n        config['nThreads'] = self.threads\n        config['fileSizeMB'] = self.file_size_mb\n        config['sleepMicros'] = self.sleep_micros\n        config['mmf'] = self.mmf\n        config['r'] = self.read\n        config['w'] = self.write\n        config['recSizeKB'] = self.rec_size_kb\n        config['syncDelay'] = self.sync_delay\n\n        config_text = json.dumps(config)\n        self.outfile = self.target.get_workpath('mongperf.out')\n        self.command = 'echo \"{}\" | mongoperf > {}'.format(config_text, self.outfile)\n\n    def run(self, context):\n        self.target.kick_off(self.command)\n        
self.target.sleep(self.duration)\n        self.target.killall('mongoperf', signal='SIGTERM')\n\n    def extract_results(self, context):\n        host_outfile = os.path.join(context.output_directory, 'mongoperf.out')\n        self.target.pull(self.outfile, host_outfile)\n        context.add_artifact('mongoperf-output', host_outfile, kind='raw')\n\n    def update_output(self, context):\n        host_file = context.get_artifact_path('mongoperf-output')\n        results = defaultdict(list)\n        threads = None\n        with open(host_file) as fh:\n            for line in fh:\n                if 'new thread,' in line:\n                    threads = int(line.split()[-1])\n                elif 'ops/sec' in line:\n                    results[threads].append(int(line.split()[0]))\n\n        if not results:\n            raise WorkloadError('No mongoperf results found in the output.')\n\n        for threads, values in results.items():\n            rs = pd.Series(values)\n            context.add_metric('ops_per_sec', rs.mean(),\n                               classifiers={'threads': threads})\n            context.add_metric('ops_per_sec_std', rs.std(), lower_is_better=True,\n                               classifiers={'threads': threads})\n\n    def teardown(self, context):\n        if self.cleanup_assets:\n            self.target.remove(self.outfile)\n"
  },
  {
    "path": "wa/workloads/motionmark/__init__.py",
    "content": "#    Copyright 2014-2019 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\nimport os\nimport re\n\nfrom wa import UiautoWorkload, Parameter\nfrom wa.framework.exception import ValidationError, WorkloadError\nfrom wa.utils.types import list_of_strs\nfrom wa.utils.misc import unique\n\n\nclass Motionmark(UiautoWorkload):\n\n    name = 'motionmark'\n\n    description = '''\n    A workload to execute the motionmark web based benchmark\n\n    MotionMark is a graphics benchmark that measures a browser capability to animate complex scenes at a target frame rate\n\n    Test description:\n    1. Open browser application\n    2. Navigate to the motionmark website - http://browserbench.org/MotionMark/\n    3. 
Execute the benchmark\n    '''\n\n    requires_network = True\n\n    regex = [re.compile(r'Multiply Score (.+)'),\n             re.compile(r'Canvas Score (.+)'),\n             re.compile(r'Leaves Score (.+)'),\n             re.compile(r'Paths Score (.+)'),\n             re.compile(r'Canvas Lines Score (.+)'),\n             re.compile(r'Focus Score (.+)'),\n             re.compile(r'Images Score (.+)'),\n             re.compile(r'Design Score (.+)'),\n             re.compile(r'Suits Score (.+)')]\n    score_regex = re.compile(r'.*?([\\d.]+).*')\n\n    def __init__(self, target, **kwargs):\n        super(Motionmark, self).__init__(target, **kwargs)\n        self.gui.timeout = 1500\n\n    def setup(self, context):\n        super(Motionmark, self).setup(context)\n        self.target.open_url('https://browserbench.org/MotionMark/')\n\n    def update_output(self, context):\n        super(Motionmark, self).update_output(context)\n        num_unprocessed_results = len(self.regex)\n        logcat_file = context.get_artifact_path('logcat')\n        with open(logcat_file, errors='replace') as fh:\n            for line in fh:\n                for regex in self.regex:\n                    match = regex.search(line)\n                    # Check if we have matched the score string in logcat\n                    if match:\n                        score_match = self.score_regex.search(match.group(1))\n                        # Check if there is valid number found for the score.\n                        if score_match:\n                            result = float(score_match.group(1))\n                        else:\n                            result = float('NaN')\n                        entry = regex.pattern.rsplit(None, 1)[0]\n                        context.add_metric(entry, result, 'Score', lower_is_better=False)\n                        num_unprocessed_results -= 1\n        if num_unprocessed_results > 0:\n            msg = \"The Motionmark workload has failed. 
Expected {} scores, Missing {} scores.\"\n            raise WorkloadError(msg.format(len(self.regex), num_unprocessed_results))\n"
  },
  {
    "path": "wa/workloads/motionmark/uiauto/app/build.gradle",
    "content": "apply plugin: 'com.android.application'\n\ndef packageName = \"com.arm.wa.uiauto.motionmark\"\n\nandroid {\n    compileSdkVersion 28\n    buildToolsVersion \"28.0.3\"\n    defaultConfig {\n        applicationId \"${packageName}\"\n        minSdkVersion 18\n        targetSdkVersion 28\n        versionCode 1\n        versionName \"1.0\"\n        testInstrumentationRunner \"android.support.test.runner.AndroidJUnitRunner\"\n    }\n    buildTypes {\n        release {\n            minifyEnabled false\n            proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'\n        }\n        applicationVariants.all { variant ->\n            variant.outputs.each { output ->\n                output.outputFileName = \"${packageName}.apk\"\n            }\n        }\n    }\n}\n\ndependencies {\n    implementation fileTree(dir: 'libs', include: ['*.jar'])\n    implementation 'com.android.support.test:runner:0.5'\n    implementation 'com.android.support.test:rules:0.5'\n    implementation 'com.android.support.test.uiautomator:uiautomator-v18:2.1.2'\n    implementation(name: 'uiauto', ext:'aar')\n}\n\nrepositories {\n    flatDir {\n        dirs 'libs'\n    }\n}\n"
  },
  {
    "path": "wa/workloads/motionmark/uiauto/app/src/main/AndroidManifest.xml",
    "content": "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n<manifest xmlns:android=\"http://schemas.android.com/apk/res/android\"\n    package=\"com.arm.wa.uiauto.motionmark\"\n    android:versionCode=\"1\"\n    android:versionName=\"1.0\">\n\n\n    <instrumentation\n        android:name=\"android.support.test.runner.AndroidJUnitRunner\"\n        android:targetPackage=\"${applicationId}\"/>\n\n</manifest>\n\n"
  },
  {
    "path": "wa/workloads/motionmark/uiauto/app/src/main/java/com/arm/wa/uiauto/motionmark/UiAutomation.java",
    "content": "/*    Copyright 2014-2019 ARM Limited\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\npackage com.arm.wa.uiauto.motionmark;\n\nimport android.os.Bundle;\nimport android.support.test.runner.AndroidJUnit4;\nimport android.support.test.uiautomator.UiObject;\nimport android.support.test.uiautomator.UiObjectNotFoundException;\nimport android.support.test.uiautomator.UiSelector;\nimport android.support.test.uiautomator.UiScrollable;\n\nimport com.arm.wa.uiauto.BaseUiAutomation;\nimport android.util.Log;\n\n\nimport org.junit.Before;\nimport org.junit.Test;\nimport org.junit.runner.RunWith;\n\nimport java.util.concurrent.TimeUnit;\n\n@RunWith(AndroidJUnit4.class)\npublic class UiAutomation extends BaseUiAutomation {\n\n    private int networkTimeoutSecs = 30;\n    private long networkTimeout =  TimeUnit.SECONDS.toMillis(networkTimeoutSecs);\n    public static String TAG = \"UXPERF\";\n    public boolean textenabled = false;\n\n    @Before\n    public void initialize(){\n        initialize_instrumentation();\n    }\n\n    @Test\n    public void setup() throws Exception{\n        clearFirstRun();\n    }\n\n    @Test\n    public void runWorkload() throws Exception {\n        runBenchmark();\n    }\n\n    @Test\n    public void teardown() throws Exception{\n        clearTabs();\n        unsetScreenOrientation();\n    }\n\n    public void clearFirstRun() throws Exception {\n        UiObject accept =\n            mDevice.findObject(new 
UiSelector().resourceId(\"com.android.chrome:id/terms_accept\")\n                .className(\"android.widget.Button\"));\n        if (accept.exists()){\n            accept.click();\n            UiObject negative =\n                mDevice.findObject(new UiSelector().resourceId(\"com.android.chrome:id/negative_button\")\n                    .className(\"android.widget.Button\"));\n            negative.waitForExists(100000);\n            negative.click();\n        }\n    }\n\n    public void runBenchmark() throws Exception {\n        setScreenOrientation(ScreenOrientation.LANDSCAPE);\n        UiScrollable list = new UiScrollable(new UiSelector().scrollable(true));\n\n        UiObject start =\n            mDevice.findObject(new UiSelector().text(\"Run Benchmark\")\n                .className(\"android.widget.Button\"));\n        list.swipeUp(10);\n        if (start.exists()){\n            start.click();\n        } else {\n            UiObject startDesc =\n                mDevice.findObject(new UiSelector().description(\"Run Benchmark\")\n                    .className(\"android.widget.Button\"));\n            startDesc.click();\n        }\n\n        UiObject results =\n            mDevice.findObject(new UiSelector().resourceId(\"results-score\")\n                .className(\"android.widget.GridView\"));\n        results.waitForExists(2100000);\n\n        setScreenOrientation(ScreenOrientation.PORTRAIT);\n\n        UiObject multiply = \n            mDevice.findObject(new UiSelector().resourceId(\"results-score\"))\n            .getChild(new UiSelector().index(2))\n            .getChild(new UiSelector().index(0));            \n        Log.d(TAG, \"Multiply Score \" + multiply.getText());\n\n        UiObject canvas = \n            mDevice.findObject(new UiSelector().resourceId(\"results-score\"))\n            .getChild(new UiSelector().index(3))\n            .getChild(new UiSelector().index(0));\n        Log.d(TAG, \"Canvas Score \" + canvas.getText());\n\n        
UiObject leaves = \n            mDevice.findObject(new UiSelector().resourceId(\"results-score\"))\n            .getChild(new UiSelector().index(4))\n            .getChild(new UiSelector().index(0));\n        Log.d(TAG, \"Leaves Score \" + leaves.getText());\n\n        UiObject paths = \n            mDevice.findObject(new UiSelector().resourceId(\"results-score\"))\n            .getChild(new UiSelector().index(5))\n            .getChild(new UiSelector().index(0));\n        Log.d(TAG, \"Paths Score \" + paths.getText());\n\n        UiObject canvaslines = \n            mDevice.findObject(new UiSelector().resourceId(\"results-score\"))\n            .getChild(new UiSelector().index(6))\n            .getChild(new UiSelector().index(0));\n        if (!canvaslines.exists() && list.waitForExists(60)) {\n            list.scrollIntoView(canvaslines);\n        }\n        Log.d(TAG, \"Canvas Lines Score \" + canvaslines.getText());\n\n        UiObject focus = \n            mDevice.findObject(new UiSelector().resourceId(\"results-score\"))\n            .getChild(new UiSelector().index(7))\n            .getChild(new UiSelector().index(0));\n        if (!focus.exists() && list.waitForExists(60)) {\n            list.scrollIntoView(focus);\n        }\n        Log.d(TAG, \"Focus Score \" + focus.getText());\n\n        UiObject images = \n            mDevice.findObject(new UiSelector().resourceId(\"results-score\"))\n            .getChild(new UiSelector().index(8))\n            .getChild(new UiSelector().index(0));\n        if (!images.exists() && list.waitForExists(60)) {\n            list.scrollIntoView(images);\n        }\n        Log.d(TAG, \"Images Score \" + images.getText());\n\n        UiObject design = \n            mDevice.findObject(new UiSelector().resourceId(\"results-score\"))\n            .getChild(new UiSelector().index(9))\n            .getChild(new UiSelector().index(0));\n        if (!design.exists() && list.waitForExists(60)) {\n            
list.scrollIntoView(design);\n        }\n        Log.d(TAG, \"Design Score \" + design.getText());\n\n        UiObject suits = \n            mDevice.findObject(new UiSelector().resourceId(\"results-score\"))\n            .getChild(new UiSelector().index(10))\n            .getChild(new UiSelector().index(0));\n        if (!suits.exists() && list.waitForExists(60)) {\n            list.scrollIntoView(suits);\n        }\n        Log.d(TAG, \"Suits Score \" + suits.getText());\n    }\n\n    public void clearTabs() throws Exception {\n        UiObject tabselector =\n            mDevice.findObject(new UiSelector().resourceId(\"com.android.chrome:id/tab_switcher_button\")\n                .className(\"android.widget.ImageButton\"));\n        if (!tabselector.exists()){\n            return;\n        }\n        tabselector.click();\n        UiObject menu =\n            mDevice.findObject(new UiSelector().resourceId(\"com.android.chrome:id/menu_button\")\n                .className(\"android.widget.ImageButton\"));\n        menu.click();\n        UiObject closetabs =\n            mDevice.findObject(new UiSelector().textContains(\"Close all tabs\"));\n        closetabs.click();\n    }\n}\n"
  },
  {
    "path": "wa/workloads/motionmark/uiauto/build.gradle",
    "content": "// Top-level build file where you can add configuration options common to all sub-projects/modules.\n\nbuildscript {\n    repositories {\n        jcenter()\n        google()\n    }\n    dependencies {\n        classpath 'com.android.tools.build:gradle:7.2.1'\n\n        // NOTE: Do not place your application dependencies here; they belong\n        // in the individual module build.gradle files\n    }\n}\n\nallprojects {\n    repositories {\n        jcenter()\n        google()\n    }\n}\n\ntask clean(type: Delete) {\n    delete rootProject.buildDir\n}\n"
  },
  {
    "path": "wa/workloads/motionmark/uiauto/build.sh",
    "content": "#!/bin/bash\n#    Copyright 2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\n\n# CD into build dir if possible - allows building from any directory\nscript_path='.'\nif `readlink -f $0 &>/dev/null`; then\n    script_path=`readlink -f $0 2>/dev/null`\nfi\nscript_dir=`dirname $script_path`\ncd $script_dir\n\n# Ensure gradelw exists before starting\nif [[ ! -f gradlew ]]; then\n    echo 'gradlew file not found! Check that you are in the right directory.'\n    exit 9\nfi\n\n# Copy base class library from wa dist\nlibs_dir=app/libs\nbase_class=`python3 -c \"import os, wa; print(os.path.join(os.path.dirname(wa.__file__), 'framework', 'uiauto', 'uiauto.aar'))\"`\nmkdir -p $libs_dir\ncp $base_class $libs_dir\n\n# Build and return appropriate exit code if failed\n# gradle build\n./gradlew clean :app:assembleDebug\nexit_code=$?\nif [[ $exit_code -ne 0 ]]; then\n    echo \"ERROR: 'gradle build' exited with code $exit_code\"\n    exit $exit_code\nfi\n\n# If successful move APK file to workload folder (overwrite previous)\npackage=com.arm.wa.uiauto.motionmark\nrm -f ../$package\nif [[ -f app/build/outputs/apk/debug/$package.apk ]]; then\n    cp app/build/outputs/apk/debug/$package.apk ../$package.apk\nelse\n    echo 'ERROR: UiAutomator apk could not be found!'\n    exit 9\nfi\n"
  },
  {
    "path": "wa/workloads/motionmark/uiauto/gradle/wrapper/gradle-wrapper.properties",
    "content": "#Wed May 03 15:42:44 BST 2017\ndistributionBase=GRADLE_USER_HOME\ndistributionPath=wrapper/dists\nzipStoreBase=GRADLE_USER_HOME\nzipStorePath=wrapper/dists\ndistributionUrl=https\\://services.gradle.org/distributions/gradle-7.3.3-all.zip\n"
  },
  {
    "path": "wa/workloads/motionmark/uiauto/gradlew",
    "content": "#!/usr/bin/env bash\n\n##############################################################################\n##\n##  Gradle start up script for UN*X\n##\n##############################################################################\n\n# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.\nDEFAULT_JVM_OPTS=\"\"\n\nAPP_NAME=\"Gradle\"\nAPP_BASE_NAME=`basename \"$0\"`\n\n# Use the maximum available, or set MAX_FD != -1 to use that value.\nMAX_FD=\"maximum\"\n\nwarn ( ) {\n    echo \"$*\"\n}\n\ndie ( ) {\n    echo\n    echo \"$*\"\n    echo\n    exit 1\n}\n\n# OS specific support (must be 'true' or 'false').\ncygwin=false\nmsys=false\ndarwin=false\ncase \"`uname`\" in\n  CYGWIN* )\n    cygwin=true\n    ;;\n  Darwin* )\n    darwin=true\n    ;;\n  MINGW* )\n    msys=true\n    ;;\nesac\n\n# Attempt to set APP_HOME\n# Resolve links: $0 may be a link\nPRG=\"$0\"\n# Need this for relative symlinks.\nwhile [ -h \"$PRG\" ] ; do\n    ls=`ls -ld \"$PRG\"`\n    link=`expr \"$ls\" : '.*-> \\(.*\\)$'`\n    if expr \"$link\" : '/.*' > /dev/null; then\n        PRG=\"$link\"\n    else\n        PRG=`dirname \"$PRG\"`\"/$link\"\n    fi\ndone\nSAVED=\"`pwd`\"\ncd \"`dirname \\\"$PRG\\\"`/\" >/dev/null\nAPP_HOME=\"`pwd -P`\"\ncd \"$SAVED\" >/dev/null\n\nCLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar\n\n# Determine the Java command to use to start the JVM.\nif [ -n \"$JAVA_HOME\" ] ; then\n    if [ -x \"$JAVA_HOME/jre/sh/java\" ] ; then\n        # IBM's JDK on AIX uses strange locations for the executables\n        JAVACMD=\"$JAVA_HOME/jre/sh/java\"\n    else\n        JAVACMD=\"$JAVA_HOME/bin/java\"\n    fi\n    if [ ! 
-x \"$JAVACMD\" ] ; then\n        die \"ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME\n\nPlease set the JAVA_HOME variable in your environment to match the\nlocation of your Java installation.\"\n    fi\nelse\n    JAVACMD=\"java\"\n    which java >/dev/null 2>&1 || die \"ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.\n\nPlease set the JAVA_HOME variable in your environment to match the\nlocation of your Java installation.\"\nfi\n\n# Increase the maximum file descriptors if we can.\nif [ \"$cygwin\" = \"false\" -a \"$darwin\" = \"false\" ] ; then\n    MAX_FD_LIMIT=`ulimit -H -n`\n    if [ $? -eq 0 ] ; then\n        if [ \"$MAX_FD\" = \"maximum\" -o \"$MAX_FD\" = \"max\" ] ; then\n            MAX_FD=\"$MAX_FD_LIMIT\"\n        fi\n        ulimit -n $MAX_FD\n        if [ $? -ne 0 ] ; then\n            warn \"Could not set maximum file descriptor limit: $MAX_FD\"\n        fi\n    else\n        warn \"Could not query maximum file descriptor limit: $MAX_FD_LIMIT\"\n    fi\nfi\n\n# For Darwin, add options to specify how the application appears in the dock\nif $darwin; then\n    GRADLE_OPTS=\"$GRADLE_OPTS \\\"-Xdock:name=$APP_NAME\\\" \\\"-Xdock:icon=$APP_HOME/media/gradle.icns\\\"\"\nfi\n\n# For Cygwin, switch paths to Windows format before running java\nif $cygwin ; then\n    APP_HOME=`cygpath --path --mixed \"$APP_HOME\"`\n    CLASSPATH=`cygpath --path --mixed \"$CLASSPATH\"`\n    JAVACMD=`cygpath --unix \"$JAVACMD\"`\n\n    # We build the pattern for arguments to be converted via cygpath\n    ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`\n    SEP=\"\"\n    for dir in $ROOTDIRSRAW ; do\n        ROOTDIRS=\"$ROOTDIRS$SEP$dir\"\n        SEP=\"|\"\n    done\n    OURCYGPATTERN=\"(^($ROOTDIRS))\"\n    # Add a user-defined pattern to the cygpath arguments\n    if [ \"$GRADLE_CYGPATTERN\" != \"\" ] ; then\n        OURCYGPATTERN=\"$OURCYGPATTERN|($GRADLE_CYGPATTERN)\"\n    fi\n    # Now convert the arguments - 
kludge to limit ourselves to /bin/sh\n    i=0\n    for arg in \"$@\" ; do\n        CHECK=`echo \"$arg\"|egrep -c \"$OURCYGPATTERN\" -`\n        CHECK2=`echo \"$arg\"|egrep -c \"^-\"`                                 ### Determine if an option\n\n        if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then                    ### Added a condition\n            eval `echo args$i`=`cygpath --path --ignore --mixed \"$arg\"`\n        else\n            eval `echo args$i`=\"\\\"$arg\\\"\"\n        fi\n        i=$((i+1))\n    done\n    case $i in\n        (0) set -- ;;\n        (1) set -- \"$args0\" ;;\n        (2) set -- \"$args0\" \"$args1\" ;;\n        (3) set -- \"$args0\" \"$args1\" \"$args2\" ;;\n        (4) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" ;;\n        (5) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" ;;\n        (6) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" \"$args5\" ;;\n        (7) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" \"$args5\" \"$args6\" ;;\n        (8) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" \"$args5\" \"$args6\" \"$args7\" ;;\n        (9) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" \"$args5\" \"$args6\" \"$args7\" \"$args8\" ;;\n    esac\nfi\n\n# Split up the JVM_OPTS And GRADLE_OPTS values into an array, following the shell quoting and substitution rules\nfunction splitJvmOpts() {\n    JVM_OPTS=(\"$@\")\n}\neval splitJvmOpts $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS\nJVM_OPTS[${#JVM_OPTS[*]}]=\"-Dorg.gradle.appname=$APP_BASE_NAME\"\n\nexec \"$JAVACMD\" \"${JVM_OPTS[@]}\" -classpath \"$CLASSPATH\" org.gradle.wrapper.GradleWrapperMain \"$@\"\n"
  },
  {
    "path": "wa/workloads/motionmark/uiauto/gradlew.bat",
    "content": "@if \"%DEBUG%\" == \"\" @echo off\r\n@rem ##########################################################################\r\n@rem\r\n@rem  Gradle startup script for Windows\r\n@rem\r\n@rem ##########################################################################\r\n\r\n@rem Set local scope for the variables with windows NT shell\r\nif \"%OS%\"==\"Windows_NT\" setlocal\r\n\r\n@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.\r\nset DEFAULT_JVM_OPTS=\r\n\r\nset DIRNAME=%~dp0\r\nif \"%DIRNAME%\" == \"\" set DIRNAME=.\r\nset APP_BASE_NAME=%~n0\r\nset APP_HOME=%DIRNAME%\r\n\r\n@rem Find java.exe\r\nif defined JAVA_HOME goto findJavaFromJavaHome\r\n\r\nset JAVA_EXE=java.exe\r\n%JAVA_EXE% -version >NUL 2>&1\r\nif \"%ERRORLEVEL%\" == \"0\" goto init\r\n\r\necho.\r\necho ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.\r\necho.\r\necho Please set the JAVA_HOME variable in your environment to match the\r\necho location of your Java installation.\r\n\r\ngoto fail\r\n\r\n:findJavaFromJavaHome\r\nset JAVA_HOME=%JAVA_HOME:\"=%\r\nset JAVA_EXE=%JAVA_HOME%/bin/java.exe\r\n\r\nif exist \"%JAVA_EXE%\" goto init\r\n\r\necho.\r\necho ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%\r\necho.\r\necho Please set the JAVA_HOME variable in your environment to match the\r\necho location of your Java installation.\r\n\r\ngoto fail\r\n\r\n:init\r\n@rem Get command-line arguments, handling Windowz variants\r\n\r\nif not \"%OS%\" == \"Windows_NT\" goto win9xME_args\r\nif \"%@eval[2+2]\" == \"4\" goto 4NT_args\r\n\r\n:win9xME_args\r\n@rem Slurp the command line arguments.\r\nset CMD_LINE_ARGS=\r\nset _SKIP=2\r\n\r\n:win9xME_args_slurp\r\nif \"x%~1\" == \"x\" goto execute\r\n\r\nset CMD_LINE_ARGS=%*\r\ngoto execute\r\n\r\n:4NT_args\r\n@rem Get arguments from the 4NT Shell from JP Software\r\nset CMD_LINE_ARGS=%$\r\n\r\n:execute\r\n@rem Setup the command line\r\n\r\nset 
CLASSPATH=%APP_HOME%\\gradle\\wrapper\\gradle-wrapper.jar\r\n\r\n@rem Execute Gradle\r\n\"%JAVA_EXE%\" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% \"-Dorg.gradle.appname=%APP_BASE_NAME%\" -classpath \"%CLASSPATH%\" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%\r\n\r\n:end\r\n@rem End local scope for the variables with windows NT shell\r\nif \"%ERRORLEVEL%\"==\"0\" goto mainEnd\r\n\r\n:fail\r\nrem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of\r\nrem the _cmd.exe /c_ return code!\r\nif  not \"\" == \"%GRADLE_EXIT_CONSOLE%\" exit 1\r\nexit /b 1\r\n\r\n:mainEnd\r\nif \"%OS%\"==\"Windows_NT\" endlocal\r\n\r\n:omega\r\n"
  },
  {
    "path": "wa/workloads/motionmark/uiauto/settings.gradle",
    "content": "include ':app'\n"
  },
  {
    "path": "wa/workloads/openssl/__init__.py",
    "content": "#    Copyright 2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\nimport os\n\nfrom wa import Workload, Parameter, TargetError, WorkloadError, Executable, Alias\nfrom wa.utils.exec_control import once\n\n\nBLOCK_SIZES = [16, 64, 256, 1024, 8192, 16384]\n\nECD = ['secp160r1', 'nistp192', 'nistp224', 'nistp256', 'nistp384', 'nistp521',\n       'nistk163', 'nistk233', 'nistk283', 'nistk409', 'nistk571', 'nistb163',\n       'nistb233', 'nistb283', 'nistb409', 'nistb571', 'curve25519']\n\nCIPHER_PKI = ['rsa', 'dsa', 'ecdh', 'ecdsa']\nEVP_NEW = ['aes-128-cbc', 'aes-192-cbc', 'aes-256-cbc', 'aes-128-gcm', 'aes-192-gcm',\n           'aes-256-gcm', 'sha1', 'sha256', 'sha384', 'sha512']\n\n\nclass Openssl(Workload):\n\n    name = 'openssl'\n\n    description = '''\n    Benchmark Openssl algorithms using Openssl's speed command.\n\n    The command tests how long it takes to perfrom typical SSL operations using\n    a range of supported algorithms and ciphers.\n\n    By defalt, this workload will use openssl installed on the target, however\n    it is possible to provide an alternative binary as a workload resource.\n\n    '''\n\n    parameters = [\n        Parameter('algorithm', default='aes-256-cbc',\n                  allowed_values=EVP_NEW + CIPHER_PKI,\n                  description='''\n                  Algorithm to benchmark.\n                  '''),\n        Parameter('threads', kind=int, default=1,\n                  
description='''\n                  The number of threads to use\n                  '''),\n        Parameter('use_system_binary', kind=bool, default=True,\n                  description='''\n                  If ``True``, the system Openssl binary will be used.\n                  Otherwise, use the binary provided in the workload\n                  resources.\n                  '''),\n    ]\n\n    aliases = [Alias('ossl-' + algo, algorithm=algo)\n               for algo in EVP_NEW + CIPHER_PKI]\n\n    @once\n    def initialize(self, context):\n        if self.use_system_binary:\n            try:\n                cmd = '{0} md5sum < $({0} which openssl)'\n                output = self.target.execute(cmd.format(self.target.busybox))\n                md5hash = output.split()[0]\n                version = self.target.execute('openssl version').strip()\n                context.update_metadata('hashes', 'openssl', md5hash)\n                context.update_metadata('versions', 'openssl', version)\n            except TargetError:\n                msg = 'Openssl does not appear to be installed on target.'\n                raise WorkloadError(msg)\n            Openssl.target_exe = 'openssl'\n        else:\n            resource = Executable(self, self.target.abi, 'openssl')\n            host_exe = context.get_resource(resource)\n            Openssl.target_exe = self.target.install(host_exe)\n\n    def setup(self, context):\n        self.output = None\n        if self.algorithm in EVP_NEW:\n            cmd_template = '{} speed -mr -multi {} -evp {}'\n        else:\n            cmd_template = '{} speed -mr -multi {} {}'\n        self.command = cmd_template.format(self.target_exe, self.threads, self.algorithm)\n\n    def run(self, context):\n        self.output = self.target.execute(self.command)\n\n    def extract_results(self, context):\n        if not self.output:\n            return\n\n        outfile = os.path.join(context.output_directory, 'openssl.output')\n        with 
open(outfile, 'w') as wfh:\n            wfh.write(self.output)\n        context.add_artifact('openssl-output', outfile, 'raw', 'openssl\\'s stdout')\n\n    def update_output(self, context):\n        if not self.output:\n            return\n\n        for line in self.output.split('\\n'):\n            line = line.strip()\n\n            if not line.startswith('+F'):\n                continue\n\n            parts = line.split(':')\n            if parts[0] == '+F':  # evp ciphers\n                for bs, value in zip(BLOCK_SIZES, list(map(float, parts[3:]))):\n                    value = value / 2**20  # to MB\n                    context.add_metric('score', value, 'MB/s',\n                                       classifiers={'block_size': bs})\n            elif parts[0] in ['+F2', '+F3']:  # rsa, dsa\n                key_len = int(parts[2])\n                sign = float(parts[3])\n                verify = float(parts[4])\n                context.add_metric('sign', sign, 'seconds',\n                                   classifiers={'key_length': key_len})\n                context.add_metric('verify', verify, 'seconds',\n                                   classifiers={'key_length': key_len})\n            elif parts[0] == '+F4':  # ecdsa\n                ec_idx = int(parts[1])\n                key_len = int(parts[2])\n                sign = float(parts[3])\n                verify = float(parts[4])\n                context.add_metric('sign', sign, 'seconds',\n                                   classifiers={'key_length': key_len,\n                                                'curve': ECD[ec_idx]})\n                context.add_metric('verify', verify, 'seconds',\n                                   classifiers={'key_length': key_len,\n                                                'curve': ECD[ec_idx]})\n            elif parts[0] == '+F5':  # ecdh\n                ec_idx = int(parts[1])\n                key_len = int(parts[2])\n                op_time = float(parts[3])\n     
           ops_per_sec = float(parts[4])\n                context.add_metric('op', op_time, 'seconds',\n                                   classifiers={'key_length': key_len,\n                                                'curve': ECD[ec_idx]})\n                context.add_metric('ops_per_sec', ops_per_sec, 'Hz',\n                                   classifiers={'key_length': key_len,\n                                                'curve': ECD[ec_idx]})\n            else:\n                self.logger.warning('Unexpected result: \"{}\"'.format(line))\n\n    @once\n    def finalize(self, context):\n        if not self.use_system_binary and self.uninstall:\n            self.target.uninstall('openssl')\n"
  },
  {
    "path": "wa/workloads/pcmark/__init__.py",
    "content": "#    Copyright 2014-2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\nimport os\nimport re\nimport sys\nimport zipfile\n\nfrom wa import ApkUiautoWorkload, Parameter\nfrom wa.framework.exception import WorkloadError\n\n\nclass PcMark(ApkUiautoWorkload):\n\n    name = 'pcmark'\n\n    supported_versions = ['3', '2']\n    package_names = ['com.futuremark.pcmark.android.benchmark']\n    regex_matches = {\n        '2': [re.compile(r'PcmaWebV2Score>([\\d.]+)'),\n              re.compile(r'PcmaVideoEditingScore>([\\d.]+)'),\n              re.compile(r'PcmaDataManipulationScore>([\\d.]+)'),\n              re.compile(r'PcmaPhotoEditingV2Score>([\\d.]+)'),\n              re.compile(r'PcmaWorkv2Score>([\\d.]+)'),\n              re.compile(r'PcmaWritingV2Score>([\\d.]+)')],\n\n        '3': [re.compile(r'PcmaWebV3Score>([\\d.]+)'),\n              re.compile(r'PcmaVideoEditingV3Score>([\\d.]+)'),\n              re.compile(r'PcmaDataManipulationV3Score>([\\d.]+)'),\n              re.compile(r'PcmaPhotoEditingV3Score>([\\d.]+)'),\n              re.compile(r'PcmaWorkv3Score>([\\d.]+)'),\n              re.compile(r'PcmaWritingV3Score>([\\d.]+)')]\n    }\n\n    description = '''\n    A workload to execute the Work x.0 benchmarks within PCMark - https://www.futuremark.com/benchmarks/pcmark-android\n\n    Test description:\n    1. Open PCMark application\n    2. Swipe right to the Benchmarks screen\n    3. Select the Work x.0 benchmark\n    4. 
If needed, install the Work x.0 benchmark (requires an internet connection)\n    5. Execute the Work x.0 benchmark\n\n    Known working APK versions: 3.0.4061, 2.0.3716\n    '''\n\n    # Do not delete Work x.0 data-set before each run\n    clear_data_on_reset = False\n\n    parameters = [\n        Parameter('version', allowed_values=supported_versions,\n                  description='Specifies which version of the workload should be run.',\n                  override=True)\n    ]\n\n    def __init__(self, target, **kwargs):\n        super(PcMark, self).__init__(target, **kwargs)\n        self.gui.timeout = 1500\n\n    def initialize(self, context):\n        super(PcMark, self).initialize(context)\n        self.major_version = self.version.strip()[0]\n\n    def extract_results(self, context):\n        if self.version.startswith('3'):\n            results_path = self.target.path.join(self.target.package_data_directory, self.package, 'files')\n            result_file = [f for f in self.target.list_directory(results_path, as_root=self.target.is_rooted) if f.endswith(\".zip\")][-1]\n        elif self.version.startswith('2'):\n            results_path = self.target.path.join(self.target.external_storage, \"PCMark for Android\")\n            result_file = self.target.list_directory(results_path)[-1]\n\n        self.result_file = result_file.rstrip()\n        result = self.target.path.join(results_path, result_file)\n        self.target.pull(result, context.output_directory, as_root=self.target.is_rooted)\n        context.add_artifact('pcmark-result', self.result_file, kind='raw')\n\n    def update_output(self, context):\n        expected_results = len(self.regex_matches[self.major_version])\n        zf = zipfile.ZipFile(os.path.join(context.output_directory, self.result_file), 'r').read('Result.xml')\n        zf = zf.decode(sys.stdout.encoding)\n        for line in zf.split('\\n'):\n            for regex in self.regex_matches[self.major_version]:\n                match = 
regex.search(line)\n                if match:\n                    scores = float(match.group(1))\n                    entry = regex.pattern\n                    entry = entry[:-9]\n                    context.add_metric(entry, scores, lower_is_better=False)\n                    expected_results -= 1\n        if expected_results > 0:\n            msg = \"The PCMark workload has failed. Expected {} scores, Detected {} scores.\"\n            raise WorkloadError(msg.format(len(self.regex_matches[self.major_version]), expected_results))\n"
  },
  {
    "path": "wa/workloads/pcmark/uiauto/app/build.gradle",
    "content": "apply plugin: 'com.android.application'\n\ndef packageName = \"com.arm.wa.uiauto.pcmark\"\n\nandroid {\n    compileSdkVersion 28\n    buildToolsVersion \"28.0.3\"\n    defaultConfig {\n        applicationId \"${packageName}\"\n        minSdkVersion 18\n        targetSdkVersion 28\n        versionCode 1\n        versionName \"1.0\"\n        testInstrumentationRunner \"android.support.test.runner.AndroidJUnitRunner\"\n    }\n    buildTypes {\n        release {\n            minifyEnabled false\n            proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'\n        }\n        applicationVariants.all { variant ->\n            variant.outputs.each { output ->\n                output.outputFileName = \"${packageName}.apk\"\n            }\n        }\n    }\n}\n\ndependencies {\n    implementation fileTree(dir: 'libs', include: ['*.jar'])\n    implementation 'com.android.support.test:runner:0.5'\n    implementation 'com.android.support.test:rules:0.5'\n    implementation 'com.android.support.test.uiautomator:uiautomator-v18:2.1.2'\n    implementation(name: 'uiauto', ext:'aar')\n}\n\nrepositories {\n    flatDir {\n        dirs 'libs'\n    }\n}\n"
  },
  {
    "path": "wa/workloads/pcmark/uiauto/app/src/main/AndroidManifest.xml",
    "content": "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n<manifest xmlns:android=\"http://schemas.android.com/apk/res/android\"\n    package=\"com.arm.wa.uiauto.pcmark\"\n    android:versionCode=\"1\"\n    android:versionName=\"1.0\">\n\n\n    <instrumentation\n        android:name=\"android.support.test.runner.AndroidJUnitRunner\"\n        android:targetPackage=\"${applicationId}\"/>\n\n</manifest>\n\n"
  },
  {
    "path": "wa/workloads/pcmark/uiauto/app/src/main/java/com/arm/wa/uiauto/pcmark/UiAutomation.java",
    "content": "/*    Copyright 2014-2018 ARM Limited\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\npackage com.arm.wa.uiauto.pcmark;\n\nimport android.os.Bundle;\nimport android.support.test.runner.AndroidJUnit4;\nimport android.support.test.uiautomator.UiObject;\nimport android.support.test.uiautomator.UiObjectNotFoundException;\nimport android.support.test.uiautomator.UiSelector;\n\nimport com.arm.wa.uiauto.BaseUiAutomation;\nimport com.arm.wa.uiauto.ActionLogger;\nimport android.util.Log;\n\nimport org.junit.Before;\nimport org.junit.Test;\nimport org.junit.runner.RunWith;\n\nimport java.util.concurrent.TimeUnit;\n\n@RunWith(AndroidJUnit4.class)\npublic class UiAutomation extends BaseUiAutomation {\n\n    private int networkTimeoutSecs = 30;\n    private long networkTimeout =  TimeUnit.SECONDS.toMillis(networkTimeoutSecs);\n    public static String TAG = \"UXPERF\";\n    public static final long WAIT_TIMEOUT_5SEC = TimeUnit.SECONDS.toMillis(5);\n\n    @Before\n    public void initialize(){\n        initialize_instrumentation();\n    }\n\n    @Test\n    public void setup() throws Exception{\n        dismissAndroidVersionPopup();\n        clearPopups();\n        setScreenOrientation(ScreenOrientation.PORTRAIT);\n        loadBenchmarks();\n        installBenchmark();\n    }\n\n    @Test\n    public void runWorkload() throws Exception {\n        runBenchmark();\n    }\n\n    @Test\n    public void teardown() throws Exception{\n        
unsetScreenOrientation();\n    }\n\n    private void clearPopups() throws Exception{\n        UiObject permiss =\n            mDevice.findObject(new UiSelector().textMatches(\"(?i)Continue\"));\n        if (permiss.exists()){\n            permiss.click();\n        }\n        UiObject compat =\n            mDevice.findObject(new UiSelector().text(\"OK\"));\n        if (compat.exists()){\n            compat.click();\n            if (compat.exists()){\n                compat.click();\n            }\n        }\n    }\n\n    //Swipe to benchmarks and back to initialise the app correctly\n    private void loadBenchmarks() throws Exception {\n        UiObject title =\n            mDevice.findObject(new UiSelector().text(\"PCMARK\"));\n        title.waitForExists(300000);\n        if (title.exists()){\n            title.click();\n            UiObject benchPage = getUiObjectByText(\"BENCHMARKS\");\n            benchPage.waitForExists(60000);\n            benchPage.click();\n            benchPage.click();\n            UiObject pcmark = getUiObjectByText(\"PCMARK\");\n            pcmark.waitForExists(60000);\n            pcmark.click();\n        } else {\n            throw new UiObjectNotFoundException(\"Application has not loaded within the given time\");\n        }\n    }\n\n    //Install the Work 2.0 Performance Benchmark\n    private void installBenchmark() throws Exception {\n        UiObject benchmark =\n            mDevice.findObject(new UiSelector().descriptionContains(\"INSTALL(\"));\n        if (benchmark.exists()) {\n            benchmark.click();\n        } else {\n            UiObject benchmarktext =\n                mDevice.findObject(new UiSelector().textContains(\"INSTALL(\"));\n            if(benchmarktext.exists()) {\n                benchmarktext.click();\n            }\n        }\n        \n        UiObject install =\n            mDevice.findObject(new UiSelector().description(\"INSTALL\")\n                .className(\"android.view.View\"));\n        if 
(install.exists()) {\n            install.click();\n        } else {\n            UiObject installtext =\n                mDevice.findObject(new UiSelector().text(\"INSTALL\")\n                       .className(\"android.view.View\"));\n            if (installtext.exists()) {\n                installtext.click();\n            }\n        }\n        UiObject installed =\n            mDevice.findObject(new UiSelector().description(\"RUN\")\n                    .className(\"android.view.View\"));\n            installed.waitForExists(360000);\n            if (!installed.exists()){\n                UiObject installedtext =\n                    mDevice.findObject(new UiSelector().text(\"RUN\")\n                           .className(\"android.view.View\"));\n                    installedtext.waitForExists(1000);\n            }\n    }\n\n    //Execute the Work 2.0 Performance Benchmark - wait up to ten minutes for this to complete\n    private void runBenchmark() throws Exception {\n    \t// After installing, stop screen switching back to landscape. 
\n    \tsetScreenOrientation(ScreenOrientation.PORTRAIT);\n        UiObject run =\n            mDevice.findObject(new UiSelector().resourceId(\"CONTROL_PCMA_WORK_V2_DEFAULT\")\n                                               .className(\"android.view.View\")\n                                               .childSelector(new UiSelector().text(\"RUN\")\n                                               .className(\"android.view.View\")));\n        if (run.exists()) {\n            run.clickTopLeft();\n        } else {\n            UiObject runtext =\n                mDevice.findObject(new UiSelector().text(\"RUN\"));\n                if (runtext.waitForExists(2000)) {\n                    runtext.click();\n                } else {\n                    UiObject rundesc =\n                        mDevice.findObject(new UiSelector().description(\"RUN\"));\n                    rundesc.click();\n                }\n        }\n        UiObject score =\n            mDevice.findObject(new UiSelector().text(\"SCORE DETAILS\")\n                .className(\"android.widget.TextView\"));\n        if (!score.waitForExists(3600000)){\n            throw new UiObjectNotFoundException(\"Workload has not completed within the given time\");\n        }\n    }\n}\n"
  },
  {
    "path": "wa/workloads/pcmark/uiauto/build.gradle",
    "content": "// Top-level build file where you can add configuration options common to all sub-projects/modules.\n\nbuildscript {\n    repositories {\n        jcenter()\n        google()\n    }\n    dependencies {\n        classpath 'com.android.tools.build:gradle:7.2.1'\n\n        // NOTE: Do not place your application dependencies here; they belong\n        // in the individual module build.gradle files\n    }\n}\n\nallprojects {\n    repositories {\n        jcenter()\n        google()\n    }\n}\n\ntask clean(type: Delete) {\n    delete rootProject.buildDir\n}\n"
  },
  {
    "path": "wa/workloads/pcmark/uiauto/build.sh",
    "content": "#!/bin/bash\n#    Copyright 2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\n\n# CD into build dir if possible - allows building from any directory\nscript_path='.'\nif `readlink -f $0 &>/dev/null`; then\n    script_path=`readlink -f $0 2>/dev/null`\nfi\nscript_dir=`dirname $script_path`\ncd $script_dir\n\n# Ensure gradelw exists before starting\nif [[ ! -f gradlew ]]; then\n    echo 'gradlew file not found! Check that you are in the right directory.'\n    exit 9\nfi\n\n# Copy base class library from wa dist\nlibs_dir=app/libs\nbase_class=`python3 -c \"import os, wa; print(os.path.join(os.path.dirname(wa.__file__), 'framework', 'uiauto', 'uiauto.aar'))\"`\nmkdir -p $libs_dir\ncp $base_class $libs_dir\n\n# Build and return appropriate exit code if failed\n# gradle build\n./gradlew clean :app:assembleDebug\nexit_code=$?\nif [[ $exit_code -ne 0 ]]; then\n    echo \"ERROR: 'gradle build' exited with code $exit_code\"\n    exit $exit_code\nfi\n\n# If successful move APK file to workload folder (overwrite previous)\npackage=com.arm.wa.uiauto.pcmark\nrm -f ../$package\nif [[ -f app/build/outputs/apk/debug/$package.apk ]]; then\n    cp app/build/outputs/apk/debug/$package.apk ../$package.apk\nelse\n    echo 'ERROR: UiAutomator apk could not be found!'\n    exit 9\nfi\n"
  },
  {
    "path": "wa/workloads/pcmark/uiauto/gradle/wrapper/gradle-wrapper.properties",
    "content": "#Thu Jun 08 14:26:39 BST 2017\ndistributionBase=GRADLE_USER_HOME\ndistributionPath=wrapper/dists\nzipStoreBase=GRADLE_USER_HOME\nzipStorePath=wrapper/dists\ndistributionUrl=https\\://services.gradle.org/distributions/gradle-7.3.3-all.zip\n"
  },
  {
    "path": "wa/workloads/pcmark/uiauto/gradlew",
    "content": "#!/usr/bin/env bash\n\n##############################################################################\n##\n##  Gradle start up script for UN*X\n##\n##############################################################################\n\n# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.\nDEFAULT_JVM_OPTS=\"\"\n\nAPP_NAME=\"Gradle\"\nAPP_BASE_NAME=`basename \"$0\"`\n\n# Use the maximum available, or set MAX_FD != -1 to use that value.\nMAX_FD=\"maximum\"\n\nwarn ( ) {\n    echo \"$*\"\n}\n\ndie ( ) {\n    echo\n    echo \"$*\"\n    echo\n    exit 1\n}\n\n# OS specific support (must be 'true' or 'false').\ncygwin=false\nmsys=false\ndarwin=false\ncase \"`uname`\" in\n  CYGWIN* )\n    cygwin=true\n    ;;\n  Darwin* )\n    darwin=true\n    ;;\n  MINGW* )\n    msys=true\n    ;;\nesac\n\n# Attempt to set APP_HOME\n# Resolve links: $0 may be a link\nPRG=\"$0\"\n# Need this for relative symlinks.\nwhile [ -h \"$PRG\" ] ; do\n    ls=`ls -ld \"$PRG\"`\n    link=`expr \"$ls\" : '.*-> \\(.*\\)$'`\n    if expr \"$link\" : '/.*' > /dev/null; then\n        PRG=\"$link\"\n    else\n        PRG=`dirname \"$PRG\"`\"/$link\"\n    fi\ndone\nSAVED=\"`pwd`\"\ncd \"`dirname \\\"$PRG\\\"`/\" >/dev/null\nAPP_HOME=\"`pwd -P`\"\ncd \"$SAVED\" >/dev/null\n\nCLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar\n\n# Determine the Java command to use to start the JVM.\nif [ -n \"$JAVA_HOME\" ] ; then\n    if [ -x \"$JAVA_HOME/jre/sh/java\" ] ; then\n        # IBM's JDK on AIX uses strange locations for the executables\n        JAVACMD=\"$JAVA_HOME/jre/sh/java\"\n    else\n        JAVACMD=\"$JAVA_HOME/bin/java\"\n    fi\n    if [ ! 
-x \"$JAVACMD\" ] ; then\n        die \"ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME\n\nPlease set the JAVA_HOME variable in your environment to match the\nlocation of your Java installation.\"\n    fi\nelse\n    JAVACMD=\"java\"\n    which java >/dev/null 2>&1 || die \"ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.\n\nPlease set the JAVA_HOME variable in your environment to match the\nlocation of your Java installation.\"\nfi\n\n# Increase the maximum file descriptors if we can.\nif [ \"$cygwin\" = \"false\" -a \"$darwin\" = \"false\" ] ; then\n    MAX_FD_LIMIT=`ulimit -H -n`\n    if [ $? -eq 0 ] ; then\n        if [ \"$MAX_FD\" = \"maximum\" -o \"$MAX_FD\" = \"max\" ] ; then\n            MAX_FD=\"$MAX_FD_LIMIT\"\n        fi\n        ulimit -n $MAX_FD\n        if [ $? -ne 0 ] ; then\n            warn \"Could not set maximum file descriptor limit: $MAX_FD\"\n        fi\n    else\n        warn \"Could not query maximum file descriptor limit: $MAX_FD_LIMIT\"\n    fi\nfi\n\n# For Darwin, add options to specify how the application appears in the dock\nif $darwin; then\n    GRADLE_OPTS=\"$GRADLE_OPTS \\\"-Xdock:name=$APP_NAME\\\" \\\"-Xdock:icon=$APP_HOME/media/gradle.icns\\\"\"\nfi\n\n# For Cygwin, switch paths to Windows format before running java\nif $cygwin ; then\n    APP_HOME=`cygpath --path --mixed \"$APP_HOME\"`\n    CLASSPATH=`cygpath --path --mixed \"$CLASSPATH\"`\n    JAVACMD=`cygpath --unix \"$JAVACMD\"`\n\n    # We build the pattern for arguments to be converted via cygpath\n    ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`\n    SEP=\"\"\n    for dir in $ROOTDIRSRAW ; do\n        ROOTDIRS=\"$ROOTDIRS$SEP$dir\"\n        SEP=\"|\"\n    done\n    OURCYGPATTERN=\"(^($ROOTDIRS))\"\n    # Add a user-defined pattern to the cygpath arguments\n    if [ \"$GRADLE_CYGPATTERN\" != \"\" ] ; then\n        OURCYGPATTERN=\"$OURCYGPATTERN|($GRADLE_CYGPATTERN)\"\n    fi\n    # Now convert the arguments - 
kludge to limit ourselves to /bin/sh\n    i=0\n    for arg in \"$@\" ; do\n        CHECK=`echo \"$arg\"|egrep -c \"$OURCYGPATTERN\" -`\n        CHECK2=`echo \"$arg\"|egrep -c \"^-\"`                                 ### Determine if an option\n\n        if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then                    ### Added a condition\n            eval `echo args$i`=`cygpath --path --ignore --mixed \"$arg\"`\n        else\n            eval `echo args$i`=\"\\\"$arg\\\"\"\n        fi\n        i=$((i+1))\n    done\n    case $i in\n        (0) set -- ;;\n        (1) set -- \"$args0\" ;;\n        (2) set -- \"$args0\" \"$args1\" ;;\n        (3) set -- \"$args0\" \"$args1\" \"$args2\" ;;\n        (4) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" ;;\n        (5) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" ;;\n        (6) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" \"$args5\" ;;\n        (7) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" \"$args5\" \"$args6\" ;;\n        (8) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" \"$args5\" \"$args6\" \"$args7\" ;;\n        (9) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" \"$args5\" \"$args6\" \"$args7\" \"$args8\" ;;\n    esac\nfi\n\n# Split up the JVM_OPTS And GRADLE_OPTS values into an array, following the shell quoting and substitution rules\nfunction splitJvmOpts() {\n    JVM_OPTS=(\"$@\")\n}\neval splitJvmOpts $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS\nJVM_OPTS[${#JVM_OPTS[*]}]=\"-Dorg.gradle.appname=$APP_BASE_NAME\"\n\nexec \"$JAVACMD\" \"${JVM_OPTS[@]}\" -classpath \"$CLASSPATH\" org.gradle.wrapper.GradleWrapperMain \"$@\"\n"
  },
  {
    "path": "wa/workloads/pcmark/uiauto/gradlew.bat",
    "content": "@if \"%DEBUG%\" == \"\" @echo off\r\n@rem ##########################################################################\r\n@rem\r\n@rem  Gradle startup script for Windows\r\n@rem\r\n@rem ##########################################################################\r\n\r\n@rem Set local scope for the variables with windows NT shell\r\nif \"%OS%\"==\"Windows_NT\" setlocal\r\n\r\n@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.\r\nset DEFAULT_JVM_OPTS=\r\n\r\nset DIRNAME=%~dp0\r\nif \"%DIRNAME%\" == \"\" set DIRNAME=.\r\nset APP_BASE_NAME=%~n0\r\nset APP_HOME=%DIRNAME%\r\n\r\n@rem Find java.exe\r\nif defined JAVA_HOME goto findJavaFromJavaHome\r\n\r\nset JAVA_EXE=java.exe\r\n%JAVA_EXE% -version >NUL 2>&1\r\nif \"%ERRORLEVEL%\" == \"0\" goto init\r\n\r\necho.\r\necho ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.\r\necho.\r\necho Please set the JAVA_HOME variable in your environment to match the\r\necho location of your Java installation.\r\n\r\ngoto fail\r\n\r\n:findJavaFromJavaHome\r\nset JAVA_HOME=%JAVA_HOME:\"=%\r\nset JAVA_EXE=%JAVA_HOME%/bin/java.exe\r\n\r\nif exist \"%JAVA_EXE%\" goto init\r\n\r\necho.\r\necho ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%\r\necho.\r\necho Please set the JAVA_HOME variable in your environment to match the\r\necho location of your Java installation.\r\n\r\ngoto fail\r\n\r\n:init\r\n@rem Get command-line arguments, handling Windowz variants\r\n\r\nif not \"%OS%\" == \"Windows_NT\" goto win9xME_args\r\nif \"%@eval[2+2]\" == \"4\" goto 4NT_args\r\n\r\n:win9xME_args\r\n@rem Slurp the command line arguments.\r\nset CMD_LINE_ARGS=\r\nset _SKIP=2\r\n\r\n:win9xME_args_slurp\r\nif \"x%~1\" == \"x\" goto execute\r\n\r\nset CMD_LINE_ARGS=%*\r\ngoto execute\r\n\r\n:4NT_args\r\n@rem Get arguments from the 4NT Shell from JP Software\r\nset CMD_LINE_ARGS=%$\r\n\r\n:execute\r\n@rem Setup the command line\r\n\r\nset 
CLASSPATH=%APP_HOME%\\gradle\\wrapper\\gradle-wrapper.jar\r\n\r\n@rem Execute Gradle\r\n\"%JAVA_EXE%\" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% \"-Dorg.gradle.appname=%APP_BASE_NAME%\" -classpath \"%CLASSPATH%\" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%\r\n\r\n:end\r\n@rem End local scope for the variables with windows NT shell\r\nif \"%ERRORLEVEL%\"==\"0\" goto mainEnd\r\n\r\n:fail\r\nrem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of\r\nrem the _cmd.exe /c_ return code!\r\nif  not \"\" == \"%GRADLE_EXIT_CONSOLE%\" exit 1\r\nexit /b 1\r\n\r\n:mainEnd\r\nif \"%OS%\"==\"Windows_NT\" endlocal\r\n\r\n:omega\r\n"
  },
  {
    "path": "wa/workloads/pcmark/uiauto/settings.gradle",
    "content": "include ':app'\n"
  },
  {
    "path": "wa/workloads/recentfling/__init__.py",
    "content": "#    Copyright 2013-2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\n# pylint: disable=E1101,W0201\n\nimport os\nimport re\n\nfrom wa import Workload, Parameter, File\nfrom wa.framework.exception import WorkloadError, TargetError\nfrom wa.utils.exec_control import once\n\n\nclass Recentfling(Workload):\n\n    name = 'recentfling'\n    description = \"\"\"\n    Tests UI jank on android devices.\n\n    For this workload to work, ``recentfling.sh`` and ``defs.sh`` must be placed\n    in ``~/.workload_automation/dependencies/recentfling/``. These can be found\n    in the `AOSP Git repository <https://android.googlesource.com/platform/system/extras/+/master/tests/workloads>`_.\n\n    To change the apps that are opened at the start of the workload you will need\n    to modify the ``defs.sh`` file. 
You will need to add your app to ``dfltAppList``\n    and then add a variable called ``{app_name}Activity`` with the name of the\n    activity to launch (where ``{add_name}`` is the name you put into ``dfltAppList``).\n\n    You can get a list of activities available on your device by running\n    ``adb shell pm list packages -f``\n    \"\"\"\n    supported_platforms = ['android']\n\n    parameters = [\n        Parameter('loops', kind=int, default=3,\n                  description=\"The number of test iterations.\"),\n        Parameter('start_apps', kind=bool, default=True,\n                  description=\"\"\"\n                  If set to ``False``,no apps will be started before flinging\n                  through the recent apps list (in which the assumption is\n                  there are already recently started apps in the list.\n                  \"\"\"),\n        Parameter('device_name', kind=str, default=None,\n                  description=\"\"\"\n                  If set, recentfling will use the fling parameters for this\n                  device instead of automatically guessing the device.  This can\n                  also be used if the device is not supported by recentfling,\n                  but its screensize is similar to that of one that is supported.\n\n                  For possible values, check your recentfling.sh.  
At the time\n                  of writing, valid values are: 'shamu', 'hammerhead', 'angler',\n                  'ariel', 'mtp8996', 'bullhead' or 'volantis'.\n                  \"\"\"),\n    ]\n\n    @once\n    def initialize(self, context):  # pylint: disable=no-self-use\n        if self.target.get_sdk_version() < 23:\n            raise WorkloadError(\"This workload relies on ``dumpsys gfxinfo`` \\\n                                 only present in Android M and onwards\")\n\n        defs_host = context.get_resource(File(self, \"defs.sh\"))\n        Recentfling.defs_target = self.target.install(defs_host)\n        recentfling_host = context.get_resource(File(self, \"recentfling.sh\"))\n        Recentfling.recentfling_target = self.target.install(recentfling_host)\n\n    def setup(self, context):\n        args = '-i {} '.format(self.loops)\n        if not self.start_apps:\n            args += '-N '\n        if self.device_name is not None:\n            args += '-d {}'.format(self.device_name)\n\n        self.cmd = \"echo $$>{workdir}/pidfile; cd {bindir}; exec ./recentfling.sh {args}; rm {workdir}/pidfile\".format(\n            workdir=self.target.working_directory,\n            bindir=self.target.executables_directory, args=args)\n\n        self._kill_recentfling()\n        self.target.ensure_screen_is_on()\n\n    def run(self, context):\n        self.output = \"\"\n        try:\n            self.output = self.target.execute(self.cmd, timeout=120)\n        except KeyboardInterrupt:\n            self._kill_recentfling()\n            raise\n\n    def update_output(self, context):\n        loop_group_names = [\"90th Percentile\", \"95th Percentile\", \"99th Percentile\", \"Jank\", \"Jank%\"]\n        count = 0\n        p = re.compile(\"Frames: \\d+ latency: (?P<pct90>\\d+)/(?P<pct95>\\d+)/(?P<pct99>\\d+) Janks: (?P<jank>\\d+)\\((?P<jank_pct>\\d+)%\\)\")\n        for line in self.output.strip().splitlines():\n            match = p.search(line)\n            if match:\n 
               if line.startswith(\"AVE: \"):\n                    group_names = [\"Average \" + g for g in loop_group_names]\n                    classifiers = {\"loop\": \"Average\"}\n                else:\n                    count += 1\n                    group_names = loop_group_names\n                    classifiers = {\"loop\": count}\n\n                for (name, metric) in zip(group_names, match.groups()):\n                    context.add_metric(name, metric,\n                                       classifiers=classifiers)\n\n    @once\n    def finalize(self, context):\n        self.target.uninstall_executable(self.recentfling_target)\n        self.target.uninstall_executable(self.defs_target)\n\n    def _kill_recentfling(self):\n        command = 'cat {}/pidfile'.format(self.target.working_directory)\n        try:\n            pid = self.target.execute(command)\n            if pid.strip():\n                self.target.kill(pid.strip(), signal='SIGKILL')\n        except TargetError:\n            # recentfling is not running\n            pass\n"
  },
  {
    "path": "wa/workloads/rt_app/LICENSE",
    "content": "rt-app binaries and workgen script included with this workload are distributed\nunder GPL version 2; The full text of the license may be viewed here:\n\nhttp://www.gnu.org/licenses/gpl-2.0.html\n\nSource for these binaries may be obtained from Linaro here:\n\nhttps://git.linaro.org/power/rt-app.git\n"
  },
  {
    "path": "wa/workloads/rt_app/__init__.py",
    "content": "#    Copyright 2015-2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\nimport os\nimport re\nimport json\nimport tarfile\nfrom collections import OrderedDict\nfrom subprocess import CalledProcessError\n\nfrom wa import Workload, Parameter, Executable, File\nfrom wa.framework.exception import WorkloadError, ResourceError, ConfigError\nfrom wa.utils.misc import check_output, safe_extract\nfrom wa.utils.exec_control import once\nfrom wa.utils.types import cpu_mask\n\n\nRAW_OUTPUT_FILENAME = 'raw-output.txt'\nTARBALL_FILENAME = 'rtapp-logs.tar.gz'\nBINARY_NAME = 'rt-app'\nPACKAGED_USE_CASE_DIRECTORY = os.path.abspath(os.path.join(os.path.dirname(__file__), 'use_cases'))\n\nPLOAD_REGEX = re.compile(r'pLoad = (\\d+)(\\w+) : calib_cpu (\\d+)')\nERROR_REGEX = re.compile(r'error')\nCRIT_REGEX = re.compile(r'crit')\n\n\nclass RtApp(Workload):\n    # pylint: disable=no-member,attribute-defined-outside-init\n\n    name = 'rt-app'\n    description = \"\"\"\n    A test application that simulates configurable real-time periodic load.\n\n    rt-app is a test application that starts multiple periodic threads in order to\n    simulate a real-time periodic load. It supports SCHED_OTHER, SCHED_FIFO,\n    SCHED_RR as well as the AQuoSA framework and SCHED_DEADLINE.\n\n    The load is described using JSON-like config files. 
Below are a couple of simple\n    examples.\n\n\n    Simple use case which creates a thread that run 1ms then sleep 9ms\n    until the use case is stopped with Ctrl+C:\n\n    .. code-block:: json\n\n        {\n            \"tasks\" : {\n                \"thread0\" : {\n                    \"loop\" : -1,\n                    \"run\" :   20000,\n                    \"sleep\" : 80000\n                }\n            },\n            \"global\" : {\n                \"duration\" : 2,\n                \"calibration\" : \"CPU0\",\n                \"default_policy\" : \"SCHED_OTHER\",\n                \"pi_enabled\" : false,\n                \"lock_pages\" : false,\n                \"logdir\" : \"./\",\n                \"log_basename\" : \"rt-app1\",\n                \"ftrace\" : false,\n                \"gnuplot\" : true,\n            }\n        }\n\n\n    Simple use case with 2 threads that runs for 10 ms and wake up each\n    other until the use case is stopped with Ctrl+C\n\n    .. code-block:: json\n\n        {\n            \"tasks\" : {\n                \"thread0\" : {\n                    \"loop\" : -1,\n                    \"run\" :     10000,\n                    \"resume\" : \"thread1\",\n                    \"suspend\" : \"thread0\"\n                },\n                \"thread1\" : {\n                    \"loop\" : -1,\n                    \"run\" :     10000,\n                    \"resume\" : \"thread0\",\n                    \"suspend\" : \"thread1\"\n                }\n            }\n        }\n\n    Please refer to the existing configs in ``$WA_ROOT/wa/workloads/rt_app/use_case``\n    for more examples.\n\n    The upstream version of rt-app is hosted here:\n\n    https://github.com/scheduler-tools/rt-app\n\n    \"\"\"\n\n    parameters = [\n        Parameter('config', kind=str, default='taskset',\n                  description='''\n                  Use case configuration file to run with rt-app. 
This may be\n                  either the name of one of the \"standard\" configurations included\n                  with the workload. or a path to a custom JSON file provided by\n                  the user. Either way, the \".json\" extension is implied and will\n                  be added automatically if not specified in the argument.\n\n                  The following is the list of standard configurations currently\n                  included with the workload: {}\n\n                  '''.format(', '.join(os.listdir(PACKAGED_USE_CASE_DIRECTORY)))),\n        Parameter('duration', kind=int,\n                  description='''\n                  Duration of the workload execution in Seconds. If specified, this\n                  will override the corresponding parameter in the JSON config.\n                  '''),\n        Parameter('cpus', kind=cpu_mask, default=0, aliases=['taskset_mask'],\n                  description='Constrain execution to specific CPUs.'),\n        Parameter('uninstall', aliases=['uninstall_on_exit'], kind=bool, default=False,\n                  override=True, description=\"\"\"\n                  If set to ``True``, rt-app binary will be uninstalled from the device\n                  at the end of the run.\n                  \"\"\"),\n        Parameter('force_install', kind=bool, default=False,\n                  description=\"\"\"\n                  If set to ``True``, rt-app binary will always be deployed to the\n                  target device at the beginning of the run, regardless of whether it\n                  was already installed there.\n                  \"\"\"),\n    ]\n\n    @once\n    def initialize(self, context):\n        # initialize() runs once per run. 
setting a class variable to make it\n        # available to other instances of the workload\n        RtApp.target_working_directory = self.target.path.join(self.target.working_directory,\n                                                               'rt-app-working')\n        RtApp.host_binary = context.get_resource(Executable(self,\n                                                            self.target.abi,\n                                                            BINARY_NAME), strict=False)\n        RtApp.workgen_script = context.get_resource(File(self, 'workgen'))\n        self.target.execute('mkdir -p {}'.format(self.target_working_directory))\n        self._deploy_rt_app_binary_if_necessary()\n\n    def setup(self, context):\n        self.output = None\n        self.log_basename = context.current_job.label\n        self.host_json_config = self._load_json_config(context)\n        self.config_file_on_target = self.target.path.join(self.target_working_directory,\n                                                           os.path.basename(self.host_json_config))\n        self.target.push(self.host_json_config, self.config_file_on_target)\n        self.command = '{} {}'.format(self.target_binary, self.config_file_on_target)\n\n        time_buffer = 30\n        self.timeout = self.duration + time_buffer\n\n    def run(self, context):\n        self.output = self.target.invoke(self.command,\n                                         in_directory=self.target_working_directory,\n                                         on_cpus=self.cpus and self.cpus.list() or None,\n                                         redirect_stderr=True,\n                                         timeout=self.timeout,\n                                         as_root=self.target.is_rooted)\n\n    def update_output(self, context):\n        self._pull_rt_app_logs(context)\n        context.output.classifiers.update(dict(\n            duration=self.duration,\n            
task_count=self.task_count,\n        ))\n\n        if not self.output:\n            return\n        outfile = os.path.join(context.output_directory, RAW_OUTPUT_FILENAME)\n        with open(outfile, 'w') as wfh:\n            wfh.write(self.output)\n\n        error_count = 0\n        crit_count = 0\n        for line in self.output.split('\\n'):\n            match = PLOAD_REGEX.search(line)\n            if match:\n                pload_value = match.group(1)\n                pload_unit = match.group(2)\n                calib_cpu_value = match.group(3)\n                context.add_metric('pLoad', float(pload_value), pload_unit)\n                context.add_metric('calib_cpu', float(calib_cpu_value))\n\n            error_match = ERROR_REGEX.search(line)\n            if error_match:\n                error_count += 1\n\n            crit_match = CRIT_REGEX.search(line)\n            if crit_match:\n                crit_count += 1\n\n        context.add_metric('error_count', error_count, 'count')\n        context.add_metric('crit_count', crit_count, 'count')\n\n    @once\n    def finalize(self, context):\n        if self.uninstall:\n            self.target.uninstall(self.target_binary)\n        if self.cleanup_assets:\n            self.target.execute('rm -rf {}'.format(self.target_working_directory))\n\n    def _deploy_rt_app_binary_if_necessary(self):\n        # called from initialize() so gets invoked once per run\n        RtApp.target_binary = self.target.get_installed(\"rt-app\")\n        if self.force_install or not RtApp.target_binary:\n            if not self.host_binary:\n                message = '''rt-app is not installed on the target and could not be\n                             found in workload resources'''\n                raise ResourceError(message)\n            RtApp.target_binary = self.target.install(self.host_binary)\n\n    def _load_json_config(self, context):\n        user_config_file = self._get_raw_json_config(context)\n        config_file = 
self._generate_workgen_config(user_config_file,\n                                                    context.output_directory)\n        with open(config_file) as fh:\n            try:\n                config_data = json.load(fh, object_pairs_hook=OrderedDict)\n            except ValueError:\n                # We were not able to parse the JSON file. Raise an informative error.\n                msg = \"Failed to parse {}. Please make sure it is valid JSON.\"\n                raise ConfigError(msg.format(user_config_file))\n\n        self._update_rt_app_config(config_data)\n        self.duration = config_data['global'].get('duration', 0)\n        self.task_count = len(config_data.get('tasks', []))\n        with open(config_file, 'w') as wfh:\n            json.dump(config_data, wfh, indent=4)\n        return config_file\n\n    def _get_raw_json_config(self, resolver):\n        if os.path.splitext(self.config)[1] != '.json':\n            self.config += '.json'\n        if os.path.isfile(self.config):\n            return os.path.abspath(self.config)\n        partial_path = os.path.join('use_cases', self.config)\n        return resolver.get(File(self, partial_path))\n\n    def _generate_workgen_config(self, user_file, output_directory):\n        output_file = os.path.join(output_directory, 'unkind.json')\n        # use workgen dry run option to generate a use case\n        # file with proper JSON grammar on host first\n        try:\n            check_output('python3 {} -d -o {} {}'.format(self.workgen_script,\n                                                         output_file,\n                                                         user_file),\n                         shell=True)\n        except CalledProcessError as e:\n            message = 'Could not generate config using workgen, got \"{}\"'\n            raise WorkloadError(message.format(e))\n        return output_file\n\n    def _update_rt_app_config(self, config_data):\n        config_data['global'] = 
config_data.get('global', {})\n        config_data['global']['logdir'] = self.target_working_directory\n        config_data['global']['log_basename'] = self.log_basename\n        if self.duration is not None:\n            config_data['global']['duration'] = self.duration\n\n    def _pull_rt_app_logs(self, context):\n        tar_command = '{} tar czf {}/{} -C {} .'.format(self.target.busybox,\n                                                        self.target_working_directory,\n                                                        TARBALL_FILENAME,\n                                                        self.target_working_directory)\n        self.target.execute(tar_command, timeout=300)\n        target_path = self.target.path.join(self.target_working_directory, TARBALL_FILENAME)\n        host_path = os.path.join(context.output_directory, TARBALL_FILENAME)\n        self.target.pull(target_path, host_path)\n        with tarfile.open(host_path, 'r:gz') as tf:\n            safe_extract(tf, context.output_directory)\n        os.remove(host_path)\n        self.target.execute('rm -rf {}/*'.format(self.target_working_directory))\n"
  },
  {
    "path": "wa/workloads/rt_app/bin/arm64/README.rt-app",
    "content": "Sources of rt-app available at:\nGit commit: 857d6a6624469ba275a37493a10ebba00a50b467\nGit repository: https://github.com/douglas-raillard-arm/rt-app.git\n\n\nBuild host info:\n\nNAME=\"Alpine Linux\"\nID=alpine\nVERSION_ID=3.18.3\nPRETTY_NAME=\"Alpine Linux v3.18\"\nHOME_URL=\"https://alpinelinux.org/\"\nBUG_REPORT_URL=\"https://gitlab.alpinelinux.org/alpine/aports/-/issues\"\n\n\nBuild recipe:\n\nexport ARCH=arm64\nexport BUILD_DIR=/tmp/tmpjnvvg6dg/arm64/source\nexport LISA_ARCH_ASSETS=/lisa/_assets/binaries/arm64\nexport LISA_HOME=''\n#! /bin/bash\n\nALPINE_VERSION=v3.18\nALPINE_BUILD_DEPENDENCIES=(autoconf automake bash cmake gcc git make libtool linux-headers musl-dev)\n\nbuild_jsonc() {\n    # As recommended in the README, build in a separate tree. The folder needs\n    # to be called \"json-c\" as rt-app will #include <json-c/json.h>\n    mkdir json-c\n    cd json-c\n    export ac_cv_func_malloc_0_nonnull=yes\n    export ac_cv_func_realloc_0_nonnull=yes\n    cmake ../json-c -DBUILD_SHARED_LIBS=OFF -DBUILD_STATIC_LIBS=ON -DCMAKE_C_COMPILER=${CROSS_COMPILE}gcc\n    make\n}\n\nbuild_numactl() {\n    cd numactl\n    ./autogen.sh\n    ./configure --host=$CONFIGURE_HOST --disable-shared --enable-static\n    make\n}\n\nbuild_rtapp() {\n    cd rt-app\n    export ac_cv_lib_json_c_json_object_from_file=yes\n    export ac_cv_lib_numa_numa_available=yes\n    ./autogen.sh\n    ./configure --host=$CONFIGURE_HOST LDFLAGS=\"--static -L$BUILD_DIR/json-c/ -L$BUILD_DIR/numactl/\" \\\n                CFLAGS=\"-I$BUILD_DIR -I$BUILD_DIR/numactl -I$BUILD_DIR/json-c\" --with-deadline\n    make\n    \"$CROSS_COMPILE\"strip src/rt-app\n}\n\ndownload() {\n    git clone https://github.com/json-c/json-c json-c\n    # git -C json-c checkout ddd049045d98dd3163d01a7d79184b3c7fb95a14\n    git -C json-c checkout json-c-0.16-20220414\n\n    git clone https://github.com/numactl/numactl.git\n    git -C numactl checkout v2.0.16\n\n    git clone 
https://github.com/douglas-raillard-arm/rt-app.git\n    # Branch: lisa\n    git -C rt-app checkout 857d6a6624469ba275a37493a10ebba00a50b467\n\n}\n\nbuild() {\n    (build_jsonc) && (build_numactl) && (build_rtapp)\n}\n\ninstall() {\n    source \"$LISA_HOME/tools/recipes/utils.sh\"\n    cp -v rt-app/src/rt-app \"$LISA_ARCH_ASSETS/rt-app\"\n\n    install_readme rt-app rt-app COPYING.in\n    # According to the readme, libnumactl is under the\n    # GNU Lesser General Public License, v2.1.\n    install_readme libnumactl numactl LICENSE.LGPL2.1\n    install_readme json-c json-c COPYING\n}\n\n\nThe sources were distributed under the following licence (content of rt-app/COPYING.in):\n\n\t\t    GNU GENERAL PUBLIC LICENSE\n\t\t       Version 2, June 1991\n\n Copyright (C) 1989, 1990, 1991, 1992 Free Software Foundation, Inc.\n 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA\n Everyone is permitted to copy and distribute verbatim copies\n of this license document, but changing it is not allowed.\n\n\t\t\t    Preamble\n\n  The licenses for most software are designed to take away your\nfreedom to share and change it.  By contrast, the GNU General Public\nLicense is intended to guarantee your freedom to share and change free\nsoftware--to make sure the software is free for all its users.  This\nGeneral Public License applies to most of the Free Software\nFoundation's software and to any other program whose authors commit to\nusing it.  (Some other Free Software Foundation software is covered by\nthe GNU Library General Public License instead.)  You can apply it to\nyour programs, too.\n\n  When we speak of free software, we are referring to freedom, not\nprice.  
Our General Public Licenses are designed to make sure that you\nhave the freedom to distribute copies of free software (and charge for\nthis service if you wish), that you receive source code or can get it\nif you want it, that you can change the software or use pieces of it\nin new free programs; and that you know you can do these things.\n\n  To protect your rights, we need to make restrictions that forbid\nanyone to deny you these rights or to ask you to surrender the rights.\nThese restrictions translate to certain responsibilities for you if you\ndistribute copies of the software, or if you modify it.\n\n  For example, if you distribute copies of such a program, whether\ngratis or for a fee, you must give the recipients all the rights that\nyou have.  You must make sure that they, too, receive or can get the\nsource code.  And you must show them these terms so they know their\nrights.\n\n  We protect your rights with two steps: (1) copyright the software, and\n(2) offer you this license which gives you legal permission to copy,\ndistribute and/or modify the software.\n\n  Also, for each author's protection and ours, we want to make certain\nthat everyone understands that there is no warranty for this free\nsoftware.  If the software is modified by someone else and passed on, we\nwant its recipients to know that what they have is not the original, so\nthat any problems introduced by others will not reflect on the original\nauthors' reputations.\n\n  Finally, any free program is threatened constantly by software\npatents.  We wish to avoid the danger that redistributors of a free\nprogram will individually obtain patent licenses, in effect making the\nprogram proprietary.  
To prevent this, we have made it clear that any\npatent must be licensed for everyone's free use or not licensed at all.\n\n  The precise terms and conditions for copying, distribution and\nmodification follow.\n\f\n\t\t    GNU GENERAL PUBLIC LICENSE\n   TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION\n\n  0. This License applies to any program or other work which contains\na notice placed by the copyright holder saying it may be distributed\nunder the terms of this General Public License.  The \"Program\", below,\nrefers to any such program or work, and a \"work based on the Program\"\nmeans either the Program or any derivative work under copyright law:\nthat is to say, a work containing the Program or a portion of it,\neither verbatim or with modifications and/or translated into another\nlanguage.  (Hereinafter, translation is included without limitation in\nthe term \"modification\".)  Each licensee is addressed as \"you\".\n\nActivities other than copying, distribution and modification are not\ncovered by this License; they are outside its scope.  The act of\nrunning the Program is not restricted, and the output from the Program\nis covered only if its contents constitute a work based on the\nProgram (independent of having been made by running the Program).\nWhether that is true depends on what the Program does.\n\n  1. You may copy and distribute verbatim copies of the Program's\nsource code as you receive it, in any medium, provided that you\nconspicuously and appropriately publish on each copy an appropriate\ncopyright notice and disclaimer of warranty; keep intact all the\nnotices that refer to this License and to the absence of any warranty;\nand give any other recipients of the Program a copy of this License\nalong with the Program.\n\nYou may charge a fee for the physical act of transferring a copy, and\nyou may at your option offer warranty protection in exchange for a fee.\n\n  2. 
You may modify your copy or copies of the Program or any portion\nof it, thus forming a work based on the Program, and copy and\ndistribute such modifications or work under the terms of Section 1\nabove, provided that you also meet all of these conditions:\n\n    a) You must cause the modified files to carry prominent notices\n    stating that you changed the files and the date of any change.\n\n    b) You must cause any work that you distribute or publish, that in\n    whole or in part contains or is derived from the Program or any\n    part thereof, to be licensed as a whole at no charge to all third\n    parties under the terms of this License.\n\n    c) If the modified program normally reads commands interactively\n    when run, you must cause it, when started running for such\n    interactive use in the most ordinary way, to print or display an\n    announcement including an appropriate copyright notice and a\n    notice that there is no warranty (or else, saying that you provide\n    a warranty) and that users may redistribute the program under\n    these conditions, and telling the user how to view a copy of this\n    License.  (Exception: if the Program itself is interactive but\n    does not normally print such an announcement, your work based on\n    the Program is not required to print an announcement.)\n\f\nThese requirements apply to the modified work as a whole.  If\nidentifiable sections of that work are not derived from the Program,\nand can be reasonably considered independent and separate works in\nthemselves, then this License, and its terms, do not apply to those\nsections when you distribute them as separate works.  
But when you\ndistribute the same sections as part of a whole which is a work based\non the Program, the distribution of the whole must be on the terms of\nthis License, whose permissions for other licensees extend to the\nentire whole, and thus to each and every part regardless of who wrote it.\n\nThus, it is not the intent of this section to claim rights or contest\nyour rights to work written entirely by you; rather, the intent is to\nexercise the right to control the distribution of derivative or\ncollective works based on the Program.\n\nIn addition, mere aggregation of another work not based on the Program\nwith the Program (or with a work based on the Program) on a volume of\na storage or distribution medium does not bring the other work under\nthe scope of this License.\n\n  3. You may copy and distribute the Program (or a work based on it,\nunder Section 2) in object code or executable form under the terms of\nSections 1 and 2 above provided that you also do one of the following:\n\n    a) Accompany it with the complete corresponding machine-readable\n    source code, which must be distributed under the terms of Sections\n    1 and 2 above on a medium customarily used for software interchange; or,\n\n    b) Accompany it with a written offer, valid for at least three\n    years, to give any third party, for a charge no more than your\n    cost of physically performing source distribution, a complete\n    machine-readable copy of the corresponding source code, to be\n    distributed under the terms of Sections 1 and 2 above on a medium\n    customarily used for software interchange; or,\n\n    c) Accompany it with the information you received as to the offer\n    to distribute corresponding source code.  
(This alternative is\n    allowed only for noncommercial distribution and only if you\n    received the program in object code or executable form with such\n    an offer, in accord with Subsection b above.)\n\nThe source code for a work means the preferred form of the work for\nmaking modifications to it.  For an executable work, complete source\ncode means all the source code for all modules it contains, plus any\nassociated interface definition files, plus the scripts used to\ncontrol compilation and installation of the executable.  However, as a\nspecial exception, the source code distributed need not include\nanything that is normally distributed (in either source or binary\nform) with the major components (compiler, kernel, and so on) of the\noperating system on which the executable runs, unless that component\nitself accompanies the executable.\n\nIf distribution of executable or object code is made by offering\naccess to copy from a designated place, then offering equivalent\naccess to copy the source code from the same place counts as\ndistribution of the source code, even though third parties are not\ncompelled to copy the source along with the object code.\n\f\n  4. You may not copy, modify, sublicense, or distribute the Program\nexcept as expressly provided under this License.  Any attempt\notherwise to copy, modify, sublicense or distribute the Program is\nvoid, and will automatically terminate your rights under this License.\nHowever, parties who have received copies, or rights, from you under\nthis License will not have their licenses terminated so long as such\nparties remain in full compliance.\n\n  5. You are not required to accept this License, since you have not\nsigned it.  However, nothing else grants you permission to modify or\ndistribute the Program or its derivative works.  These actions are\nprohibited by law if you do not accept this License.  
Therefore, by\nmodifying or distributing the Program (or any work based on the\nProgram), you indicate your acceptance of this License to do so, and\nall its terms and conditions for copying, distributing or modifying\nthe Program or works based on it.\n\n  6. Each time you redistribute the Program (or any work based on the\nProgram), the recipient automatically receives a license from the\noriginal licensor to copy, distribute or modify the Program subject to\nthese terms and conditions.  You may not impose any further\nrestrictions on the recipients' exercise of the rights granted herein.\nYou are not responsible for enforcing compliance by third parties to\nthis License.\n\n  7. If, as a consequence of a court judgment or allegation of patent\ninfringement or for any other reason (not limited to patent issues),\nconditions are imposed on you (whether by court order, agreement or\notherwise) that contradict the conditions of this License, they do not\nexcuse you from the conditions of this License.  If you cannot\ndistribute so as to satisfy simultaneously your obligations under this\nLicense and any other pertinent obligations, then as a consequence you\nmay not distribute the Program at all.  
For example, if a patent\nlicense would not permit royalty-free redistribution of the Program by\nall those who receive copies directly or indirectly through you, then\nthe only way you could satisfy both it and this License would be to\nrefrain entirely from distribution of the Program.\n\nIf any portion of this section is held invalid or unenforceable under\nany particular circumstance, the balance of the section is intended to\napply and the section as a whole is intended to apply in other\ncircumstances.\n\nIt is not the purpose of this section to induce you to infringe any\npatents or other property right claims or to contest validity of any\nsuch claims; this section has the sole purpose of protecting the\nintegrity of the free software distribution system, which is\nimplemented by public license practices.  Many people have made\ngenerous contributions to the wide range of software distributed\nthrough that system in reliance on consistent application of that\nsystem; it is up to the author/donor to decide if he or she is willing\nto distribute software through any other system and a licensee cannot\nimpose that choice.\n\nThis section is intended to make thoroughly clear what is believed to\nbe a consequence of the rest of this License.\n\f\n  8. If the distribution and/or use of the Program is restricted in\ncertain countries either by patents or by copyrighted interfaces, the\noriginal copyright holder who places the Program under this License\nmay add an explicit geographical distribution limitation excluding\nthose countries, so that distribution is permitted only in or among\ncountries not thus excluded.  In such case, this License incorporates\nthe limitation as if written in the body of this License.\n\n  9. The Free Software Foundation may publish revised and/or new versions\nof the General Public License from time to time.  
Such new versions will\nbe similar in spirit to the present version, but may differ in detail to\naddress new problems or concerns.\n\nEach version is given a distinguishing version number.  If the Program\nspecifies a version number of this License which applies to it and \"any\nlater version\", you have the option of following the terms and conditions\neither of that version or of any later version published by the Free\nSoftware Foundation.  If the Program does not specify a version number of\nthis License, you may choose any version ever published by the Free Software\nFoundation.\n\n  10. If you wish to incorporate parts of the Program into other free\nprograms whose distribution conditions are different, write to the author\nto ask for permission.  For software which is copyrighted by the Free\nSoftware Foundation, write to the Free Software Foundation; we sometimes\nmake exceptions for this.  Our decision will be guided by the two goals\nof preserving the free status of all derivatives of our free software and\nof promoting the sharing and reuse of software generally.\n\n\t\t\t    NO WARRANTY\n\n  11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY\nFOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW.  EXCEPT WHEN\nOTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES\nPROVIDE THE PROGRAM \"AS IS\" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED\nOR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF\nMERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.  THE ENTIRE RISK AS\nTO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU.  SHOULD THE\nPROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING,\nREPAIR OR CORRECTION.\n\n  12. 
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING\nWILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR\nREDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES,\nINCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING\nOUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED\nTO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY\nYOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER\nPROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE\nPOSSIBILITY OF SUCH DAMAGES.\n\n\t\t     END OF TERMS AND CONDITIONS\n\f\n\tAppendix: How to Apply These Terms to Your New Programs\n\n  If you develop a new program, and you want it to be of the greatest\npossible use to the public, the best way to achieve this is to make it\nfree software which everyone can redistribute and change under these terms.\n\n  To do so, attach the following notices to the program.  It is safest\nto attach them to the start of each source file to most effectively\nconvey the exclusion of warranty; and each file should have at least\nthe \"copyright\" line and a pointer to where the full notice is found.\n\n    <one line to give the program's name and a brief idea of what it does.>\n    Copyright (C) 19yy  <name of author>\n\n    This program is free software; you can redistribute it and/or modify\n    it under the terms of the GNU General Public License as published by\n    the Free Software Foundation; either version 2 of the License, or\n    (at your option) any later version.\n\n    This program is distributed in the hope that it will be useful,\n    but WITHOUT ANY WARRANTY; without even the implied warranty of\n    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  
See the\n    GNU General Public License for more details.\n\n    You should have received a copy of the GNU General Public License\n    along with this program; if not, write to the Free Software\n    Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.\n\nAlso add information on how to contact you by electronic and paper mail.\n\nIf the program is interactive, make it output a short notice like this\nwhen it starts in an interactive mode:\n\n    Gnomovision version 69, Copyright (C) 19yy name of author\n    Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'.\n    This is free software, and you are welcome to redistribute it\n    under certain conditions; type `show c' for details.\n\nThe hypothetical commands `show w' and `show c' should show the appropriate\nparts of the General Public License.  Of course, the commands you use may\nbe called something other than `show w' and `show c'; they could even be\nmouse-clicks or menu items--whatever suits your program.\n\nYou should also get your employer (if you work as a programmer) or your\nschool, if any, to sign a \"copyright disclaimer\" for the program, if\nnecessary.  Here is a sample; alter the names:\n\n  Yoyodyne, Inc., hereby disclaims all copyright interest in the program\n  `Gnomovision' (which makes passes at compilers) written by James Hacker.\n\n  <signature of Ty Coon>, 1 April 1989\n  Ty Coon, President of Vice\n\nThis General Public License does not permit incorporating your program into\nproprietary programs.  If your program is a subroutine library, you may\nconsider it more useful to permit linking proprietary applications with the\nlibrary.  
If this is what you want to do, use the GNU Library General\nPublic License instead of this License.\n\n\nThe sources were compiled with musl-libc (content of COPYRIGHT):\n\nmusl as a whole is licensed under the following standard MIT license:\n\n----------------------------------------------------------------------\nCopyright © 2005-2020 Rich Felker, et al.\n\nPermission is hereby granted, free of charge, to any person obtaining\na copy of this software and associated documentation files (the\n\"Software\"), to deal in the Software without restriction, including\nwithout limitation the rights to use, copy, modify, merge, publish,\ndistribute, sublicense, and/or sell copies of the Software, and to\npermit persons to whom the Software is furnished to do so, subject to\nthe following conditions:\n\nThe above copyright notice and this permission notice shall be\nincluded in all copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND,\nEXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\nMERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.\nIN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY\nCLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,\nTORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE\nSOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n----------------------------------------------------------------------\n\nAuthors/contributors include:\n\nA. Wilcox\nAda Worcester\nAlex Dowad\nAlex Suykov\nAlexander Monakov\nAndre McCurdy\nAndrew Kelley\nAnthony G. Basile\nAric Belsito\nArvid Picciani\nBartosz Brachaczek\nBenjamin Peterson\nBobby Bingham\nBoris Brezillon\nBrent Cook\nChris Spiegel\nClément Vasseur\nDaniel Micay\nDaniel Sabogal\nDaurnimator\nDavid Carlier\nDavid Edelsohn\nDenys Vlasenko\nDmitry Ivanov\nDmitry V. 
Levin\nDrew DeVault\nEmil Renner Berthing\nFangrui Song\nFelix Fietkau\nFelix Janda\nGianluca Anzolin\nHauke Mehrtens\nHe X\nHiltjo Posthuma\nIsaac Dunham\nJaydeep Patil\nJens Gustedt\nJeremy Huntwork\nJo-Philipp Wich\nJoakim Sindholt\nJohn Spencer\nJulien Ramseier\nJustin Cormack\nKaarle Ritvanen\nKhem Raj\nKylie McClain\nLeah Neukirchen\nLuca Barbato\nLuka Perkov\nM Farkas-Dyck (Strake)\nMahesh Bodapati\nMarkus Wichmann\nMasanori Ogino\nMichael Clark\nMichael Forney\nMikhail Kremnyov\nNatanael Copa\nNicholas J. Kain\norc\nPascal Cuoq\nPatrick Oppenlander\nPetr Hosek\nPetr Skocik\nPierre Carrier\nReini Urban\nRich Felker\nRichard Pennington\nRyan Fairfax\nSamuel Holland\nSegev Finer\nShiz\nsin\nSolar Designer\nStefan Kristiansson\nStefan O'Rear\nSzabolcs Nagy\nTimo Teräs\nTrutz Behn\nValentin Ochs\nWill Dietz\nWilliam Haddon\nWilliam Pitcock\n\nPortions of this software are derived from third-party works licensed\nunder terms compatible with the above MIT license:\n\nThe TRE regular expression implementation (src/regex/reg* and\nsrc/regex/tre*) is Copyright © 2001-2008 Ville Laurikari and licensed\nunder a 2-clause BSD license (license text in the source files). The\nincluded version has been heavily modified by Rich Felker in 2012, in\nthe interests of size, simplicity, and namespace cleanliness.\n\nMuch of the math library code (src/math/* and src/complex/*) is\nCopyright © 1993,2004 Sun Microsystems or\nCopyright © 2003-2011 David Schultz or\nCopyright © 2003-2009 Steven G. Kargl or\nCopyright © 2003-2009 Bruce D. Evans or\nCopyright © 2008 Stephen L. Moshier or\nCopyright © 2017-2018 Arm Limited\nand labelled as such in comments in the individual source files. All\nhave been licensed under extremely permissive terms.\n\nThe ARM memcpy code (src/string/arm/memcpy.S) is Copyright © 2008\nThe Android Open Source Project and is licensed under a two-clause BSD\nlicense. 
It was taken from Bionic libc, used on Android.\n\nThe AArch64 memcpy and memset code (src/string/aarch64/*) are\nCopyright © 1999-2019, Arm Limited.\n\nThe implementation of DES for crypt (src/crypt/crypt_des.c) is\nCopyright © 1994 David Burren. It is licensed under a BSD license.\n\nThe implementation of blowfish crypt (src/crypt/crypt_blowfish.c) was\noriginally written by Solar Designer and placed into the public\ndomain. The code also comes with a fallback permissive license for use\nin jurisdictions that may not recognize the public domain.\n\nThe smoothsort implementation (src/stdlib/qsort.c) is Copyright © 2011\nValentin Ochs and is licensed under an MIT-style license.\n\nThe x86_64 port was written by Nicholas J. Kain and is licensed under\nthe standard MIT terms.\n\nThe mips and microblaze ports were originally written by Richard\nPennington for use in the ellcc project. The original code was adapted\nby Rich Felker for build system and code conventions during upstream\nintegration. It is licensed under the standard MIT terms.\n\nThe mips64 port was contributed by Imagination Technologies and is\nlicensed under the standard MIT terms.\n\nThe powerpc port was also originally written by Richard Pennington,\nand later supplemented and integrated by John Spencer. It is licensed\nunder the standard MIT terms.\n\nAll other files which have no copyright comments are original works\nproduced specifically for use as part of this library, written either\nby Rich Felker, the main author of the library, or by one or more\ncontibutors listed above. Details on authorship of individual files\ncan be found in the git version control history of the project. 
The\nomission of copyright and license comments in each file is in the\ninterest of source tree size.\n\nIn addition, permission is hereby granted for all public header files\n(include/* and arch/*/bits/*) and crt files intended to be linked into\napplications (crt/*, ldso/dlstart.c, and arch/*/crt_arch.h) to omit\nthe copyright notice and permission notice otherwise required by the\nlicense, and to use these files without any requirement of\nattribution. These files include substantial contributions from:\n\nBobby Bingham\nJohn Spencer\nNicholas J. Kain\nRich Felker\nRichard Pennington\nStefan Kristiansson\nSzabolcs Nagy\n\nall of whom have explicitly granted such permission.\n\nThis file previously contained text expressing a belief that most of\nthe files covered by the above exception were sufficiently trivial not\nto be subject to copyright, resulting in confusion over whether it\nnegated the permissions granted in the license. In the spirit of\npermissive licensing, and of not having licensing issues being an\nobstacle to adoption, that text has been removed.\n"
  },
  {
    "path": "wa/workloads/rt_app/bin/armeabi/README.rt-app",
    "content": "Sources of rt-app available at:\nGit commit: 857d6a6624469ba275a37493a10ebba00a50b467\nGit repository: https://github.com/douglas-raillard-arm/rt-app.git\n\n\nBuild host info:\n\nNAME=\"Alpine Linux\"\nID=alpine\nVERSION_ID=3.18.3\nPRETTY_NAME=\"Alpine Linux v3.18\"\nHOME_URL=\"https://alpinelinux.org/\"\nBUG_REPORT_URL=\"https://gitlab.alpinelinux.org/alpine/aports/-/issues\"\n\n\nBuild recipe:\n\nexport ARCH=armeabi\nexport BUILD_DIR=/tmp/tmpjnvvg6dg/armeabi/source\nexport LISA_ARCH_ASSETS=/lisa/_assets/binaries/armeabi\nexport LISA_HOME=''\n#! /bin/bash\n\nALPINE_VERSION=v3.18\nALPINE_BUILD_DEPENDENCIES=(autoconf automake bash cmake gcc git make libtool linux-headers musl-dev)\n\nbuild_jsonc() {\n    # As recommended in the README, build in a separate tree. The folder needs\n    # to be called \"json-c\" as rt-app will #include <json-c/json.h>\n    mkdir json-c\n    cd json-c\n    export ac_cv_func_malloc_0_nonnull=yes\n    export ac_cv_func_realloc_0_nonnull=yes\n    cmake ../json-c -DBUILD_SHARED_LIBS=OFF -DBUILD_STATIC_LIBS=ON -DCMAKE_C_COMPILER=${CROSS_COMPILE}gcc\n    make\n}\n\nbuild_numactl() {\n    cd numactl\n    ./autogen.sh\n    ./configure --host=$CONFIGURE_HOST --disable-shared --enable-static\n    make\n}\n\nbuild_rtapp() {\n    cd rt-app\n    export ac_cv_lib_json_c_json_object_from_file=yes\n    export ac_cv_lib_numa_numa_available=yes\n    ./autogen.sh\n    ./configure --host=$CONFIGURE_HOST LDFLAGS=\"--static -L$BUILD_DIR/json-c/ -L$BUILD_DIR/numactl/\" \\\n                CFLAGS=\"-I$BUILD_DIR -I$BUILD_DIR/numactl -I$BUILD_DIR/json-c\" --with-deadline\n    make\n    \"$CROSS_COMPILE\"strip src/rt-app\n}\n\ndownload() {\n    git clone https://github.com/json-c/json-c json-c\n    # git -C json-c checkout ddd049045d98dd3163d01a7d79184b3c7fb95a14\n    git -C json-c checkout json-c-0.16-20220414\n\n    git clone https://github.com/numactl/numactl.git\n    git -C numactl checkout v2.0.16\n\n    git clone 
https://github.com/douglas-raillard-arm/rt-app.git\n    # Branch: lisa\n    git -C rt-app checkout 857d6a6624469ba275a37493a10ebba00a50b467\n\n}\n\nbuild() {\n    (build_jsonc) && (build_numactl) && (build_rtapp)\n}\n\ninstall() {\n    source \"$LISA_HOME/tools/recipes/utils.sh\"\n    cp -v rt-app/src/rt-app \"$LISA_ARCH_ASSETS/rt-app\"\n\n    install_readme rt-app rt-app COPYING.in\n    # According to the readme, libnumactl is under the\n    # GNU Lesser General Public License, v2.1.\n    install_readme libnumactl numactl LICENSE.LGPL2.1\n    install_readme json-c json-c COPYING\n}\n\n\nThe sources were distributed under the following licence (content of rt-app/COPYING.in):\n\n\t\t    GNU GENERAL PUBLIC LICENSE\n\t\t       Version 2, June 1991\n\n Copyright (C) 1989, 1990, 1991, 1992 Free Software Foundation, Inc.\n 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA\n Everyone is permitted to copy and distribute verbatim copies\n of this license document, but changing it is not allowed.\n\n\t\t\t    Preamble\n\n  The licenses for most software are designed to take away your\nfreedom to share and change it.  By contrast, the GNU General Public\nLicense is intended to guarantee your freedom to share and change free\nsoftware--to make sure the software is free for all its users.  This\nGeneral Public License applies to most of the Free Software\nFoundation's software and to any other program whose authors commit to\nusing it.  (Some other Free Software Foundation software is covered by\nthe GNU Library General Public License instead.)  You can apply it to\nyour programs, too.\n\n  When we speak of free software, we are referring to freedom, not\nprice.  
Our General Public Licenses are designed to make sure that you\nhave the freedom to distribute copies of free software (and charge for\nthis service if you wish), that you receive source code or can get it\nif you want it, that you can change the software or use pieces of it\nin new free programs; and that you know you can do these things.\n\n  To protect your rights, we need to make restrictions that forbid\nanyone to deny you these rights or to ask you to surrender the rights.\nThese restrictions translate to certain responsibilities for you if you\ndistribute copies of the software, or if you modify it.\n\n  For example, if you distribute copies of such a program, whether\ngratis or for a fee, you must give the recipients all the rights that\nyou have.  You must make sure that they, too, receive or can get the\nsource code.  And you must show them these terms so they know their\nrights.\n\n  We protect your rights with two steps: (1) copyright the software, and\n(2) offer you this license which gives you legal permission to copy,\ndistribute and/or modify the software.\n\n  Also, for each author's protection and ours, we want to make certain\nthat everyone understands that there is no warranty for this free\nsoftware.  If the software is modified by someone else and passed on, we\nwant its recipients to know that what they have is not the original, so\nthat any problems introduced by others will not reflect on the original\nauthors' reputations.\n\n  Finally, any free program is threatened constantly by software\npatents.  We wish to avoid the danger that redistributors of a free\nprogram will individually obtain patent licenses, in effect making the\nprogram proprietary.  
To prevent this, we have made it clear that any\npatent must be licensed for everyone's free use or not licensed at all.\n\n  The precise terms and conditions for copying, distribution and\nmodification follow.\n\f\n\t\t    GNU GENERAL PUBLIC LICENSE\n   TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION\n\n  0. This License applies to any program or other work which contains\na notice placed by the copyright holder saying it may be distributed\nunder the terms of this General Public License.  The \"Program\", below,\nrefers to any such program or work, and a \"work based on the Program\"\nmeans either the Program or any derivative work under copyright law:\nthat is to say, a work containing the Program or a portion of it,\neither verbatim or with modifications and/or translated into another\nlanguage.  (Hereinafter, translation is included without limitation in\nthe term \"modification\".)  Each licensee is addressed as \"you\".\n\nActivities other than copying, distribution and modification are not\ncovered by this License; they are outside its scope.  The act of\nrunning the Program is not restricted, and the output from the Program\nis covered only if its contents constitute a work based on the\nProgram (independent of having been made by running the Program).\nWhether that is true depends on what the Program does.\n\n  1. You may copy and distribute verbatim copies of the Program's\nsource code as you receive it, in any medium, provided that you\nconspicuously and appropriately publish on each copy an appropriate\ncopyright notice and disclaimer of warranty; keep intact all the\nnotices that refer to this License and to the absence of any warranty;\nand give any other recipients of the Program a copy of this License\nalong with the Program.\n\nYou may charge a fee for the physical act of transferring a copy, and\nyou may at your option offer warranty protection in exchange for a fee.\n\n  2. 
You may modify your copy or copies of the Program or any portion\nof it, thus forming a work based on the Program, and copy and\ndistribute such modifications or work under the terms of Section 1\nabove, provided that you also meet all of these conditions:\n\n    a) You must cause the modified files to carry prominent notices\n    stating that you changed the files and the date of any change.\n\n    b) You must cause any work that you distribute or publish, that in\n    whole or in part contains or is derived from the Program or any\n    part thereof, to be licensed as a whole at no charge to all third\n    parties under the terms of this License.\n\n    c) If the modified program normally reads commands interactively\n    when run, you must cause it, when started running for such\n    interactive use in the most ordinary way, to print or display an\n    announcement including an appropriate copyright notice and a\n    notice that there is no warranty (or else, saying that you provide\n    a warranty) and that users may redistribute the program under\n    these conditions, and telling the user how to view a copy of this\n    License.  (Exception: if the Program itself is interactive but\n    does not normally print such an announcement, your work based on\n    the Program is not required to print an announcement.)\n\f\nThese requirements apply to the modified work as a whole.  If\nidentifiable sections of that work are not derived from the Program,\nand can be reasonably considered independent and separate works in\nthemselves, then this License, and its terms, do not apply to those\nsections when you distribute them as separate works.  
But when you\ndistribute the same sections as part of a whole which is a work based\non the Program, the distribution of the whole must be on the terms of\nthis License, whose permissions for other licensees extend to the\nentire whole, and thus to each and every part regardless of who wrote it.\n\nThus, it is not the intent of this section to claim rights or contest\nyour rights to work written entirely by you; rather, the intent is to\nexercise the right to control the distribution of derivative or\ncollective works based on the Program.\n\nIn addition, mere aggregation of another work not based on the Program\nwith the Program (or with a work based on the Program) on a volume of\na storage or distribution medium does not bring the other work under\nthe scope of this License.\n\n  3. You may copy and distribute the Program (or a work based on it,\nunder Section 2) in object code or executable form under the terms of\nSections 1 and 2 above provided that you also do one of the following:\n\n    a) Accompany it with the complete corresponding machine-readable\n    source code, which must be distributed under the terms of Sections\n    1 and 2 above on a medium customarily used for software interchange; or,\n\n    b) Accompany it with a written offer, valid for at least three\n    years, to give any third party, for a charge no more than your\n    cost of physically performing source distribution, a complete\n    machine-readable copy of the corresponding source code, to be\n    distributed under the terms of Sections 1 and 2 above on a medium\n    customarily used for software interchange; or,\n\n    c) Accompany it with the information you received as to the offer\n    to distribute corresponding source code.  
(This alternative is\n    allowed only for noncommercial distribution and only if you\n    received the program in object code or executable form with such\n    an offer, in accord with Subsection b above.)\n\nThe source code for a work means the preferred form of the work for\nmaking modifications to it.  For an executable work, complete source\ncode means all the source code for all modules it contains, plus any\nassociated interface definition files, plus the scripts used to\ncontrol compilation and installation of the executable.  However, as a\nspecial exception, the source code distributed need not include\nanything that is normally distributed (in either source or binary\nform) with the major components (compiler, kernel, and so on) of the\noperating system on which the executable runs, unless that component\nitself accompanies the executable.\n\nIf distribution of executable or object code is made by offering\naccess to copy from a designated place, then offering equivalent\naccess to copy the source code from the same place counts as\ndistribution of the source code, even though third parties are not\ncompelled to copy the source along with the object code.\n\f\n  4. You may not copy, modify, sublicense, or distribute the Program\nexcept as expressly provided under this License.  Any attempt\notherwise to copy, modify, sublicense or distribute the Program is\nvoid, and will automatically terminate your rights under this License.\nHowever, parties who have received copies, or rights, from you under\nthis License will not have their licenses terminated so long as such\nparties remain in full compliance.\n\n  5. You are not required to accept this License, since you have not\nsigned it.  However, nothing else grants you permission to modify or\ndistribute the Program or its derivative works.  These actions are\nprohibited by law if you do not accept this License.  
Therefore, by\nmodifying or distributing the Program (or any work based on the\nProgram), you indicate your acceptance of this License to do so, and\nall its terms and conditions for copying, distributing or modifying\nthe Program or works based on it.\n\n  6. Each time you redistribute the Program (or any work based on the\nProgram), the recipient automatically receives a license from the\noriginal licensor to copy, distribute or modify the Program subject to\nthese terms and conditions.  You may not impose any further\nrestrictions on the recipients' exercise of the rights granted herein.\nYou are not responsible for enforcing compliance by third parties to\nthis License.\n\n  7. If, as a consequence of a court judgment or allegation of patent\ninfringement or for any other reason (not limited to patent issues),\nconditions are imposed on you (whether by court order, agreement or\notherwise) that contradict the conditions of this License, they do not\nexcuse you from the conditions of this License.  If you cannot\ndistribute so as to satisfy simultaneously your obligations under this\nLicense and any other pertinent obligations, then as a consequence you\nmay not distribute the Program at all.  
For example, if a patent\nlicense would not permit royalty-free redistribution of the Program by\nall those who receive copies directly or indirectly through you, then\nthe only way you could satisfy both it and this License would be to\nrefrain entirely from distribution of the Program.\n\nIf any portion of this section is held invalid or unenforceable under\nany particular circumstance, the balance of the section is intended to\napply and the section as a whole is intended to apply in other\ncircumstances.\n\nIt is not the purpose of this section to induce you to infringe any\npatents or other property right claims or to contest validity of any\nsuch claims; this section has the sole purpose of protecting the\nintegrity of the free software distribution system, which is\nimplemented by public license practices.  Many people have made\ngenerous contributions to the wide range of software distributed\nthrough that system in reliance on consistent application of that\nsystem; it is up to the author/donor to decide if he or she is willing\nto distribute software through any other system and a licensee cannot\nimpose that choice.\n\nThis section is intended to make thoroughly clear what is believed to\nbe a consequence of the rest of this License.\n\f\n  8. If the distribution and/or use of the Program is restricted in\ncertain countries either by patents or by copyrighted interfaces, the\noriginal copyright holder who places the Program under this License\nmay add an explicit geographical distribution limitation excluding\nthose countries, so that distribution is permitted only in or among\ncountries not thus excluded.  In such case, this License incorporates\nthe limitation as if written in the body of this License.\n\n  9. The Free Software Foundation may publish revised and/or new versions\nof the General Public License from time to time.  
Such new versions will\nbe similar in spirit to the present version, but may differ in detail to\naddress new problems or concerns.\n\nEach version is given a distinguishing version number.  If the Program\nspecifies a version number of this License which applies to it and \"any\nlater version\", you have the option of following the terms and conditions\neither of that version or of any later version published by the Free\nSoftware Foundation.  If the Program does not specify a version number of\nthis License, you may choose any version ever published by the Free Software\nFoundation.\n\n  10. If you wish to incorporate parts of the Program into other free\nprograms whose distribution conditions are different, write to the author\nto ask for permission.  For software which is copyrighted by the Free\nSoftware Foundation, write to the Free Software Foundation; we sometimes\nmake exceptions for this.  Our decision will be guided by the two goals\nof preserving the free status of all derivatives of our free software and\nof promoting the sharing and reuse of software generally.\n\n\t\t\t    NO WARRANTY\n\n  11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY\nFOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW.  EXCEPT WHEN\nOTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES\nPROVIDE THE PROGRAM \"AS IS\" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED\nOR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF\nMERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.  THE ENTIRE RISK AS\nTO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU.  SHOULD THE\nPROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING,\nREPAIR OR CORRECTION.\n\n  12. 
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING\nWILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR\nREDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES,\nINCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING\nOUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED\nTO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY\nYOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER\nPROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE\nPOSSIBILITY OF SUCH DAMAGES.\n\n\t\t     END OF TERMS AND CONDITIONS\n\f\n\tAppendix: How to Apply These Terms to Your New Programs\n\n  If you develop a new program, and you want it to be of the greatest\npossible use to the public, the best way to achieve this is to make it\nfree software which everyone can redistribute and change under these terms.\n\n  To do so, attach the following notices to the program.  It is safest\nto attach them to the start of each source file to most effectively\nconvey the exclusion of warranty; and each file should have at least\nthe \"copyright\" line and a pointer to where the full notice is found.\n\n    <one line to give the program's name and a brief idea of what it does.>\n    Copyright (C) 19yy  <name of author>\n\n    This program is free software; you can redistribute it and/or modify\n    it under the terms of the GNU General Public License as published by\n    the Free Software Foundation; either version 2 of the License, or\n    (at your option) any later version.\n\n    This program is distributed in the hope that it will be useful,\n    but WITHOUT ANY WARRANTY; without even the implied warranty of\n    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  
See the\n    GNU General Public License for more details.\n\n    You should have received a copy of the GNU General Public License\n    along with this program; if not, write to the Free Software\n    Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.\n\nAlso add information on how to contact you by electronic and paper mail.\n\nIf the program is interactive, make it output a short notice like this\nwhen it starts in an interactive mode:\n\n    Gnomovision version 69, Copyright (C) 19yy name of author\n    Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'.\n    This is free software, and you are welcome to redistribute it\n    under certain conditions; type `show c' for details.\n\nThe hypothetical commands `show w' and `show c' should show the appropriate\nparts of the General Public License.  Of course, the commands you use may\nbe called something other than `show w' and `show c'; they could even be\nmouse-clicks or menu items--whatever suits your program.\n\nYou should also get your employer (if you work as a programmer) or your\nschool, if any, to sign a \"copyright disclaimer\" for the program, if\nnecessary.  Here is a sample; alter the names:\n\n  Yoyodyne, Inc., hereby disclaims all copyright interest in the program\n  `Gnomovision' (which makes passes at compilers) written by James Hacker.\n\n  <signature of Ty Coon>, 1 April 1989\n  Ty Coon, President of Vice\n\nThis General Public License does not permit incorporating your program into\nproprietary programs.  If your program is a subroutine library, you may\nconsider it more useful to permit linking proprietary applications with the\nlibrary.  
If this is what you want to do, use the GNU Library General\nPublic License instead of this License.\n\n\nThe sources were compiled with musl-libc (content of COPYRIGHT):\n\nmusl as a whole is licensed under the following standard MIT license:\n\n----------------------------------------------------------------------\nCopyright © 2005-2020 Rich Felker, et al.\n\nPermission is hereby granted, free of charge, to any person obtaining\na copy of this software and associated documentation files (the\n\"Software\"), to deal in the Software without restriction, including\nwithout limitation the rights to use, copy, modify, merge, publish,\ndistribute, sublicense, and/or sell copies of the Software, and to\npermit persons to whom the Software is furnished to do so, subject to\nthe following conditions:\n\nThe above copyright notice and this permission notice shall be\nincluded in all copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND,\nEXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\nMERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.\nIN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY\nCLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,\nTORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE\nSOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n----------------------------------------------------------------------\n\nAuthors/contributors include:\n\nA. Wilcox\nAda Worcester\nAlex Dowad\nAlex Suykov\nAlexander Monakov\nAndre McCurdy\nAndrew Kelley\nAnthony G. Basile\nAric Belsito\nArvid Picciani\nBartosz Brachaczek\nBenjamin Peterson\nBobby Bingham\nBoris Brezillon\nBrent Cook\nChris Spiegel\nClément Vasseur\nDaniel Micay\nDaniel Sabogal\nDaurnimator\nDavid Carlier\nDavid Edelsohn\nDenys Vlasenko\nDmitry Ivanov\nDmitry V. 
Levin\nDrew DeVault\nEmil Renner Berthing\nFangrui Song\nFelix Fietkau\nFelix Janda\nGianluca Anzolin\nHauke Mehrtens\nHe X\nHiltjo Posthuma\nIsaac Dunham\nJaydeep Patil\nJens Gustedt\nJeremy Huntwork\nJo-Philipp Wich\nJoakim Sindholt\nJohn Spencer\nJulien Ramseier\nJustin Cormack\nKaarle Ritvanen\nKhem Raj\nKylie McClain\nLeah Neukirchen\nLuca Barbato\nLuka Perkov\nM Farkas-Dyck (Strake)\nMahesh Bodapati\nMarkus Wichmann\nMasanori Ogino\nMichael Clark\nMichael Forney\nMikhail Kremnyov\nNatanael Copa\nNicholas J. Kain\norc\nPascal Cuoq\nPatrick Oppenlander\nPetr Hosek\nPetr Skocik\nPierre Carrier\nReini Urban\nRich Felker\nRichard Pennington\nRyan Fairfax\nSamuel Holland\nSegev Finer\nShiz\nsin\nSolar Designer\nStefan Kristiansson\nStefan O'Rear\nSzabolcs Nagy\nTimo Teräs\nTrutz Behn\nValentin Ochs\nWill Dietz\nWilliam Haddon\nWilliam Pitcock\n\nPortions of this software are derived from third-party works licensed\nunder terms compatible with the above MIT license:\n\nThe TRE regular expression implementation (src/regex/reg* and\nsrc/regex/tre*) is Copyright © 2001-2008 Ville Laurikari and licensed\nunder a 2-clause BSD license (license text in the source files). The\nincluded version has been heavily modified by Rich Felker in 2012, in\nthe interests of size, simplicity, and namespace cleanliness.\n\nMuch of the math library code (src/math/* and src/complex/*) is\nCopyright © 1993,2004 Sun Microsystems or\nCopyright © 2003-2011 David Schultz or\nCopyright © 2003-2009 Steven G. Kargl or\nCopyright © 2003-2009 Bruce D. Evans or\nCopyright © 2008 Stephen L. Moshier or\nCopyright © 2017-2018 Arm Limited\nand labelled as such in comments in the individual source files. All\nhave been licensed under extremely permissive terms.\n\nThe ARM memcpy code (src/string/arm/memcpy.S) is Copyright © 2008\nThe Android Open Source Project and is licensed under a two-clause BSD\nlicense. 
It was taken from Bionic libc, used on Android.\n\nThe AArch64 memcpy and memset code (src/string/aarch64/*) are\nCopyright © 1999-2019, Arm Limited.\n\nThe implementation of DES for crypt (src/crypt/crypt_des.c) is\nCopyright © 1994 David Burren. It is licensed under a BSD license.\n\nThe implementation of blowfish crypt (src/crypt/crypt_blowfish.c) was\noriginally written by Solar Designer and placed into the public\ndomain. The code also comes with a fallback permissive license for use\nin jurisdictions that may not recognize the public domain.\n\nThe smoothsort implementation (src/stdlib/qsort.c) is Copyright © 2011\nValentin Ochs and is licensed under an MIT-style license.\n\nThe x86_64 port was written by Nicholas J. Kain and is licensed under\nthe standard MIT terms.\n\nThe mips and microblaze ports were originally written by Richard\nPennington for use in the ellcc project. The original code was adapted\nby Rich Felker for build system and code conventions during upstream\nintegration. It is licensed under the standard MIT terms.\n\nThe mips64 port was contributed by Imagination Technologies and is\nlicensed under the standard MIT terms.\n\nThe powerpc port was also originally written by Richard Pennington,\nand later supplemented and integrated by John Spencer. It is licensed\nunder the standard MIT terms.\n\nAll other files which have no copyright comments are original works\nproduced specifically for use as part of this library, written either\nby Rich Felker, the main author of the library, or by one or more\ncontibutors listed above. Details on authorship of individual files\ncan be found in the git version control history of the project. 
The\nomission of copyright and license comments in each file is in the\ninterest of source tree size.\n\nIn addition, permission is hereby granted for all public header files\n(include/* and arch/*/bits/*) and crt files intended to be linked into\napplications (crt/*, ldso/dlstart.c, and arch/*/crt_arch.h) to omit\nthe copyright notice and permission notice otherwise required by the\nlicense, and to use these files without any requirement of\nattribution. These files include substantial contributions from:\n\nBobby Bingham\nJohn Spencer\nNicholas J. Kain\nRich Felker\nRichard Pennington\nStefan Kristiansson\nSzabolcs Nagy\n\nall of whom have explicitly granted such permission.\n\nThis file previously contained text expressing a belief that most of\nthe files covered by the above exception were sufficiently trivial not\nto be subject to copyright, resulting in confusion over whether it\nnegated the permissions granted in the license. In the spirit of\npermissive licensing, and of not having licensing issues being an\nobstacle to adoption, that text has been removed.\n"
  },
  {
    "path": "wa/workloads/rt_app/bin/ppc64le/README.rt-app",
    "content": "Sources of rt-app available at:\nGit commit: 857d6a6624469ba275a37493a10ebba00a50b467\nGit repository: https://github.com/douglas-raillard-arm/rt-app.git\n\n\nBuild host info:\n\nNAME=\"Alpine Linux\"\nID=alpine\nVERSION_ID=3.18.3\nPRETTY_NAME=\"Alpine Linux v3.18\"\nHOME_URL=\"https://alpinelinux.org/\"\nBUG_REPORT_URL=\"https://gitlab.alpinelinux.org/alpine/aports/-/issues\"\n\n\nBuild recipe:\n\nexport ARCH=ppc64le\nexport BUILD_DIR=/tmp/tmpjnvvg6dg/ppc64le/source\nexport LISA_ARCH_ASSETS=/lisa/_assets/binaries/ppc64le\nexport LISA_HOME=''\n#! /bin/bash\n\nALPINE_VERSION=v3.18\nALPINE_BUILD_DEPENDENCIES=(autoconf automake bash cmake gcc git make libtool linux-headers musl-dev)\n\nbuild_jsonc() {\n    # As recommended in the README, build in a separate tree. The folder needs\n    # to be called \"json-c\" as rt-app will #include <json-c/json.h>\n    mkdir json-c\n    cd json-c\n    export ac_cv_func_malloc_0_nonnull=yes\n    export ac_cv_func_realloc_0_nonnull=yes\n    cmake ../json-c -DBUILD_SHARED_LIBS=OFF -DBUILD_STATIC_LIBS=ON -DCMAKE_C_COMPILER=${CROSS_COMPILE}gcc\n    make\n}\n\nbuild_numactl() {\n    cd numactl\n    ./autogen.sh\n    ./configure --host=$CONFIGURE_HOST --disable-shared --enable-static\n    make\n}\n\nbuild_rtapp() {\n    cd rt-app\n    export ac_cv_lib_json_c_json_object_from_file=yes\n    export ac_cv_lib_numa_numa_available=yes\n    ./autogen.sh\n    ./configure --host=$CONFIGURE_HOST LDFLAGS=\"--static -L$BUILD_DIR/json-c/ -L$BUILD_DIR/numactl/\" \\\n                CFLAGS=\"-I$BUILD_DIR -I$BUILD_DIR/numactl -I$BUILD_DIR/json-c\" --with-deadline\n    make\n    \"$CROSS_COMPILE\"strip src/rt-app\n}\n\ndownload() {\n    git clone https://github.com/json-c/json-c json-c\n    # git -C json-c checkout ddd049045d98dd3163d01a7d79184b3c7fb95a14\n    git -C json-c checkout json-c-0.16-20220414\n\n    git clone https://github.com/numactl/numactl.git\n    git -C numactl checkout v2.0.16\n\n    git clone 
https://github.com/douglas-raillard-arm/rt-app.git\n    # Branch: lisa\n    git -C rt-app checkout 857d6a6624469ba275a37493a10ebba00a50b467\n\n}\n\nbuild() {\n    (build_jsonc) && (build_numactl) && (build_rtapp)\n}\n\ninstall() {\n    source \"$LISA_HOME/tools/recipes/utils.sh\"\n    cp -v rt-app/src/rt-app \"$LISA_ARCH_ASSETS/rt-app\"\n\n    install_readme rt-app rt-app COPYING.in\n    # According to the readme, libnumactl is under the\n    # GNU Lesser General Public License, v2.1.\n    install_readme libnumactl numactl LICENSE.LGPL2.1\n    install_readme json-c json-c COPYING\n}\n\n\nThe sources were distributed under the following licence (content of rt-app/COPYING.in):\n\n\t\t    GNU GENERAL PUBLIC LICENSE\n\t\t       Version 2, June 1991\n\n Copyright (C) 1989, 1990, 1991, 1992 Free Software Foundation, Inc.\n 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA\n Everyone is permitted to copy and distribute verbatim copies\n of this license document, but changing it is not allowed.\n\n\t\t\t    Preamble\n\n  The licenses for most software are designed to take away your\nfreedom to share and change it.  By contrast, the GNU General Public\nLicense is intended to guarantee your freedom to share and change free\nsoftware--to make sure the software is free for all its users.  This\nGeneral Public License applies to most of the Free Software\nFoundation's software and to any other program whose authors commit to\nusing it.  (Some other Free Software Foundation software is covered by\nthe GNU Library General Public License instead.)  You can apply it to\nyour programs, too.\n\n  When we speak of free software, we are referring to freedom, not\nprice.  
Our General Public Licenses are designed to make sure that you\nhave the freedom to distribute copies of free software (and charge for\nthis service if you wish), that you receive source code or can get it\nif you want it, that you can change the software or use pieces of it\nin new free programs; and that you know you can do these things.\n\n  To protect your rights, we need to make restrictions that forbid\nanyone to deny you these rights or to ask you to surrender the rights.\nThese restrictions translate to certain responsibilities for you if you\ndistribute copies of the software, or if you modify it.\n\n  For example, if you distribute copies of such a program, whether\ngratis or for a fee, you must give the recipients all the rights that\nyou have.  You must make sure that they, too, receive or can get the\nsource code.  And you must show them these terms so they know their\nrights.\n\n  We protect your rights with two steps: (1) copyright the software, and\n(2) offer you this license which gives you legal permission to copy,\ndistribute and/or modify the software.\n\n  Also, for each author's protection and ours, we want to make certain\nthat everyone understands that there is no warranty for this free\nsoftware.  If the software is modified by someone else and passed on, we\nwant its recipients to know that what they have is not the original, so\nthat any problems introduced by others will not reflect on the original\nauthors' reputations.\n\n  Finally, any free program is threatened constantly by software\npatents.  We wish to avoid the danger that redistributors of a free\nprogram will individually obtain patent licenses, in effect making the\nprogram proprietary.  
To prevent this, we have made it clear that any\npatent must be licensed for everyone's free use or not licensed at all.\n\n  The precise terms and conditions for copying, distribution and\nmodification follow.\n\f\n\t\t    GNU GENERAL PUBLIC LICENSE\n   TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION\n\n  0. This License applies to any program or other work which contains\na notice placed by the copyright holder saying it may be distributed\nunder the terms of this General Public License.  The \"Program\", below,\nrefers to any such program or work, and a \"work based on the Program\"\nmeans either the Program or any derivative work under copyright law:\nthat is to say, a work containing the Program or a portion of it,\neither verbatim or with modifications and/or translated into another\nlanguage.  (Hereinafter, translation is included without limitation in\nthe term \"modification\".)  Each licensee is addressed as \"you\".\n\nActivities other than copying, distribution and modification are not\ncovered by this License; they are outside its scope.  The act of\nrunning the Program is not restricted, and the output from the Program\nis covered only if its contents constitute a work based on the\nProgram (independent of having been made by running the Program).\nWhether that is true depends on what the Program does.\n\n  1. You may copy and distribute verbatim copies of the Program's\nsource code as you receive it, in any medium, provided that you\nconspicuously and appropriately publish on each copy an appropriate\ncopyright notice and disclaimer of warranty; keep intact all the\nnotices that refer to this License and to the absence of any warranty;\nand give any other recipients of the Program a copy of this License\nalong with the Program.\n\nYou may charge a fee for the physical act of transferring a copy, and\nyou may at your option offer warranty protection in exchange for a fee.\n\n  2. 
You may modify your copy or copies of the Program or any portion\nof it, thus forming a work based on the Program, and copy and\ndistribute such modifications or work under the terms of Section 1\nabove, provided that you also meet all of these conditions:\n\n    a) You must cause the modified files to carry prominent notices\n    stating that you changed the files and the date of any change.\n\n    b) You must cause any work that you distribute or publish, that in\n    whole or in part contains or is derived from the Program or any\n    part thereof, to be licensed as a whole at no charge to all third\n    parties under the terms of this License.\n\n    c) If the modified program normally reads commands interactively\n    when run, you must cause it, when started running for such\n    interactive use in the most ordinary way, to print or display an\n    announcement including an appropriate copyright notice and a\n    notice that there is no warranty (or else, saying that you provide\n    a warranty) and that users may redistribute the program under\n    these conditions, and telling the user how to view a copy of this\n    License.  (Exception: if the Program itself is interactive but\n    does not normally print such an announcement, your work based on\n    the Program is not required to print an announcement.)\n\f\nThese requirements apply to the modified work as a whole.  If\nidentifiable sections of that work are not derived from the Program,\nand can be reasonably considered independent and separate works in\nthemselves, then this License, and its terms, do not apply to those\nsections when you distribute them as separate works.  
But when you\ndistribute the same sections as part of a whole which is a work based\non the Program, the distribution of the whole must be on the terms of\nthis License, whose permissions for other licensees extend to the\nentire whole, and thus to each and every part regardless of who wrote it.\n\nThus, it is not the intent of this section to claim rights or contest\nyour rights to work written entirely by you; rather, the intent is to\nexercise the right to control the distribution of derivative or\ncollective works based on the Program.\n\nIn addition, mere aggregation of another work not based on the Program\nwith the Program (or with a work based on the Program) on a volume of\na storage or distribution medium does not bring the other work under\nthe scope of this License.\n\n  3. You may copy and distribute the Program (or a work based on it,\nunder Section 2) in object code or executable form under the terms of\nSections 1 and 2 above provided that you also do one of the following:\n\n    a) Accompany it with the complete corresponding machine-readable\n    source code, which must be distributed under the terms of Sections\n    1 and 2 above on a medium customarily used for software interchange; or,\n\n    b) Accompany it with a written offer, valid for at least three\n    years, to give any third party, for a charge no more than your\n    cost of physically performing source distribution, a complete\n    machine-readable copy of the corresponding source code, to be\n    distributed under the terms of Sections 1 and 2 above on a medium\n    customarily used for software interchange; or,\n\n    c) Accompany it with the information you received as to the offer\n    to distribute corresponding source code.  
(This alternative is\n    allowed only for noncommercial distribution and only if you\n    received the program in object code or executable form with such\n    an offer, in accord with Subsection b above.)\n\nThe source code for a work means the preferred form of the work for\nmaking modifications to it.  For an executable work, complete source\ncode means all the source code for all modules it contains, plus any\nassociated interface definition files, plus the scripts used to\ncontrol compilation and installation of the executable.  However, as a\nspecial exception, the source code distributed need not include\nanything that is normally distributed (in either source or binary\nform) with the major components (compiler, kernel, and so on) of the\noperating system on which the executable runs, unless that component\nitself accompanies the executable.\n\nIf distribution of executable or object code is made by offering\naccess to copy from a designated place, then offering equivalent\naccess to copy the source code from the same place counts as\ndistribution of the source code, even though third parties are not\ncompelled to copy the source along with the object code.\n\f\n  4. You may not copy, modify, sublicense, or distribute the Program\nexcept as expressly provided under this License.  Any attempt\notherwise to copy, modify, sublicense or distribute the Program is\nvoid, and will automatically terminate your rights under this License.\nHowever, parties who have received copies, or rights, from you under\nthis License will not have their licenses terminated so long as such\nparties remain in full compliance.\n\n  5. You are not required to accept this License, since you have not\nsigned it.  However, nothing else grants you permission to modify or\ndistribute the Program or its derivative works.  These actions are\nprohibited by law if you do not accept this License.  
Therefore, by\nmodifying or distributing the Program (or any work based on the\nProgram), you indicate your acceptance of this License to do so, and\nall its terms and conditions for copying, distributing or modifying\nthe Program or works based on it.\n\n  6. Each time you redistribute the Program (or any work based on the\nProgram), the recipient automatically receives a license from the\noriginal licensor to copy, distribute or modify the Program subject to\nthese terms and conditions.  You may not impose any further\nrestrictions on the recipients' exercise of the rights granted herein.\nYou are not responsible for enforcing compliance by third parties to\nthis License.\n\n  7. If, as a consequence of a court judgment or allegation of patent\ninfringement or for any other reason (not limited to patent issues),\nconditions are imposed on you (whether by court order, agreement or\notherwise) that contradict the conditions of this License, they do not\nexcuse you from the conditions of this License.  If you cannot\ndistribute so as to satisfy simultaneously your obligations under this\nLicense and any other pertinent obligations, then as a consequence you\nmay not distribute the Program at all.  
For example, if a patent\nlicense would not permit royalty-free redistribution of the Program by\nall those who receive copies directly or indirectly through you, then\nthe only way you could satisfy both it and this License would be to\nrefrain entirely from distribution of the Program.\n\nIf any portion of this section is held invalid or unenforceable under\nany particular circumstance, the balance of the section is intended to\napply and the section as a whole is intended to apply in other\ncircumstances.\n\nIt is not the purpose of this section to induce you to infringe any\npatents or other property right claims or to contest validity of any\nsuch claims; this section has the sole purpose of protecting the\nintegrity of the free software distribution system, which is\nimplemented by public license practices.  Many people have made\ngenerous contributions to the wide range of software distributed\nthrough that system in reliance on consistent application of that\nsystem; it is up to the author/donor to decide if he or she is willing\nto distribute software through any other system and a licensee cannot\nimpose that choice.\n\nThis section is intended to make thoroughly clear what is believed to\nbe a consequence of the rest of this License.\n\f\n  8. If the distribution and/or use of the Program is restricted in\ncertain countries either by patents or by copyrighted interfaces, the\noriginal copyright holder who places the Program under this License\nmay add an explicit geographical distribution limitation excluding\nthose countries, so that distribution is permitted only in or among\ncountries not thus excluded.  In such case, this License incorporates\nthe limitation as if written in the body of this License.\n\n  9. The Free Software Foundation may publish revised and/or new versions\nof the General Public License from time to time.  
Such new versions will\nbe similar in spirit to the present version, but may differ in detail to\naddress new problems or concerns.\n\nEach version is given a distinguishing version number.  If the Program\nspecifies a version number of this License which applies to it and \"any\nlater version\", you have the option of following the terms and conditions\neither of that version or of any later version published by the Free\nSoftware Foundation.  If the Program does not specify a version number of\nthis License, you may choose any version ever published by the Free Software\nFoundation.\n\n  10. If you wish to incorporate parts of the Program into other free\nprograms whose distribution conditions are different, write to the author\nto ask for permission.  For software which is copyrighted by the Free\nSoftware Foundation, write to the Free Software Foundation; we sometimes\nmake exceptions for this.  Our decision will be guided by the two goals\nof preserving the free status of all derivatives of our free software and\nof promoting the sharing and reuse of software generally.\n\n\t\t\t    NO WARRANTY\n\n  11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY\nFOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW.  EXCEPT WHEN\nOTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES\nPROVIDE THE PROGRAM \"AS IS\" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED\nOR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF\nMERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.  THE ENTIRE RISK AS\nTO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU.  SHOULD THE\nPROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING,\nREPAIR OR CORRECTION.\n\n  12. 
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING\nWILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR\nREDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES,\nINCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING\nOUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED\nTO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY\nYOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER\nPROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE\nPOSSIBILITY OF SUCH DAMAGES.\n\n\t\t     END OF TERMS AND CONDITIONS\n\f\n\tAppendix: How to Apply These Terms to Your New Programs\n\n  If you develop a new program, and you want it to be of the greatest\npossible use to the public, the best way to achieve this is to make it\nfree software which everyone can redistribute and change under these terms.\n\n  To do so, attach the following notices to the program.  It is safest\nto attach them to the start of each source file to most effectively\nconvey the exclusion of warranty; and each file should have at least\nthe \"copyright\" line and a pointer to where the full notice is found.\n\n    <one line to give the program's name and a brief idea of what it does.>\n    Copyright (C) 19yy  <name of author>\n\n    This program is free software; you can redistribute it and/or modify\n    it under the terms of the GNU General Public License as published by\n    the Free Software Foundation; either version 2 of the License, or\n    (at your option) any later version.\n\n    This program is distributed in the hope that it will be useful,\n    but WITHOUT ANY WARRANTY; without even the implied warranty of\n    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  
See the\n    GNU General Public License for more details.\n\n    You should have received a copy of the GNU General Public License\n    along with this program; if not, write to the Free Software\n    Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.\n\nAlso add information on how to contact you by electronic and paper mail.\n\nIf the program is interactive, make it output a short notice like this\nwhen it starts in an interactive mode:\n\n    Gnomovision version 69, Copyright (C) 19yy name of author\n    Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'.\n    This is free software, and you are welcome to redistribute it\n    under certain conditions; type `show c' for details.\n\nThe hypothetical commands `show w' and `show c' should show the appropriate\nparts of the General Public License.  Of course, the commands you use may\nbe called something other than `show w' and `show c'; they could even be\nmouse-clicks or menu items--whatever suits your program.\n\nYou should also get your employer (if you work as a programmer) or your\nschool, if any, to sign a \"copyright disclaimer\" for the program, if\nnecessary.  Here is a sample; alter the names:\n\n  Yoyodyne, Inc., hereby disclaims all copyright interest in the program\n  `Gnomovision' (which makes passes at compilers) written by James Hacker.\n\n  <signature of Ty Coon>, 1 April 1989\n  Ty Coon, President of Vice\n\nThis General Public License does not permit incorporating your program into\nproprietary programs.  If your program is a subroutine library, you may\nconsider it more useful to permit linking proprietary applications with the\nlibrary.  
If this is what you want to do, use the GNU Library General\nPublic License instead of this License.\n\n\nThe sources were compiled with musl-libc (content of COPYRIGHT):\n\nmusl as a whole is licensed under the following standard MIT license:\n\n----------------------------------------------------------------------\nCopyright © 2005-2020 Rich Felker, et al.\n\nPermission is hereby granted, free of charge, to any person obtaining\na copy of this software and associated documentation files (the\n\"Software\"), to deal in the Software without restriction, including\nwithout limitation the rights to use, copy, modify, merge, publish,\ndistribute, sublicense, and/or sell copies of the Software, and to\npermit persons to whom the Software is furnished to do so, subject to\nthe following conditions:\n\nThe above copyright notice and this permission notice shall be\nincluded in all copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND,\nEXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\nMERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.\nIN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY\nCLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,\nTORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE\nSOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n----------------------------------------------------------------------\n\nAuthors/contributors include:\n\nA. Wilcox\nAda Worcester\nAlex Dowad\nAlex Suykov\nAlexander Monakov\nAndre McCurdy\nAndrew Kelley\nAnthony G. Basile\nAric Belsito\nArvid Picciani\nBartosz Brachaczek\nBenjamin Peterson\nBobby Bingham\nBoris Brezillon\nBrent Cook\nChris Spiegel\nClément Vasseur\nDaniel Micay\nDaniel Sabogal\nDaurnimator\nDavid Carlier\nDavid Edelsohn\nDenys Vlasenko\nDmitry Ivanov\nDmitry V. 
Levin\nDrew DeVault\nEmil Renner Berthing\nFangrui Song\nFelix Fietkau\nFelix Janda\nGianluca Anzolin\nHauke Mehrtens\nHe X\nHiltjo Posthuma\nIsaac Dunham\nJaydeep Patil\nJens Gustedt\nJeremy Huntwork\nJo-Philipp Wich\nJoakim Sindholt\nJohn Spencer\nJulien Ramseier\nJustin Cormack\nKaarle Ritvanen\nKhem Raj\nKylie McClain\nLeah Neukirchen\nLuca Barbato\nLuka Perkov\nM Farkas-Dyck (Strake)\nMahesh Bodapati\nMarkus Wichmann\nMasanori Ogino\nMichael Clark\nMichael Forney\nMikhail Kremnyov\nNatanael Copa\nNicholas J. Kain\norc\nPascal Cuoq\nPatrick Oppenlander\nPetr Hosek\nPetr Skocik\nPierre Carrier\nReini Urban\nRich Felker\nRichard Pennington\nRyan Fairfax\nSamuel Holland\nSegev Finer\nShiz\nsin\nSolar Designer\nStefan Kristiansson\nStefan O'Rear\nSzabolcs Nagy\nTimo Teräs\nTrutz Behn\nValentin Ochs\nWill Dietz\nWilliam Haddon\nWilliam Pitcock\n\nPortions of this software are derived from third-party works licensed\nunder terms compatible with the above MIT license:\n\nThe TRE regular expression implementation (src/regex/reg* and\nsrc/regex/tre*) is Copyright © 2001-2008 Ville Laurikari and licensed\nunder a 2-clause BSD license (license text in the source files). The\nincluded version has been heavily modified by Rich Felker in 2012, in\nthe interests of size, simplicity, and namespace cleanliness.\n\nMuch of the math library code (src/math/* and src/complex/*) is\nCopyright © 1993,2004 Sun Microsystems or\nCopyright © 2003-2011 David Schultz or\nCopyright © 2003-2009 Steven G. Kargl or\nCopyright © 2003-2009 Bruce D. Evans or\nCopyright © 2008 Stephen L. Moshier or\nCopyright © 2017-2018 Arm Limited\nand labelled as such in comments in the individual source files. All\nhave been licensed under extremely permissive terms.\n\nThe ARM memcpy code (src/string/arm/memcpy.S) is Copyright © 2008\nThe Android Open Source Project and is licensed under a two-clause BSD\nlicense. 
It was taken from Bionic libc, used on Android.\n\nThe AArch64 memcpy and memset code (src/string/aarch64/*) are\nCopyright © 1999-2019, Arm Limited.\n\nThe implementation of DES for crypt (src/crypt/crypt_des.c) is\nCopyright © 1994 David Burren. It is licensed under a BSD license.\n\nThe implementation of blowfish crypt (src/crypt/crypt_blowfish.c) was\noriginally written by Solar Designer and placed into the public\ndomain. The code also comes with a fallback permissive license for use\nin jurisdictions that may not recognize the public domain.\n\nThe smoothsort implementation (src/stdlib/qsort.c) is Copyright © 2011\nValentin Ochs and is licensed under an MIT-style license.\n\nThe x86_64 port was written by Nicholas J. Kain and is licensed under\nthe standard MIT terms.\n\nThe mips and microblaze ports were originally written by Richard\nPennington for use in the ellcc project. The original code was adapted\nby Rich Felker for build system and code conventions during upstream\nintegration. It is licensed under the standard MIT terms.\n\nThe mips64 port was contributed by Imagination Technologies and is\nlicensed under the standard MIT terms.\n\nThe powerpc port was also originally written by Richard Pennington,\nand later supplemented and integrated by John Spencer. It is licensed\nunder the standard MIT terms.\n\nAll other files which have no copyright comments are original works\nproduced specifically for use as part of this library, written either\nby Rich Felker, the main author of the library, or by one or more\ncontibutors listed above. Details on authorship of individual files\ncan be found in the git version control history of the project. 
The\nomission of copyright and license comments in each file is in the\ninterest of source tree size.\n\nIn addition, permission is hereby granted for all public header files\n(include/* and arch/*/bits/*) and crt files intended to be linked into\napplications (crt/*, ldso/dlstart.c, and arch/*/crt_arch.h) to omit\nthe copyright notice and permission notice otherwise required by the\nlicense, and to use these files without any requirement of\nattribution. These files include substantial contributions from:\n\nBobby Bingham\nJohn Spencer\nNicholas J. Kain\nRich Felker\nRichard Pennington\nStefan Kristiansson\nSzabolcs Nagy\n\nall of whom have explicitly granted such permission.\n\nThis file previously contained text expressing a belief that most of\nthe files covered by the above exception were sufficiently trivial not\nto be subject to copyright, resulting in confusion over whether it\nnegated the permissions granted in the license. In the spirit of\npermissive licensing, and of not having licensing issues being an\nobstacle to adoption, that text has been removed.\n"
  },
  {
    "path": "wa/workloads/rt_app/bin/x86/README.rt-app",
    "content": "Sources of rt-app available at:\nGit commit: 857d6a6624469ba275a37493a10ebba00a50b467\nGit repository: https://github.com/douglas-raillard-arm/rt-app.git\n\n\nBuild host info:\n\nNAME=\"Alpine Linux\"\nID=alpine\nVERSION_ID=3.18.3\nPRETTY_NAME=\"Alpine Linux v3.18\"\nHOME_URL=\"https://alpinelinux.org/\"\nBUG_REPORT_URL=\"https://gitlab.alpinelinux.org/alpine/aports/-/issues\"\n\n\nBuild recipe:\n\nexport ARCH=x86\nexport BUILD_DIR=/tmp/tmpjnvvg6dg/x86/source\nexport LISA_ARCH_ASSETS=/lisa/_assets/binaries/x86\nexport LISA_HOME=''\n#! /bin/bash\n\nALPINE_VERSION=v3.18\nALPINE_BUILD_DEPENDENCIES=(autoconf automake bash cmake gcc git make libtool linux-headers musl-dev)\n\nbuild_jsonc() {\n    # As recommended in the README, build in a separate tree. The folder needs\n    # to be called \"json-c\" as rt-app will #include <json-c/json.h>\n    mkdir json-c\n    cd json-c\n    export ac_cv_func_malloc_0_nonnull=yes\n    export ac_cv_func_realloc_0_nonnull=yes\n    cmake ../json-c -DBUILD_SHARED_LIBS=OFF -DBUILD_STATIC_LIBS=ON -DCMAKE_C_COMPILER=${CROSS_COMPILE}gcc\n    make\n}\n\nbuild_numactl() {\n    cd numactl\n    ./autogen.sh\n    ./configure --host=$CONFIGURE_HOST --disable-shared --enable-static\n    make\n}\n\nbuild_rtapp() {\n    cd rt-app\n    export ac_cv_lib_json_c_json_object_from_file=yes\n    export ac_cv_lib_numa_numa_available=yes\n    ./autogen.sh\n    ./configure --host=$CONFIGURE_HOST LDFLAGS=\"--static -L$BUILD_DIR/json-c/ -L$BUILD_DIR/numactl/\" \\\n                CFLAGS=\"-I$BUILD_DIR -I$BUILD_DIR/numactl -I$BUILD_DIR/json-c\" --with-deadline\n    make\n    \"$CROSS_COMPILE\"strip src/rt-app\n}\n\ndownload() {\n    git clone https://github.com/json-c/json-c json-c\n    # git -C json-c checkout ddd049045d98dd3163d01a7d79184b3c7fb95a14\n    git -C json-c checkout json-c-0.16-20220414\n\n    git clone https://github.com/numactl/numactl.git\n    git -C numactl checkout v2.0.16\n\n    git clone 
https://github.com/douglas-raillard-arm/rt-app.git\n    # Branch: lisa\n    git -C rt-app checkout 857d6a6624469ba275a37493a10ebba00a50b467\n\n}\n\nbuild() {\n    (build_jsonc) && (build_numactl) && (build_rtapp)\n}\n\ninstall() {\n    source \"$LISA_HOME/tools/recipes/utils.sh\"\n    cp -v rt-app/src/rt-app \"$LISA_ARCH_ASSETS/rt-app\"\n\n    install_readme rt-app rt-app COPYING.in\n    # According to the readme, libnumactl is under the\n    # GNU Lesser General Public License, v2.1.\n    install_readme libnumactl numactl LICENSE.LGPL2.1\n    install_readme json-c json-c COPYING\n}\n\n\nThe sources were distributed under the following licence (content of rt-app/COPYING.in):\n\n\t\t    GNU GENERAL PUBLIC LICENSE\n\t\t       Version 2, June 1991\n\n Copyright (C) 1989, 1990, 1991, 1992 Free Software Foundation, Inc.\n 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA\n Everyone is permitted to copy and distribute verbatim copies\n of this license document, but changing it is not allowed.\n\n\t\t\t    Preamble\n\n  The licenses for most software are designed to take away your\nfreedom to share and change it.  By contrast, the GNU General Public\nLicense is intended to guarantee your freedom to share and change free\nsoftware--to make sure the software is free for all its users.  This\nGeneral Public License applies to most of the Free Software\nFoundation's software and to any other program whose authors commit to\nusing it.  (Some other Free Software Foundation software is covered by\nthe GNU Library General Public License instead.)  You can apply it to\nyour programs, too.\n\n  When we speak of free software, we are referring to freedom, not\nprice.  
Our General Public Licenses are designed to make sure that you\nhave the freedom to distribute copies of free software (and charge for\nthis service if you wish), that you receive source code or can get it\nif you want it, that you can change the software or use pieces of it\nin new free programs; and that you know you can do these things.\n\n  To protect your rights, we need to make restrictions that forbid\nanyone to deny you these rights or to ask you to surrender the rights.\nThese restrictions translate to certain responsibilities for you if you\ndistribute copies of the software, or if you modify it.\n\n  For example, if you distribute copies of such a program, whether\ngratis or for a fee, you must give the recipients all the rights that\nyou have.  You must make sure that they, too, receive or can get the\nsource code.  And you must show them these terms so they know their\nrights.\n\n  We protect your rights with two steps: (1) copyright the software, and\n(2) offer you this license which gives you legal permission to copy,\ndistribute and/or modify the software.\n\n  Also, for each author's protection and ours, we want to make certain\nthat everyone understands that there is no warranty for this free\nsoftware.  If the software is modified by someone else and passed on, we\nwant its recipients to know that what they have is not the original, so\nthat any problems introduced by others will not reflect on the original\nauthors' reputations.\n\n  Finally, any free program is threatened constantly by software\npatents.  We wish to avoid the danger that redistributors of a free\nprogram will individually obtain patent licenses, in effect making the\nprogram proprietary.  
To prevent this, we have made it clear that any\npatent must be licensed for everyone's free use or not licensed at all.\n\n  The precise terms and conditions for copying, distribution and\nmodification follow.\n\f\n\t\t    GNU GENERAL PUBLIC LICENSE\n   TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION\n\n  0. This License applies to any program or other work which contains\na notice placed by the copyright holder saying it may be distributed\nunder the terms of this General Public License.  The \"Program\", below,\nrefers to any such program or work, and a \"work based on the Program\"\nmeans either the Program or any derivative work under copyright law:\nthat is to say, a work containing the Program or a portion of it,\neither verbatim or with modifications and/or translated into another\nlanguage.  (Hereinafter, translation is included without limitation in\nthe term \"modification\".)  Each licensee is addressed as \"you\".\n\nActivities other than copying, distribution and modification are not\ncovered by this License; they are outside its scope.  The act of\nrunning the Program is not restricted, and the output from the Program\nis covered only if its contents constitute a work based on the\nProgram (independent of having been made by running the Program).\nWhether that is true depends on what the Program does.\n\n  1. You may copy and distribute verbatim copies of the Program's\nsource code as you receive it, in any medium, provided that you\nconspicuously and appropriately publish on each copy an appropriate\ncopyright notice and disclaimer of warranty; keep intact all the\nnotices that refer to this License and to the absence of any warranty;\nand give any other recipients of the Program a copy of this License\nalong with the Program.\n\nYou may charge a fee for the physical act of transferring a copy, and\nyou may at your option offer warranty protection in exchange for a fee.\n\n  2. 
You may modify your copy or copies of the Program or any portion\nof it, thus forming a work based on the Program, and copy and\ndistribute such modifications or work under the terms of Section 1\nabove, provided that you also meet all of these conditions:\n\n    a) You must cause the modified files to carry prominent notices\n    stating that you changed the files and the date of any change.\n\n    b) You must cause any work that you distribute or publish, that in\n    whole or in part contains or is derived from the Program or any\n    part thereof, to be licensed as a whole at no charge to all third\n    parties under the terms of this License.\n\n    c) If the modified program normally reads commands interactively\n    when run, you must cause it, when started running for such\n    interactive use in the most ordinary way, to print or display an\n    announcement including an appropriate copyright notice and a\n    notice that there is no warranty (or else, saying that you provide\n    a warranty) and that users may redistribute the program under\n    these conditions, and telling the user how to view a copy of this\n    License.  (Exception: if the Program itself is interactive but\n    does not normally print such an announcement, your work based on\n    the Program is not required to print an announcement.)\n\f\nThese requirements apply to the modified work as a whole.  If\nidentifiable sections of that work are not derived from the Program,\nand can be reasonably considered independent and separate works in\nthemselves, then this License, and its terms, do not apply to those\nsections when you distribute them as separate works.  
But when you\ndistribute the same sections as part of a whole which is a work based\non the Program, the distribution of the whole must be on the terms of\nthis License, whose permissions for other licensees extend to the\nentire whole, and thus to each and every part regardless of who wrote it.\n\nThus, it is not the intent of this section to claim rights or contest\nyour rights to work written entirely by you; rather, the intent is to\nexercise the right to control the distribution of derivative or\ncollective works based on the Program.\n\nIn addition, mere aggregation of another work not based on the Program\nwith the Program (or with a work based on the Program) on a volume of\na storage or distribution medium does not bring the other work under\nthe scope of this License.\n\n  3. You may copy and distribute the Program (or a work based on it,\nunder Section 2) in object code or executable form under the terms of\nSections 1 and 2 above provided that you also do one of the following:\n\n    a) Accompany it with the complete corresponding machine-readable\n    source code, which must be distributed under the terms of Sections\n    1 and 2 above on a medium customarily used for software interchange; or,\n\n    b) Accompany it with a written offer, valid for at least three\n    years, to give any third party, for a charge no more than your\n    cost of physically performing source distribution, a complete\n    machine-readable copy of the corresponding source code, to be\n    distributed under the terms of Sections 1 and 2 above on a medium\n    customarily used for software interchange; or,\n\n    c) Accompany it with the information you received as to the offer\n    to distribute corresponding source code.  
(This alternative is\n    allowed only for noncommercial distribution and only if you\n    received the program in object code or executable form with such\n    an offer, in accord with Subsection b above.)\n\nThe source code for a work means the preferred form of the work for\nmaking modifications to it.  For an executable work, complete source\ncode means all the source code for all modules it contains, plus any\nassociated interface definition files, plus the scripts used to\ncontrol compilation and installation of the executable.  However, as a\nspecial exception, the source code distributed need not include\nanything that is normally distributed (in either source or binary\nform) with the major components (compiler, kernel, and so on) of the\noperating system on which the executable runs, unless that component\nitself accompanies the executable.\n\nIf distribution of executable or object code is made by offering\naccess to copy from a designated place, then offering equivalent\naccess to copy the source code from the same place counts as\ndistribution of the source code, even though third parties are not\ncompelled to copy the source along with the object code.\n\f\n  4. You may not copy, modify, sublicense, or distribute the Program\nexcept as expressly provided under this License.  Any attempt\notherwise to copy, modify, sublicense or distribute the Program is\nvoid, and will automatically terminate your rights under this License.\nHowever, parties who have received copies, or rights, from you under\nthis License will not have their licenses terminated so long as such\nparties remain in full compliance.\n\n  5. You are not required to accept this License, since you have not\nsigned it.  However, nothing else grants you permission to modify or\ndistribute the Program or its derivative works.  These actions are\nprohibited by law if you do not accept this License.  
Therefore, by\nmodifying or distributing the Program (or any work based on the\nProgram), you indicate your acceptance of this License to do so, and\nall its terms and conditions for copying, distributing or modifying\nthe Program or works based on it.\n\n  6. Each time you redistribute the Program (or any work based on the\nProgram), the recipient automatically receives a license from the\noriginal licensor to copy, distribute or modify the Program subject to\nthese terms and conditions.  You may not impose any further\nrestrictions on the recipients' exercise of the rights granted herein.\nYou are not responsible for enforcing compliance by third parties to\nthis License.\n\n  7. If, as a consequence of a court judgment or allegation of patent\ninfringement or for any other reason (not limited to patent issues),\nconditions are imposed on you (whether by court order, agreement or\notherwise) that contradict the conditions of this License, they do not\nexcuse you from the conditions of this License.  If you cannot\ndistribute so as to satisfy simultaneously your obligations under this\nLicense and any other pertinent obligations, then as a consequence you\nmay not distribute the Program at all.  
For example, if a patent\nlicense would not permit royalty-free redistribution of the Program by\nall those who receive copies directly or indirectly through you, then\nthe only way you could satisfy both it and this License would be to\nrefrain entirely from distribution of the Program.\n\nIf any portion of this section is held invalid or unenforceable under\nany particular circumstance, the balance of the section is intended to\napply and the section as a whole is intended to apply in other\ncircumstances.\n\nIt is not the purpose of this section to induce you to infringe any\npatents or other property right claims or to contest validity of any\nsuch claims; this section has the sole purpose of protecting the\nintegrity of the free software distribution system, which is\nimplemented by public license practices.  Many people have made\ngenerous contributions to the wide range of software distributed\nthrough that system in reliance on consistent application of that\nsystem; it is up to the author/donor to decide if he or she is willing\nto distribute software through any other system and a licensee cannot\nimpose that choice.\n\nThis section is intended to make thoroughly clear what is believed to\nbe a consequence of the rest of this License.\n\f\n  8. If the distribution and/or use of the Program is restricted in\ncertain countries either by patents or by copyrighted interfaces, the\noriginal copyright holder who places the Program under this License\nmay add an explicit geographical distribution limitation excluding\nthose countries, so that distribution is permitted only in or among\ncountries not thus excluded.  In such case, this License incorporates\nthe limitation as if written in the body of this License.\n\n  9. The Free Software Foundation may publish revised and/or new versions\nof the General Public License from time to time.  
Such new versions will\nbe similar in spirit to the present version, but may differ in detail to\naddress new problems or concerns.\n\nEach version is given a distinguishing version number.  If the Program\nspecifies a version number of this License which applies to it and \"any\nlater version\", you have the option of following the terms and conditions\neither of that version or of any later version published by the Free\nSoftware Foundation.  If the Program does not specify a version number of\nthis License, you may choose any version ever published by the Free Software\nFoundation.\n\n  10. If you wish to incorporate parts of the Program into other free\nprograms whose distribution conditions are different, write to the author\nto ask for permission.  For software which is copyrighted by the Free\nSoftware Foundation, write to the Free Software Foundation; we sometimes\nmake exceptions for this.  Our decision will be guided by the two goals\nof preserving the free status of all derivatives of our free software and\nof promoting the sharing and reuse of software generally.\n\n\t\t\t    NO WARRANTY\n\n  11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY\nFOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW.  EXCEPT WHEN\nOTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES\nPROVIDE THE PROGRAM \"AS IS\" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED\nOR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF\nMERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.  THE ENTIRE RISK AS\nTO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU.  SHOULD THE\nPROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING,\nREPAIR OR CORRECTION.\n\n  12. 
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING\nWILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR\nREDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES,\nINCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING\nOUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED\nTO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY\nYOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER\nPROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE\nPOSSIBILITY OF SUCH DAMAGES.\n\n\t\t     END OF TERMS AND CONDITIONS\n\f\n\tAppendix: How to Apply These Terms to Your New Programs\n\n  If you develop a new program, and you want it to be of the greatest\npossible use to the public, the best way to achieve this is to make it\nfree software which everyone can redistribute and change under these terms.\n\n  To do so, attach the following notices to the program.  It is safest\nto attach them to the start of each source file to most effectively\nconvey the exclusion of warranty; and each file should have at least\nthe \"copyright\" line and a pointer to where the full notice is found.\n\n    <one line to give the program's name and a brief idea of what it does.>\n    Copyright (C) 19yy  <name of author>\n\n    This program is free software; you can redistribute it and/or modify\n    it under the terms of the GNU General Public License as published by\n    the Free Software Foundation; either version 2 of the License, or\n    (at your option) any later version.\n\n    This program is distributed in the hope that it will be useful,\n    but WITHOUT ANY WARRANTY; without even the implied warranty of\n    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  
See the\n    GNU General Public License for more details.\n\n    You should have received a copy of the GNU General Public License\n    along with this program; if not, write to the Free Software\n    Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.\n\nAlso add information on how to contact you by electronic and paper mail.\n\nIf the program is interactive, make it output a short notice like this\nwhen it starts in an interactive mode:\n\n    Gnomovision version 69, Copyright (C) 19yy name of author\n    Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'.\n    This is free software, and you are welcome to redistribute it\n    under certain conditions; type `show c' for details.\n\nThe hypothetical commands `show w' and `show c' should show the appropriate\nparts of the General Public License.  Of course, the commands you use may\nbe called something other than `show w' and `show c'; they could even be\nmouse-clicks or menu items--whatever suits your program.\n\nYou should also get your employer (if you work as a programmer) or your\nschool, if any, to sign a \"copyright disclaimer\" for the program, if\nnecessary.  Here is a sample; alter the names:\n\n  Yoyodyne, Inc., hereby disclaims all copyright interest in the program\n  `Gnomovision' (which makes passes at compilers) written by James Hacker.\n\n  <signature of Ty Coon>, 1 April 1989\n  Ty Coon, President of Vice\n\nThis General Public License does not permit incorporating your program into\nproprietary programs.  If your program is a subroutine library, you may\nconsider it more useful to permit linking proprietary applications with the\nlibrary.  
If this is what you want to do, use the GNU Library General\nPublic License instead of this License.\n\n\nThe sources were compiled with musl-libc (content of COPYRIGHT):\n\nmusl as a whole is licensed under the following standard MIT license:\n\n----------------------------------------------------------------------\nCopyright © 2005-2020 Rich Felker, et al.\n\nPermission is hereby granted, free of charge, to any person obtaining\na copy of this software and associated documentation files (the\n\"Software\"), to deal in the Software without restriction, including\nwithout limitation the rights to use, copy, modify, merge, publish,\ndistribute, sublicense, and/or sell copies of the Software, and to\npermit persons to whom the Software is furnished to do so, subject to\nthe following conditions:\n\nThe above copyright notice and this permission notice shall be\nincluded in all copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND,\nEXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\nMERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.\nIN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY\nCLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,\nTORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE\nSOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n----------------------------------------------------------------------\n\nAuthors/contributors include:\n\nA. Wilcox\nAda Worcester\nAlex Dowad\nAlex Suykov\nAlexander Monakov\nAndre McCurdy\nAndrew Kelley\nAnthony G. Basile\nAric Belsito\nArvid Picciani\nBartosz Brachaczek\nBenjamin Peterson\nBobby Bingham\nBoris Brezillon\nBrent Cook\nChris Spiegel\nClément Vasseur\nDaniel Micay\nDaniel Sabogal\nDaurnimator\nDavid Carlier\nDavid Edelsohn\nDenys Vlasenko\nDmitry Ivanov\nDmitry V. 
Levin\nDrew DeVault\nEmil Renner Berthing\nFangrui Song\nFelix Fietkau\nFelix Janda\nGianluca Anzolin\nHauke Mehrtens\nHe X\nHiltjo Posthuma\nIsaac Dunham\nJaydeep Patil\nJens Gustedt\nJeremy Huntwork\nJo-Philipp Wich\nJoakim Sindholt\nJohn Spencer\nJulien Ramseier\nJustin Cormack\nKaarle Ritvanen\nKhem Raj\nKylie McClain\nLeah Neukirchen\nLuca Barbato\nLuka Perkov\nM Farkas-Dyck (Strake)\nMahesh Bodapati\nMarkus Wichmann\nMasanori Ogino\nMichael Clark\nMichael Forney\nMikhail Kremnyov\nNatanael Copa\nNicholas J. Kain\norc\nPascal Cuoq\nPatrick Oppenlander\nPetr Hosek\nPetr Skocik\nPierre Carrier\nReini Urban\nRich Felker\nRichard Pennington\nRyan Fairfax\nSamuel Holland\nSegev Finer\nShiz\nsin\nSolar Designer\nStefan Kristiansson\nStefan O'Rear\nSzabolcs Nagy\nTimo Teräs\nTrutz Behn\nValentin Ochs\nWill Dietz\nWilliam Haddon\nWilliam Pitcock\n\nPortions of this software are derived from third-party works licensed\nunder terms compatible with the above MIT license:\n\nThe TRE regular expression implementation (src/regex/reg* and\nsrc/regex/tre*) is Copyright © 2001-2008 Ville Laurikari and licensed\nunder a 2-clause BSD license (license text in the source files). The\nincluded version has been heavily modified by Rich Felker in 2012, in\nthe interests of size, simplicity, and namespace cleanliness.\n\nMuch of the math library code (src/math/* and src/complex/*) is\nCopyright © 1993,2004 Sun Microsystems or\nCopyright © 2003-2011 David Schultz or\nCopyright © 2003-2009 Steven G. Kargl or\nCopyright © 2003-2009 Bruce D. Evans or\nCopyright © 2008 Stephen L. Moshier or\nCopyright © 2017-2018 Arm Limited\nand labelled as such in comments in the individual source files. All\nhave been licensed under extremely permissive terms.\n\nThe ARM memcpy code (src/string/arm/memcpy.S) is Copyright © 2008\nThe Android Open Source Project and is licensed under a two-clause BSD\nlicense. 
It was taken from Bionic libc, used on Android.\n\nThe AArch64 memcpy and memset code (src/string/aarch64/*) are\nCopyright © 1999-2019, Arm Limited.\n\nThe implementation of DES for crypt (src/crypt/crypt_des.c) is\nCopyright © 1994 David Burren. It is licensed under a BSD license.\n\nThe implementation of blowfish crypt (src/crypt/crypt_blowfish.c) was\noriginally written by Solar Designer and placed into the public\ndomain. The code also comes with a fallback permissive license for use\nin jurisdictions that may not recognize the public domain.\n\nThe smoothsort implementation (src/stdlib/qsort.c) is Copyright © 2011\nValentin Ochs and is licensed under an MIT-style license.\n\nThe x86_64 port was written by Nicholas J. Kain and is licensed under\nthe standard MIT terms.\n\nThe mips and microblaze ports were originally written by Richard\nPennington for use in the ellcc project. The original code was adapted\nby Rich Felker for build system and code conventions during upstream\nintegration. It is licensed under the standard MIT terms.\n\nThe mips64 port was contributed by Imagination Technologies and is\nlicensed under the standard MIT terms.\n\nThe powerpc port was also originally written by Richard Pennington,\nand later supplemented and integrated by John Spencer. It is licensed\nunder the standard MIT terms.\n\nAll other files which have no copyright comments are original works\nproduced specifically for use as part of this library, written either\nby Rich Felker, the main author of the library, or by one or more\ncontibutors listed above. Details on authorship of individual files\ncan be found in the git version control history of the project. 
The\nomission of copyright and license comments in each file is in the\ninterest of source tree size.\n\nIn addition, permission is hereby granted for all public header files\n(include/* and arch/*/bits/*) and crt files intended to be linked into\napplications (crt/*, ldso/dlstart.c, and arch/*/crt_arch.h) to omit\nthe copyright notice and permission notice otherwise required by the\nlicense, and to use these files without any requirement of\nattribution. These files include substantial contributions from:\n\nBobby Bingham\nJohn Spencer\nNicholas J. Kain\nRich Felker\nRichard Pennington\nStefan Kristiansson\nSzabolcs Nagy\n\nall of whom have explicitly granted such permission.\n\nThis file previously contained text expressing a belief that most of\nthe files covered by the above exception were sufficiently trivial not\nto be subject to copyright, resulting in confusion over whether it\nnegated the permissions granted in the license. In the spirit of\npermissive licensing, and of not having licensing issues being an\nobstacle to adoption, that text has been removed.\n"
  },
  {
    "path": "wa/workloads/rt_app/bin/x86_64/README.rt-app",
    "content": "Sources of rt-app available at:\nGit commit: 857d6a6624469ba275a37493a10ebba00a50b467\nGit repository: https://github.com/douglas-raillard-arm/rt-app.git\n\n\nBuild host info:\n\nNAME=\"Alpine Linux\"\nID=alpine\nVERSION_ID=3.18.3\nPRETTY_NAME=\"Alpine Linux v3.18\"\nHOME_URL=\"https://alpinelinux.org/\"\nBUG_REPORT_URL=\"https://gitlab.alpinelinux.org/alpine/aports/-/issues\"\n\n\nBuild recipe:\n\nexport ARCH=x86_64\nexport BUILD_DIR=/tmp/tmpjnvvg6dg/x86_64/source\nexport LISA_ARCH_ASSETS=/lisa/_assets/binaries/x86_64\nexport LISA_HOME=''\n#! /bin/bash\n\nALPINE_VERSION=v3.18\nALPINE_BUILD_DEPENDENCIES=(autoconf automake bash cmake gcc git make libtool linux-headers musl-dev)\n\nbuild_jsonc() {\n    # As recommended in the README, build in a separate tree. The folder needs\n    # to be called \"json-c\" as rt-app will #include <json-c/json.h>\n    mkdir json-c\n    cd json-c\n    export ac_cv_func_malloc_0_nonnull=yes\n    export ac_cv_func_realloc_0_nonnull=yes\n    cmake ../json-c -DBUILD_SHARED_LIBS=OFF -DBUILD_STATIC_LIBS=ON -DCMAKE_C_COMPILER=${CROSS_COMPILE}gcc\n    make\n}\n\nbuild_numactl() {\n    cd numactl\n    ./autogen.sh\n    ./configure --host=$CONFIGURE_HOST --disable-shared --enable-static\n    make\n}\n\nbuild_rtapp() {\n    cd rt-app\n    export ac_cv_lib_json_c_json_object_from_file=yes\n    export ac_cv_lib_numa_numa_available=yes\n    ./autogen.sh\n    ./configure --host=$CONFIGURE_HOST LDFLAGS=\"--static -L$BUILD_DIR/json-c/ -L$BUILD_DIR/numactl/\" \\\n                CFLAGS=\"-I$BUILD_DIR -I$BUILD_DIR/numactl -I$BUILD_DIR/json-c\" --with-deadline\n    make\n    \"$CROSS_COMPILE\"strip src/rt-app\n}\n\ndownload() {\n    git clone https://github.com/json-c/json-c json-c\n    # git -C json-c checkout ddd049045d98dd3163d01a7d79184b3c7fb95a14\n    git -C json-c checkout json-c-0.16-20220414\n\n    git clone https://github.com/numactl/numactl.git\n    git -C numactl checkout v2.0.16\n\n    git clone 
https://github.com/douglas-raillard-arm/rt-app.git\n    # Branch: lisa\n    git -C rt-app checkout 857d6a6624469ba275a37493a10ebba00a50b467\n\n}\n\nbuild() {\n    (build_jsonc) && (build_numactl) && (build_rtapp)\n}\n\ninstall() {\n    source \"$LISA_HOME/tools/recipes/utils.sh\"\n    cp -v rt-app/src/rt-app \"$LISA_ARCH_ASSETS/rt-app\"\n\n    install_readme rt-app rt-app COPYING.in\n    # According to the readme, libnumactl is under the\n    # GNU Lesser General Public License, v2.1.\n    install_readme libnumactl numactl LICENSE.LGPL2.1\n    install_readme json-c json-c COPYING\n}\n\n\nThe sources were distributed under the following licence (content of rt-app/COPYING.in):\n\n\t\t    GNU GENERAL PUBLIC LICENSE\n\t\t       Version 2, June 1991\n\n Copyright (C) 1989, 1990, 1991, 1992 Free Software Foundation, Inc.\n 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA\n Everyone is permitted to copy and distribute verbatim copies\n of this license document, but changing it is not allowed.\n\n\t\t\t    Preamble\n\n  The licenses for most software are designed to take away your\nfreedom to share and change it.  By contrast, the GNU General Public\nLicense is intended to guarantee your freedom to share and change free\nsoftware--to make sure the software is free for all its users.  This\nGeneral Public License applies to most of the Free Software\nFoundation's software and to any other program whose authors commit to\nusing it.  (Some other Free Software Foundation software is covered by\nthe GNU Library General Public License instead.)  You can apply it to\nyour programs, too.\n\n  When we speak of free software, we are referring to freedom, not\nprice.  
Our General Public Licenses are designed to make sure that you\nhave the freedom to distribute copies of free software (and charge for\nthis service if you wish), that you receive source code or can get it\nif you want it, that you can change the software or use pieces of it\nin new free programs; and that you know you can do these things.\n\n  To protect your rights, we need to make restrictions that forbid\nanyone to deny you these rights or to ask you to surrender the rights.\nThese restrictions translate to certain responsibilities for you if you\ndistribute copies of the software, or if you modify it.\n\n  For example, if you distribute copies of such a program, whether\ngratis or for a fee, you must give the recipients all the rights that\nyou have.  You must make sure that they, too, receive or can get the\nsource code.  And you must show them these terms so they know their\nrights.\n\n  We protect your rights with two steps: (1) copyright the software, and\n(2) offer you this license which gives you legal permission to copy,\ndistribute and/or modify the software.\n\n  Also, for each author's protection and ours, we want to make certain\nthat everyone understands that there is no warranty for this free\nsoftware.  If the software is modified by someone else and passed on, we\nwant its recipients to know that what they have is not the original, so\nthat any problems introduced by others will not reflect on the original\nauthors' reputations.\n\n  Finally, any free program is threatened constantly by software\npatents.  We wish to avoid the danger that redistributors of a free\nprogram will individually obtain patent licenses, in effect making the\nprogram proprietary.  
To prevent this, we have made it clear that any\npatent must be licensed for everyone's free use or not licensed at all.\n\n  The precise terms and conditions for copying, distribution and\nmodification follow.\n\f\n\t\t    GNU GENERAL PUBLIC LICENSE\n   TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION\n\n  0. This License applies to any program or other work which contains\na notice placed by the copyright holder saying it may be distributed\nunder the terms of this General Public License.  The \"Program\", below,\nrefers to any such program or work, and a \"work based on the Program\"\nmeans either the Program or any derivative work under copyright law:\nthat is to say, a work containing the Program or a portion of it,\neither verbatim or with modifications and/or translated into another\nlanguage.  (Hereinafter, translation is included without limitation in\nthe term \"modification\".)  Each licensee is addressed as \"you\".\n\nActivities other than copying, distribution and modification are not\ncovered by this License; they are outside its scope.  The act of\nrunning the Program is not restricted, and the output from the Program\nis covered only if its contents constitute a work based on the\nProgram (independent of having been made by running the Program).\nWhether that is true depends on what the Program does.\n\n  1. You may copy and distribute verbatim copies of the Program's\nsource code as you receive it, in any medium, provided that you\nconspicuously and appropriately publish on each copy an appropriate\ncopyright notice and disclaimer of warranty; keep intact all the\nnotices that refer to this License and to the absence of any warranty;\nand give any other recipients of the Program a copy of this License\nalong with the Program.\n\nYou may charge a fee for the physical act of transferring a copy, and\nyou may at your option offer warranty protection in exchange for a fee.\n\n  2. 
You may modify your copy or copies of the Program or any portion\nof it, thus forming a work based on the Program, and copy and\ndistribute such modifications or work under the terms of Section 1\nabove, provided that you also meet all of these conditions:\n\n    a) You must cause the modified files to carry prominent notices\n    stating that you changed the files and the date of any change.\n\n    b) You must cause any work that you distribute or publish, that in\n    whole or in part contains or is derived from the Program or any\n    part thereof, to be licensed as a whole at no charge to all third\n    parties under the terms of this License.\n\n    c) If the modified program normally reads commands interactively\n    when run, you must cause it, when started running for such\n    interactive use in the most ordinary way, to print or display an\n    announcement including an appropriate copyright notice and a\n    notice that there is no warranty (or else, saying that you provide\n    a warranty) and that users may redistribute the program under\n    these conditions, and telling the user how to view a copy of this\n    License.  (Exception: if the Program itself is interactive but\n    does not normally print such an announcement, your work based on\n    the Program is not required to print an announcement.)\n\f\nThese requirements apply to the modified work as a whole.  If\nidentifiable sections of that work are not derived from the Program,\nand can be reasonably considered independent and separate works in\nthemselves, then this License, and its terms, do not apply to those\nsections when you distribute them as separate works.  
But when you\ndistribute the same sections as part of a whole which is a work based\non the Program, the distribution of the whole must be on the terms of\nthis License, whose permissions for other licensees extend to the\nentire whole, and thus to each and every part regardless of who wrote it.\n\nThus, it is not the intent of this section to claim rights or contest\nyour rights to work written entirely by you; rather, the intent is to\nexercise the right to control the distribution of derivative or\ncollective works based on the Program.\n\nIn addition, mere aggregation of another work not based on the Program\nwith the Program (or with a work based on the Program) on a volume of\na storage or distribution medium does not bring the other work under\nthe scope of this License.\n\n  3. You may copy and distribute the Program (or a work based on it,\nunder Section 2) in object code or executable form under the terms of\nSections 1 and 2 above provided that you also do one of the following:\n\n    a) Accompany it with the complete corresponding machine-readable\n    source code, which must be distributed under the terms of Sections\n    1 and 2 above on a medium customarily used for software interchange; or,\n\n    b) Accompany it with a written offer, valid for at least three\n    years, to give any third party, for a charge no more than your\n    cost of physically performing source distribution, a complete\n    machine-readable copy of the corresponding source code, to be\n    distributed under the terms of Sections 1 and 2 above on a medium\n    customarily used for software interchange; or,\n\n    c) Accompany it with the information you received as to the offer\n    to distribute corresponding source code.  
(This alternative is\n    allowed only for noncommercial distribution and only if you\n    received the program in object code or executable form with such\n    an offer, in accord with Subsection b above.)\n\nThe source code for a work means the preferred form of the work for\nmaking modifications to it.  For an executable work, complete source\ncode means all the source code for all modules it contains, plus any\nassociated interface definition files, plus the scripts used to\ncontrol compilation and installation of the executable.  However, as a\nspecial exception, the source code distributed need not include\nanything that is normally distributed (in either source or binary\nform) with the major components (compiler, kernel, and so on) of the\noperating system on which the executable runs, unless that component\nitself accompanies the executable.\n\nIf distribution of executable or object code is made by offering\naccess to copy from a designated place, then offering equivalent\naccess to copy the source code from the same place counts as\ndistribution of the source code, even though third parties are not\ncompelled to copy the source along with the object code.\n\f\n  4. You may not copy, modify, sublicense, or distribute the Program\nexcept as expressly provided under this License.  Any attempt\notherwise to copy, modify, sublicense or distribute the Program is\nvoid, and will automatically terminate your rights under this License.\nHowever, parties who have received copies, or rights, from you under\nthis License will not have their licenses terminated so long as such\nparties remain in full compliance.\n\n  5. You are not required to accept this License, since you have not\nsigned it.  However, nothing else grants you permission to modify or\ndistribute the Program or its derivative works.  These actions are\nprohibited by law if you do not accept this License.  
Therefore, by\nmodifying or distributing the Program (or any work based on the\nProgram), you indicate your acceptance of this License to do so, and\nall its terms and conditions for copying, distributing or modifying\nthe Program or works based on it.\n\n  6. Each time you redistribute the Program (or any work based on the\nProgram), the recipient automatically receives a license from the\noriginal licensor to copy, distribute or modify the Program subject to\nthese terms and conditions.  You may not impose any further\nrestrictions on the recipients' exercise of the rights granted herein.\nYou are not responsible for enforcing compliance by third parties to\nthis License.\n\n  7. If, as a consequence of a court judgment or allegation of patent\ninfringement or for any other reason (not limited to patent issues),\nconditions are imposed on you (whether by court order, agreement or\notherwise) that contradict the conditions of this License, they do not\nexcuse you from the conditions of this License.  If you cannot\ndistribute so as to satisfy simultaneously your obligations under this\nLicense and any other pertinent obligations, then as a consequence you\nmay not distribute the Program at all.  
For example, if a patent\nlicense would not permit royalty-free redistribution of the Program by\nall those who receive copies directly or indirectly through you, then\nthe only way you could satisfy both it and this License would be to\nrefrain entirely from distribution of the Program.\n\nIf any portion of this section is held invalid or unenforceable under\nany particular circumstance, the balance of the section is intended to\napply and the section as a whole is intended to apply in other\ncircumstances.\n\nIt is not the purpose of this section to induce you to infringe any\npatents or other property right claims or to contest validity of any\nsuch claims; this section has the sole purpose of protecting the\nintegrity of the free software distribution system, which is\nimplemented by public license practices.  Many people have made\ngenerous contributions to the wide range of software distributed\nthrough that system in reliance on consistent application of that\nsystem; it is up to the author/donor to decide if he or she is willing\nto distribute software through any other system and a licensee cannot\nimpose that choice.\n\nThis section is intended to make thoroughly clear what is believed to\nbe a consequence of the rest of this License.\n\f\n  8. If the distribution and/or use of the Program is restricted in\ncertain countries either by patents or by copyrighted interfaces, the\noriginal copyright holder who places the Program under this License\nmay add an explicit geographical distribution limitation excluding\nthose countries, so that distribution is permitted only in or among\ncountries not thus excluded.  In such case, this License incorporates\nthe limitation as if written in the body of this License.\n\n  9. The Free Software Foundation may publish revised and/or new versions\nof the General Public License from time to time.  
Such new versions will\nbe similar in spirit to the present version, but may differ in detail to\naddress new problems or concerns.\n\nEach version is given a distinguishing version number.  If the Program\nspecifies a version number of this License which applies to it and \"any\nlater version\", you have the option of following the terms and conditions\neither of that version or of any later version published by the Free\nSoftware Foundation.  If the Program does not specify a version number of\nthis License, you may choose any version ever published by the Free Software\nFoundation.\n\n  10. If you wish to incorporate parts of the Program into other free\nprograms whose distribution conditions are different, write to the author\nto ask for permission.  For software which is copyrighted by the Free\nSoftware Foundation, write to the Free Software Foundation; we sometimes\nmake exceptions for this.  Our decision will be guided by the two goals\nof preserving the free status of all derivatives of our free software and\nof promoting the sharing and reuse of software generally.\n\n\t\t\t    NO WARRANTY\n\n  11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY\nFOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW.  EXCEPT WHEN\nOTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES\nPROVIDE THE PROGRAM \"AS IS\" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED\nOR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF\nMERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.  THE ENTIRE RISK AS\nTO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU.  SHOULD THE\nPROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING,\nREPAIR OR CORRECTION.\n\n  12. 
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING\nWILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR\nREDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES,\nINCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING\nOUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED\nTO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY\nYOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER\nPROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE\nPOSSIBILITY OF SUCH DAMAGES.\n\n\t\t     END OF TERMS AND CONDITIONS\n\f\n\tAppendix: How to Apply These Terms to Your New Programs\n\n  If you develop a new program, and you want it to be of the greatest\npossible use to the public, the best way to achieve this is to make it\nfree software which everyone can redistribute and change under these terms.\n\n  To do so, attach the following notices to the program.  It is safest\nto attach them to the start of each source file to most effectively\nconvey the exclusion of warranty; and each file should have at least\nthe \"copyright\" line and a pointer to where the full notice is found.\n\n    <one line to give the program's name and a brief idea of what it does.>\n    Copyright (C) 19yy  <name of author>\n\n    This program is free software; you can redistribute it and/or modify\n    it under the terms of the GNU General Public License as published by\n    the Free Software Foundation; either version 2 of the License, or\n    (at your option) any later version.\n\n    This program is distributed in the hope that it will be useful,\n    but WITHOUT ANY WARRANTY; without even the implied warranty of\n    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  
See the\n    GNU General Public License for more details.\n\n    You should have received a copy of the GNU General Public License\n    along with this program; if not, write to the Free Software\n    Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.\n\nAlso add information on how to contact you by electronic and paper mail.\n\nIf the program is interactive, make it output a short notice like this\nwhen it starts in an interactive mode:\n\n    Gnomovision version 69, Copyright (C) 19yy name of author\n    Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'.\n    This is free software, and you are welcome to redistribute it\n    under certain conditions; type `show c' for details.\n\nThe hypothetical commands `show w' and `show c' should show the appropriate\nparts of the General Public License.  Of course, the commands you use may\nbe called something other than `show w' and `show c'; they could even be\nmouse-clicks or menu items--whatever suits your program.\n\nYou should also get your employer (if you work as a programmer) or your\nschool, if any, to sign a \"copyright disclaimer\" for the program, if\nnecessary.  Here is a sample; alter the names:\n\n  Yoyodyne, Inc., hereby disclaims all copyright interest in the program\n  `Gnomovision' (which makes passes at compilers) written by James Hacker.\n\n  <signature of Ty Coon>, 1 April 1989\n  Ty Coon, President of Vice\n\nThis General Public License does not permit incorporating your program into\nproprietary programs.  If your program is a subroutine library, you may\nconsider it more useful to permit linking proprietary applications with the\nlibrary.  
If this is what you want to do, use the GNU Library General\nPublic License instead of this License.\n\n\nThe sources were compiled with musl-libc (content of COPYRIGHT):\n\nmusl as a whole is licensed under the following standard MIT license:\n\n----------------------------------------------------------------------\nCopyright © 2005-2020 Rich Felker, et al.\n\nPermission is hereby granted, free of charge, to any person obtaining\na copy of this software and associated documentation files (the\n\"Software\"), to deal in the Software without restriction, including\nwithout limitation the rights to use, copy, modify, merge, publish,\ndistribute, sublicense, and/or sell copies of the Software, and to\npermit persons to whom the Software is furnished to do so, subject to\nthe following conditions:\n\nThe above copyright notice and this permission notice shall be\nincluded in all copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND,\nEXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\nMERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.\nIN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY\nCLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,\nTORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE\nSOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n----------------------------------------------------------------------\n\nAuthors/contributors include:\n\nA. Wilcox\nAda Worcester\nAlex Dowad\nAlex Suykov\nAlexander Monakov\nAndre McCurdy\nAndrew Kelley\nAnthony G. Basile\nAric Belsito\nArvid Picciani\nBartosz Brachaczek\nBenjamin Peterson\nBobby Bingham\nBoris Brezillon\nBrent Cook\nChris Spiegel\nClément Vasseur\nDaniel Micay\nDaniel Sabogal\nDaurnimator\nDavid Carlier\nDavid Edelsohn\nDenys Vlasenko\nDmitry Ivanov\nDmitry V. 
Levin\nDrew DeVault\nEmil Renner Berthing\nFangrui Song\nFelix Fietkau\nFelix Janda\nGianluca Anzolin\nHauke Mehrtens\nHe X\nHiltjo Posthuma\nIsaac Dunham\nJaydeep Patil\nJens Gustedt\nJeremy Huntwork\nJo-Philipp Wich\nJoakim Sindholt\nJohn Spencer\nJulien Ramseier\nJustin Cormack\nKaarle Ritvanen\nKhem Raj\nKylie McClain\nLeah Neukirchen\nLuca Barbato\nLuka Perkov\nM Farkas-Dyck (Strake)\nMahesh Bodapati\nMarkus Wichmann\nMasanori Ogino\nMichael Clark\nMichael Forney\nMikhail Kremnyov\nNatanael Copa\nNicholas J. Kain\norc\nPascal Cuoq\nPatrick Oppenlander\nPetr Hosek\nPetr Skocik\nPierre Carrier\nReini Urban\nRich Felker\nRichard Pennington\nRyan Fairfax\nSamuel Holland\nSegev Finer\nShiz\nsin\nSolar Designer\nStefan Kristiansson\nStefan O'Rear\nSzabolcs Nagy\nTimo Teräs\nTrutz Behn\nValentin Ochs\nWill Dietz\nWilliam Haddon\nWilliam Pitcock\n\nPortions of this software are derived from third-party works licensed\nunder terms compatible with the above MIT license:\n\nThe TRE regular expression implementation (src/regex/reg* and\nsrc/regex/tre*) is Copyright © 2001-2008 Ville Laurikari and licensed\nunder a 2-clause BSD license (license text in the source files). The\nincluded version has been heavily modified by Rich Felker in 2012, in\nthe interests of size, simplicity, and namespace cleanliness.\n\nMuch of the math library code (src/math/* and src/complex/*) is\nCopyright © 1993,2004 Sun Microsystems or\nCopyright © 2003-2011 David Schultz or\nCopyright © 2003-2009 Steven G. Kargl or\nCopyright © 2003-2009 Bruce D. Evans or\nCopyright © 2008 Stephen L. Moshier or\nCopyright © 2017-2018 Arm Limited\nand labelled as such in comments in the individual source files. All\nhave been licensed under extremely permissive terms.\n\nThe ARM memcpy code (src/string/arm/memcpy.S) is Copyright © 2008\nThe Android Open Source Project and is licensed under a two-clause BSD\nlicense. 
It was taken from Bionic libc, used on Android.\n\nThe AArch64 memcpy and memset code (src/string/aarch64/*) are\nCopyright © 1999-2019, Arm Limited.\n\nThe implementation of DES for crypt (src/crypt/crypt_des.c) is\nCopyright © 1994 David Burren. It is licensed under a BSD license.\n\nThe implementation of blowfish crypt (src/crypt/crypt_blowfish.c) was\noriginally written by Solar Designer and placed into the public\ndomain. The code also comes with a fallback permissive license for use\nin jurisdictions that may not recognize the public domain.\n\nThe smoothsort implementation (src/stdlib/qsort.c) is Copyright © 2011\nValentin Ochs and is licensed under an MIT-style license.\n\nThe x86_64 port was written by Nicholas J. Kain and is licensed under\nthe standard MIT terms.\n\nThe mips and microblaze ports were originally written by Richard\nPennington for use in the ellcc project. The original code was adapted\nby Rich Felker for build system and code conventions during upstream\nintegration. It is licensed under the standard MIT terms.\n\nThe mips64 port was contributed by Imagination Technologies and is\nlicensed under the standard MIT terms.\n\nThe powerpc port was also originally written by Richard Pennington,\nand later supplemented and integrated by John Spencer. It is licensed\nunder the standard MIT terms.\n\nAll other files which have no copyright comments are original works\nproduced specifically for use as part of this library, written either\nby Rich Felker, the main author of the library, or by one or more\ncontibutors listed above. Details on authorship of individual files\ncan be found in the git version control history of the project. 
The\nomission of copyright and license comments in each file is in the\ninterest of source tree size.\n\nIn addition, permission is hereby granted for all public header files\n(include/* and arch/*/bits/*) and crt files intended to be linked into\napplications (crt/*, ldso/dlstart.c, and arch/*/crt_arch.h) to omit\nthe copyright notice and permission notice otherwise required by the\nlicense, and to use these files without any requirement of\nattribution. These files include substantial contributions from:\n\nBobby Bingham\nJohn Spencer\nNicholas J. Kain\nRich Felker\nRichard Pennington\nStefan Kristiansson\nSzabolcs Nagy\n\nall of whom have explicitly granted such permission.\n\nThis file previously contained text expressing a belief that most of\nthe files covered by the above exception were sufficiently trivial not\nto be subject to copyright, resulting in confusion over whether it\nnegated the permissions granted in the license. In the spirit of\npermissive licensing, and of not having licensing issues being an\nobstacle to adoption, that text has been removed.\n"
  },
  {
    "path": "wa/workloads/rt_app/use_cases/browser-long.json",
    "content": "{\n\t\"tasks\" : {\n\t\t\"BrowserMain\" : {\n\t\t\t\"loop\" : 3,\n\t\t\t\"phases\" : {\n\t\t\t\t\"start\" : {\n\t\t\t\t\t\"loop\" : 1,\n\t\t\t\t\t\"sleep\" : 400000,\n\t\t\t\t\t\"run\" : 15000,\n\t\t\t\t\t\"resume\" : \"Browser\",\n\t\t\t\t\t\"run\" :  7000,\n\t\t\t\t\t\"sleep\" : 8000\n\t\t\t\t},\n\t\t\t\t\"render1\" : {\n\t\t\t\t\t\"loop\" : 50,\n\t\t\t\t\t\"resume\" : \"BrowserSub\",\n\t\t\t\t\t\"run\" : 3000\n\t\t\t\t},\n\t\t\t\t\"render2\" : {\n\t\t\t\t\t\"loop\" : 1,\n\t\t\t\t\t\"suspend\" : \"Browser\",\n\t\t\t\t\t\"run\" : 10000,\n\t\t\t\t\t\"resume\" : \"Browser\",\n\t\t\t\t\t\"run\" :  5000\n\t\t\t\t},\n\t\t\t\t\"render3\" : {\n\t\t\t\t\t\"loop\" : 20,\n\t\t\t\t\t\"resume\" : \"BrowserSub\",\n\t\t\t\t\t\"run\" : 3000\n\t\t\t\t},\n\t\t\t\t\"stop\" : {\n\t\t\t\t\t\"loop\" : 1,\n\t\t\t\t\t\"run\" :     2000,\n\t\t\t\t\t\"sleep\" : 200000,\n\t\t\t\t\t\"suspend\" : \"Browser\",\n\t\t\t\t\t\"sleep\" : 600000\n\t\t\t\t},\n\t\t\t\t\"scroll\" : {\n\t\t\t\t\t\"loop\" : 4,\n\t\t\t\t\t\"resume\" : \"Browser\",\n\t\t\t\t\t\"suspend\" : \"BrowserNext\",\n\t\t\t\t\t\"run\" : 1000\n\t\t\t\t},\n\t\t\t\t\"stop2\" : {\n\t\t\t\t\t\"loop\" : 1,\n\t\t\t\t\t\"suspend\" : \"Browser\",\n\t\t\t\t\t\"run\" : 200,\n\t\t\t\t\t\"sleep\" : 800000\n\t\t\t\t}\n\t\t\t}\n\t\t},\n\t\t\"BrowserSub1\" : {\n\t\t\t\"priority\" : -6,\n\t\t\t\"loop\" : -1,\n\t\t\t\"suspend\" : \"BrowserSub\",\n\t\t\t\"run\" : 100\n\t\t},\n\t\t\"BrowserSub2\" : {\n\t\t\t\"priority\" : -6,\n\t\t\t\"loop\" : -1,\n\t\t\t\"suspend\" : \"BrowserSub\",\n\t\t\t\"run\" : 100\n\t\t},\n\t\t\"BrowserDisplay\" : {\n\t\t\t\"priority\" : -6,\n\t\t\t\"loop\" : -1,\n\t\t\t\"suspend\" : \"Browser\",\n\t\t\t\"run\" : 300,\n\t\t\t\"resume\" : \"BrowserNext\",\n\t\t\t\"run\" : 12000,\n\t\t\t\"lock\" : \"mutex11\",\n\t\t\t\"sync\" : { \"ref\" : \"queue11\", \"mutex\": \"mutex11\" },\n\t\t\t\"unlock\" : \"mutex11\",\n\t\t\t\"run\" : 300,\n\t\t\t\"resume\" : \"Binder-display\",\n\t\t\t\"run\" : 
400\n\t\t},\n\t\t\"Binder-dummy\" : {\n\t\t\t\"priority\" : -6,\n\t\t\t\"loop\" : -1,\n\t\t\t\"lock\" : \"mutex11\",\n\t\t\t\"wait\" : { \"ref\" : \"queue11\", \"mutex\": \"mutex11\" },\n\t\t\t\"unlock\" : \"mutex11\",\n\t\t\t\"run\" : 200,\n\t\t\t\"lock\" : \"mutex11\",\n\t\t\t\"signal\" : \"queue11\",\n\t\t\t\"unlock\" : \"mutex11\",\n\t\t\t\"run\" : 100\n\t\t},\n\t\t\"Binder-display\" : {\n\t\t\t\"priority\" : -6,\n\t\t\t\"loop\" : -1,\n\t\t\t\"suspend\" : \"Binder-display\",\n\t\t\t\"run\" : 300,\n\t\t\t\"resume\" : \"Event-Browser\",\n\t\t\t\"resume\" : \"Event-Display\"\n\t\t},\n\t\t\"Event-Browser\" : {\n\t\t\t\"priority\" : -9,\n\t\t\t\"loop\" : -1,\n\t\t\t\"suspend\" : \"Event-Browser\",\n\t\t\t\"run\" : 50,\n\t\t\t\"sleep\" : 16000,\n\t\t\t\"run\" : 50,\n\t\t\t\"resume\" : \"Browser\"\n\t\t},\n\t\t\"Event-Display\" : {\n\t\t\t\"priority\" : -9,\n\t\t\t\"loop\" : -1,\n\t\t\t\"suspend\" : \"Event-Display\",\n\t\t\t\"run\" : 50,\n\t\t\t\"sleep\" : 16000,\n\t\t\t\"run\" : 50,\n\t\t\t\"resume\" : \"Display\"\n\t\t},\n\t\t\"Display\" : {\n\t\t\t\"priority\" : -8,\n\t\t\t\"loop\" : -1,\n\t\t\t\"suspend\" : \"Display\",\n\t\t\t\"run\" : 16000\n\t\t}\n\t},\n\t\"global\" : {\n\t\t\"default_policy\" : \"SCHED_OTHER\",\n\t\t\"duration\" : 600,\n\t\t\"ftrace\" : false,\n\t\t\"gnuplot\" : false,\n\t\t\"logdir\" : \"./\",\n\t\t\"log_basename\" : \"web\",\n\t\t\"lock_pages\" : true,\n\t\t\"frag\" : 1,\n\t\t\"calibration\" : \"CPU0\"\n\t}\n}\n"
  },
  {
    "path": "wa/workloads/rt_app/use_cases/browser-short.json",
    "content": "{\n\t\"tasks\" : {\n\t\t\"BrowserMain\" : {\n\t\t\t\"loop\" : 3,\n\t\t\t\"phases\" : {\n\t\t\t\t\"start\" : {\n\t\t\t\t\t\"loop\" : 1,\n\t\t\t\t\t\"sleep\" : 400000,\n\t\t\t\t\t\"run\" : 15000,\n\t\t\t\t\t\"resume\" : \"Browser\",\n\t\t\t\t\t\"run\" :  7000,\n\t\t\t\t\t\"sleep\" : 8000\n\t\t\t\t},\n\t\t\t\t\"render1\" : {\n\t\t\t\t\t\"loop\" : 50,\n\t\t\t\t\t\"resume\" : \"BrowserSub\",\n\t\t\t\t\t\"run\" : 3000\n\t\t\t\t},\n\t\t\t\t\"render2\" : {\n\t\t\t\t\t\"loop\" : 1,\n\t\t\t\t\t\"suspend\" : \"Browser\",\n\t\t\t\t\t\"run\" : 10000,\n\t\t\t\t\t\"resume\" : \"Browser\",\n\t\t\t\t\t\"run\" :  5000\n\t\t\t\t},\n\t\t\t\t\"render3\" : {\n\t\t\t\t\t\"loop\" : 20,\n\t\t\t\t\t\"resume\" : \"BrowserSub\",\n\t\t\t\t\t\"run\" : 3000\n\t\t\t\t},\n\t\t\t\t\"stop\" : {\n\t\t\t\t\t\"loop\" : 1,\n\t\t\t\t\t\"run\" :     2000,\n\t\t\t\t\t\"sleep\" : 200000,\n\t\t\t\t\t\"suspend\" : \"Browser\",\n\t\t\t\t\t\"sleep\" : 600000\n\t\t\t\t},\n\t\t\t\t\"scroll\" : {\n\t\t\t\t\t\"loop\" : 4,\n\t\t\t\t\t\"resume\" : \"Browser\",\n\t\t\t\t\t\"suspend\" : \"BrowserNext\",\n\t\t\t\t\t\"run\" : 1000\n\t\t\t\t},\n\t\t\t\t\"stop2\" : {\n\t\t\t\t\t\"loop\" : 1,\n\t\t\t\t\t\"suspend\" : \"Browser\",\n\t\t\t\t\t\"run\" : 200,\n\t\t\t\t\t\"sleep\" : 800000\n\t\t\t\t}\n\t\t\t}\n\t\t},\n\t\t\"BrowserSub1\" : {\n\t\t\t\"priority\" : -6,\n\t\t\t\"loop\" : -1,\n\t\t\t\"suspend\" : \"BrowserSub\",\n\t\t\t\"run\" : 100\n\t\t},\n\t\t\"BrowserSub2\" : {\n\t\t\t\"priority\" : -6,\n\t\t\t\"loop\" : -1,\n\t\t\t\"suspend\" : \"BrowserSub\",\n\t\t\t\"run\" : 100\n\t\t},\n\t\t\"BrowserDisplay\" : {\n\t\t\t\"priority\" : -6,\n\t\t\t\"loop\" : -1,\n\t\t\t\"suspend\" : \"Browser\",\n\t\t\t\"run\" : 300,\n\t\t\t\"resume\" : \"BrowserNext\",\n\t\t\t\"run\" : 12000,\n\t\t\t\"lock\" : \"mutex11\",\n\t\t\t\"sync\" : { \"ref\" : \"queue11\", \"mutex\": \"mutex11\" },\n\t\t\t\"unlock\" : \"mutex11\",\n\t\t\t\"run\" : 300,\n\t\t\t\"resume\" : \"Binder-display\",\n\t\t\t\"run\" : 
400\n\t\t},\n\t\t\"Binder-dummy\" : {\n\t\t\t\"priority\" : -6,\n\t\t\t\"loop\" : -1,\n\t\t\t\"lock\" : \"mutex11\",\n\t\t\t\"wait\" : { \"ref\" : \"queue11\", \"mutex\": \"mutex11\" },\n\t\t\t\"unlock\" : \"mutex11\",\n\t\t\t\"run\" : 200,\n\t\t\t\"lock\" : \"mutex11\",\n\t\t\t\"signal\" : \"queue11\",\n\t\t\t\"unlock\" : \"mutex11\",\n\t\t\t\"run\" : 100\n\t\t},\n\t\t\"Binder-display\" : {\n\t\t\t\"priority\" : -6,\n\t\t\t\"loop\" : -1,\n\t\t\t\"suspend\" : \"Binder-display\",\n\t\t\t\"run\" : 300,\n\t\t\t\"resume\" : \"Event-Browser\",\n\t\t\t\"resume\" : \"Event-Display\"\n\t\t},\n\t\t\"Event-Browser\" : {\n\t\t\t\"priority\" : -9,\n\t\t\t\"loop\" : -1,\n\t\t\t\"suspend\" : \"Event-Browser\",\n\t\t\t\"run\" : 50,\n\t\t\t\"sleep\" : 16000,\n\t\t\t\"run\" : 50,\n\t\t\t\"resume\" : \"Browser\"\n\t\t},\n\t\t\"Event-Display\" : {\n\t\t\t\"priority\" : -9,\n\t\t\t\"loop\" : -1,\n\t\t\t\"suspend\" : \"Event-Display\",\n\t\t\t\"run\" : 50,\n\t\t\t\"sleep\" : 16000,\n\t\t\t\"run\" : 50,\n\t\t\t\"resume\" : \"Display\"\n\t\t},\n\t\t\"Display\" : {\n\t\t\t\"priority\" : -8,\n\t\t\t\"loop\" : -1,\n\t\t\t\"suspend\" : \"Display\",\n\t\t\t\"run\" : 16000\n\t\t},\n\t},\n\t\"global\" : {\n\t\t\"default_policy\" : \"SCHED_OTHER\",\n\t\t\"duration\" : 6,\n\t\t\"ftrace\" : false,\n\t\t\"gnuplot\" : false,\n\t\t\"logdir\" : \"./\",\n\t\t\"log_basename\" : \"web\",\n\t\t\"lock_pages\" : true,\n\t\t\"frag\" : 1,\n\t\t\"calibration\" : \"CPU0\"\n\t}\n}\n"
  },
  {
    "path": "wa/workloads/rt_app/use_cases/camera-long.json",
    "content": "{\n\t\"tasks\" : {\n\t\t\"surfaceflinger\" : {\n\t\t\t\"priority\" : -7,\n\t\t\t\"loop\" : -1,\n\t\t\t\"suspend\": \"surfaceflinger\",\n\t\t\t\"run\" : 1500,\n\t\t\t\"resume\" : \"Binder1\"\n\t\t},\n\n\t\t\"emulate_irq\" : {\n\t\t\t\"priority\" : -19,\n\t\t\t\"loop\" : -1,\n\t\t\t\"resume\" : \"rpmsg_tx_tsk\",\n\t\t\t\"run\" : 50,\n\t\t\t\"timer\" : { \"ref\" : \"timerA\", \"period\" : 33333 }\n\t\t},\n\n\t\t\"rpmsg_tx_tsk\" : {\n\t\t\t\"priority\" : -19,\n\t\t\t\"loop\" : -1,\n\t\t\t\"phases\" : {\n\n\t\t\t\t\"p1\" : {\n\t\t\t\t\t\"run\" : 50,\n\t\t\t\t\t\"resume\" : \"ispack\",\n\t\t\t\t\t\"sleep\" : 3500\n\t\t\t\t},\n\n\t\t\t\t\"p2\" : {\n\t\t\t\t\t\"run\" : 50,\n\t\t\t\t\t\"resume\" : \"ispack\",\n\t\t\t\t\t\"sleep\" : 4000\n\t\t\t\t},\n\n\t\t\t\t\"p3\" : {\n\t\t\t\t\t\"run\" : 50,\n\t\t\t\t\t\"resume\" : \"ispack\",\n\t\t\t\t\t\"suspend\" : \"rpmsg_tx_tsk\"\n\t\t\t\t}\n\t\t\t}\n\t\t},\n\n\t\t\"ispack\" : {\n\t\t\t\"priority\" : -20,\n\t\t\t\"loop\" : -1,\n\t\t\t\"phases\" : {\n\t\t\t\t\"p1\" : {\n\t\t\t\t\t\"suspend\" : \"ispack\",\n\t\t\t\t\t\"run\" : 400,\n\t\t\t\t\t\"resume\" : \"ispout_1\",\n\t\t\t\t\t\"resume\" : \"ispin_1\",\n\t\t\t\t\t\"resume\" : \"LogdWriter\",\n\t\t\t\t},\n\n\t\t\t\t\"p2\" : {\n\t\t\t\t\t\"resume\" : \"ispout_2\"\n\t\t\t\t},\n\n\t\t\t\t\"p3\" : {\n\t\t\t\t\t\"resume\" : \"ispout_8\"\n\t\t\t\t},\n\n\t\t\t\t\"p4\": {\n\t\t\t\t\t\"resume\" : \"ispout_6\",\n\t\t\t\t\t\"run\" : 200\n\t\t\t\t},\n\n\t\t\t\t\"p5\" : {\n\t\t\t\t\t\"suspend\" : \"ispack\",\n\t\t\t\t\t\"run\" : 150\n\t\t\t\t},\n\n\t\t\t\t\"p6\" : {\n\t\t\t\t\t\"suspend\" : \"ispack\",\n\t\t\t\t\t\"run\" : 150\n\t\t\t\t}\n\t\t\t}\n\t\t},\n\n\t\t\"ispout_1\" : {\n\t\t\t\"loop\" : -1,\n\t\t\t\"phases\" : {\n\t\t\t\t\"p1\" : {\n\t\t\t\t\t\"suspend\" :  \"ispout_1\",\n\t\t\t\t\t\"run\" : 140\n\t\t\t\t},\n\n\t\t\t\t\"p2\" : {\n\t\t\t\t\t\"lock\" : \"ispout_1_mutex\",\n\t\t\t\t\t\"wait\" : { \"ref\" : \"ispout_1_queue\", \"mutex\": \"ispout_1_mutex\" 
},\n\t\t\t\t\t\"unlock\" : \"ispout_1_mutex\",\n\t\t\t\t\t\"sleep\" : 30,\n\t\t\t\t\t\"lock1\" : \"ispout_1_mutex\",\n\t\t\t\t\t\"wait1\" : { \"ref\" : \"ispout_1_queue\", \"mutex\": \"ispout_1_mutex\" },\n\t\t\t\t\t\"unlock1\" : \"ispout_1_mutex\",\n\t\t\t\t\t\"run\" : 1500\n\t\t\t\t},\n\n\t\t\t\t\"p3\" : {\n\t\t\t\t\t\"resume\" : \"fork1\"\n\t\t\t\t},\n\n\t\t\t\t\"p4\" : {\n\t\t\t\t\t\"resume\" : \"fork2\"\n\t\t\t\t},\n\n\t\t\t\t\"p5\" : {\n\t\t\t\t\t\"resume\" : \"fork3\"\n\t\t\t\t}\n\t\t\t}\n\t\t},\n\n\t\t\"ispout_2\" : {\n\t\t\t\"loop\" : -1,\n\t\t\t\"phases\" : {\n\t\t\t\t\"p1\" : {\n\t\t\t\t\t\"suspend\" :  \"ispout_2\",\n\t\t\t\t\t\"run\" : 140\n\t\t\t\t},\n\n\t\t\t\t\"p2\" : {\n\t\t\t\t\t\"lock\" : \"ispout_2_mutex\",\n\t\t\t\t\t\"wait\" : { \"ref\" : \"ispout_2_queue\", \"mutex\": \"ispout_2_mutex\" },\n\t\t\t\t\t\"unlock\" : \"ispout_2_mutex\",\n\t\t\t\t\t\"run\" : 350,\n\t\t\t\t\t\"resume\" : \"SceneDetection\",\n\t\t\t\t\t\"run1\" : 200\n\t\t\t\t},\n\n\t\t\t\t\"p3\" : {\n\t\t\t\t\t\"resume\" : \"FaceDetection\"\n\t\t\t\t},\n\n\t\t\t\t\"p4\" : {\n\t\t\t\t\t\"resume\" : \"Misc\",\n\t\t\t\t\t\"resume\" : \"LogdWriter\",\n\t\t\t\t},\n\n\t\t\t\t\"p5\" : {\n\t\t\t\t\t\"resume\" : \"AssitAF\",\n\t\t\t\t\t\"sleep\" : 20,\n\t\t\t\t\t\"run\" : 600\n\t\t\t\t}\n\t\t\t}\n\t\t},\n\n\t\t\"ispout_6\" : {\n\t\t\t\"loop\" : -1,\n\t\t\t\"phases\" : {\n\t\t\t\t\"p1\" : {\n\t\t\t\t\t\"suspend\" :  \"ispout_6\",\n\t\t\t\t\t\"run\" : 1000\n\t\t\t\t},\n\n\t\t\t\t\"p2\" : {\n\t\t\t\t\t\"resume\" : \"ev2\",\n\t\t\t\t\t\"run\" : 400\n\t\t\t\t},\n\n\t\t\t\t\"p3\" : {\n\t\t\t\t\t\"lock\" : \"ispout_8_mutex\",\n\t\t\t\t\t\"signal\" : \"ispout_8_queue\",\n\t\t\t\t\t\"unlock\" : \"ispout_8_mutex\"\n\t\t\t\t},\n\n\t\t\t\t\"p4\" : {\n\t\t\t\t\t\"lock\" : \"ispout_2_mutex\",\n\t\t\t\t\t\"signal\" : \"ispout_2_queue\",\n\t\t\t\t\t\"unlock\" : \"ispout_2_mutex\",\n\t\t\t\t\t\"resume\" : \"pl2\"\n\t\t\t\t},\n\n\t\t\t\t\"p5\" : {\n\t\t\t\t\t\"lock\" : 
\"ispout_1_mutex\",\n\t\t\t\t\t\"signal\" : \"ispout_1_queue\",\n\t\t\t\t\t\"unlock\" : \"ispout_1_mutex\",\n\t\t\t\t\t\"run\" : 150,\n\t\t\t\t\t\"lock1\" : \"ispout_1_mutex\",\n\t\t\t\t\t\"signal1\" : \"ispout_1_queue\",\n\t\t\t\t\t\"unlock1\" : \"ispout_1_mutex\",\n\t\t\t\t\t\"resume\" : \"LogdWriter\",\n\t\t\t\t}\n\t\t\t}\n\t\t},\n\n\t\t\"ispout_8\" : {\n\t\t\t\"loop\" : -1,\n\t\t\t\"phases\" : {\n\t\t\t\t\"p1\" : {\n\t\t\t\t\t\"suspend\" :  \"ispout_8\",\n\t\t\t\t\t\"run\" : 140\n\t\t\t\t},\n\n\t\t\t\t\"p2\" : {\n\t\t\t\t\t\"lock\" : \"ispout_8_mutex\",\n\t\t\t\t\t\"wait\" : { \"ref\" : \"ispout_8_queue\", \"mutex\": \"ispout_8_mutex\" },\n\t\t\t\t\t\"unlock\" : \"ispout_8_mutex\",\n\t\t\t\t\t\"run\" : 300,\n\t\t\t\t\t\"resume\" : \"ispreproc\"\n\t\t\t\t}\n\t\t\t}\n\t\t},\n\n\t\t\"ispin_1\" : {\n\t\t\t\"loop\" : -1,\n\t\t\t\"suspend\" :  \"ispin_1\",\n\t\t\t\"run\" : 180,\n\t\t\t\"resume\" : \"isp_0\",\n\n\t\t\t\"lock\" : \"ispin_1_mutex\",\n\t\t\t\"wait\" : { \"ref\" : \"ispin_1_queue\", \"mutex\": \"ispin_1_mutex\" },\n\t\t\t\"unlock\" : \"ispin_1_mutex\",\n\t\t\t\"run\" : 50,\n\t\t},\n\n\t\t\"ispin_2\" : {\n\t\t\t\"loop\" : -1,\n\t\t\t\"suspend\" :  \"ispin_2\",\n\t\t\t\"run\" : 50,\n\t\t},\n\n\n\t\t\"ispin_8\" : {\n\t\t\t\"loop\" : -1,\n\t\t\t\"suspend\" :  \"ispin_8\",\n\t\t\t\"run\" : 125,\n\t\t},\n\n\t\t\"isp_0\" : {\n\t\t\t\"loop\" : -1,\n\t\t\t\"suspend\" :  \"ispin_1\",\n\t\t\t\"run\" : 1500,\n\n\t\t\t\"lock\" : \"ispin_1_mutex\",\n\t\t\t\"signal\" : \"ispin_1_queue\",\n\t\t\t\"unlock\" : \"ispin_1_mutex\",\n\n\t\t\t\"resume\" : \"ispin_2\",\n\n\n\t\t\t\"run\" : 50,\n\n\t\t\t\"resume\" : \"ispin_8\",\n\t\t},\n\n\t\t\"SceneDetection\" : {\n\t\t\t\"loop\" : -1,\n\t\t\t\"phases\" : {\n\t\t\t\t\"p1\" : {\n\t\t\t\t\t\"suspend\" :  \"SceneDetection\",\n\t\t\t\t\t\"run\" : 3500,\n\t\t\t\t},\n\n\t\t\t\t\"p2\" : {\n\t\t\t\t\t\"resume\" : \"LogdWriter\",\n\t\t\t\t\t\"lock\" : \"ev2_mutex\",\n\t\t\t\t\t\"signal\" : \"ev2_queue\",\n\t\t\t\t\t\"unlock\" : 
\"ev2_mutex\"\n\t\t\t\t},\n\n\t\t\t\t\"p3\" : {\n\t\t\t\t\t\"suspend\" :  \"SceneDetection\",\n\t\t\t\t\t\"run\" : 6500,\n\t\t\t\t},\n\n\t\t\t\t\"p4\" : {\n\t\t\t\t\t\"resume\" : \"LogdWriter\",\n\t\t\t\t\t\"lock\" : \"ev2_mutex\",\n\t\t\t\t\t\"signal\" : \"ev2_queue\",\n\t\t\t\t\t\"unlock\" : \"ev2_mutex\"\n\t\t\t\t}\n\t\t\t}\n\t\t},\n\n\t\t\"FaceDetection\" : {\n\t\t\t\"loop\" : -1,\n\n\t\t\t\"phases\" : {\n\t\t\t\t\"p1\" : {\n\t\t\t\t\t\"suspend\" : \"FaceDetection\",\n\t\t\t\t\t\"run\" : 5736,\n\t\t\t\t\t\"resume\" : \"LogdWriter\",\n\t\t\t\t},\n\n\t\t\t\t\"p2\" : {\n\t\t\t\t\t\"suspend\" : \"FaceDetection\",\n\t\t\t\t\t\"run\" : 7626,\n\t\t\t\t\t\"resume\" : \"LogdWriter\",\n\t\t\t\t},\n\n\t\t\t\t\"p3\" : {\n\t\t\t\t\t\"suspend\" : \"FaceDetection\",\n\t\t\t\t\t\"run\" : 2405,\n\t\t\t\t\t\"resume\" : \"LogdWriter\",\n\t\t\t\t},\n\n\t\t\t\t\"p4\" : {\n\t\t\t\t\t\"suspend\" : \"FaceDetection\",\n\t\t\t\t\t\"run\" : 8184,\n\t\t\t\t\t\"resume\" : \"LogdWriter\",\n\t\t\t\t}\n\t\t\t}\n\t\t},\n\n\t\t\"ispreproc\" : {\n\t\t\t\"loop\" : -1,\n\t\t\t\"phases\" : {\n\t\t\t\t\"p1\" : {\n\t\t\t\t\t\"suspend\" : \"ispreproc\",\n\t\t\t\t\t\"run\" : 150\n\t\t\t\t},\n\n\t\t\t\t\"p2\" : {\n\t\t\t\t\t\"lock\" : \"ispreproc_mutex\",\n\t\t\t\t\t\"wait\" : { \"ref\" : \"ispreproc_queue\", \"mutex\": \"ispreproc_mutex\" },\n\t\t\t\t\t\"unlock\" : \"ispreproc_mutex\",\n\t\t\t\t\t\"run\" : 150\n\t\t\t\t}\n\t\t\t}\n\t\t},\n\n\t\t\"ev2\" : {\n\t\t\t\"loop\" : -1,\n\n\t\t\t\"phases\" : {\n\t\t\t\t\"p1\" : {\n\t\t\t\t\t\"suspend\" : \"ev2\",\n\t\t\t\t\t\"run\" : 260\n\t\t\t\t},\n\n\t\t\t\t\"p2\" : {\n\t\t\t\t\t\"resume\" : \"ispreq\",\n\t\t\t\t\t\"run\" : 260,\n\t\t\t\t\t\"resume\" : \"LogdWriter\",\n\t\t\t\t},\n\n\t\t\t\t\"p3\" : {\n\t\t\t\t\t\"lock\" : \"ev2_mutex\",\n\t\t\t\t\t\"wait\" : { \"ref\" : \"ev2_queue\", \"mutex\": \"ev2_mutex\" },\n\t\t\t\t\t\"unlock\" : \"ev2_mutex\",\n\t\t\t\t\t\"run\" : 140\n\t\t\t\t},\n\n\t\t\t\t\"p3\" : {\n\t\t\t\t\t\"lock\" : 
\"ispreproc_mutex\",\n\t\t\t\t\t\"signal\" : \"ispreproc_queue\",\n\t\t\t\t\t\"unlock\" : \"ispreproc_mutex\",\n\t\t\t\t\t\"run\" : 110\n\t\t\t\t}\n\t\t\t}\n\t\t},\n\n\t\t\"Misc\" : {\n\t\t\t\"loop\" : -1,\n\n\t\t\t\"phases\" : {\n\t\t\t\t\"p1\" : {\n\t\t\t\t\t\"suspend\" : \"Misc\",\n\t\t\t\t       \t\"run\" : 178\n\t\t\t\t}\n\t\t\t}\n\t\t},\n\n\t\t\"AssitAF\" : {\n\t\t\t\"loop\" : -1,\n\n\t\t\t\"phases\" : {\n\t\t\t\t\"p1\" : {\n\t\t\t\t\t\"suspend\" : \"AssitAF\",\n\t\t\t\t\t\"run\" : 178\n\t\t\t\t}\n\t\t\t}\n\t\t},\n\n\t\t\"ispreq\" : {\n\t\t\t\"loop\" : -1,\n\n\t\t\t\"phases\" : {\n\t\t\t\t\"p1\" : {\n\t\t\t\t\t\"suspend\" : \"ispreq\",\n\t\t\t\t\t\"run\" : 180\n\t\t\t\t}\n\t\t\t}\n\t\t},\n\n\t\t\"pl2\" : {\n\t\t\t\"loop\" : -1,\n\n\t\t\t\"phases\" : {\n\t\t\t\t\"p1\" : {\n\t\t\t\t\t\"suspend\" : \"pl2\",\n\t\t\t\t\t\"run\" : 285,\n\t\t\t\t\t\"resume\" : \"CameraData\"\n\t\t\t\t},\n\n\t\t\t\t\"p2\" : {\n\t\t\t\t\t\"sleep\" : 11848,\n\t\t\t\t\t\"run\" : 896\n\t\t\t\t}\n\t\t\t}\n\t\t},\n\n\t\t\"fork1\" : {\n\t\t\t\"loop\" : -1,\n\n\t\t\t\"phases\" : {\n\t\t\t\t\"p1\" : {\n\t\t\t\t\t\"suspend\" : \"fork1\",\n\t\t\t\t\t\"run\" : 182\n\t\t\t\t},\n\n\t\t\t\t\"p2\" : {\n\t\t\t\t\t\"resume\" : \"ThumbnailBase1\"\n\t\t\t\t}\n\t\t\t}\n\t\t},\n\n\t\t\"ThumbnailBase1\" : {\n\t\t\t\"loop\" : -1,\n\n\t\t\t\"phases\" : {\n\t\t\t\t\"p1\" : {\n\t\t\t\t\t\"suspend\" : \"ThumbnailBase1\",\n\t\t\t\t\t\"run\" : 7000,\n\t\t\t\t\t\"resume\" : \"LogdWriter\",\n\t\t\t\t}\n\t\t\t}\n\t\t},\n\n\t\t\"fork2\" : {\n\t\t\t\"loop\" : -1,\n\n\t\t\t\"phases\" : {\n\t\t\t\t\"p1\" : {\n\t\t\t\t\t\"suspend\" : \"fork2\",\n\t\t\t\t\t\"run\" : 82\n\t\t\t\t},\n\n\t\t\t\t\"p2\" : {\n\t\t\t\t\t\"resume\" : \"ThumbnailBase2\"\n\t\t\t\t}\n\t\t\t}\n\t\t},\n\n\t\t\"ThumbnailBase2\" : {\n\t\t\t\"loop\" : -1,\n\n\t\t\t\"phases\" : {\n\t\t\t\t\"p1\" : {\n\t\t\t\t\t\"suspend\" : \"ThumbnailBase2\",\n\t\t\t\t\t\"run\" : 6400,\n\t\t\t\t\t\"resume\" : \"LogdWriter\",\n\t\t\t\t}\n\t\t\t}\n\t\t},\n\n\t\t\"fork3\" : 
{\n\t\t\t\"loop\" : -1,\n\n\t\t\t\"phases\" : {\n\t\t\t\t\"p1\" : {\n\t\t\t\t\t\"suspend\" : \"fork3\",\n\t\t\t\t\t\"run\" : 82\n\t\t\t\t},\n\n\t\t\t\t\"p2\" : {\n\t\t\t\t\t\"resume\" : \"ThumbnailBase3\"\n\t\t\t\t}\n\t\t\t}\n\t\t},\n\n\t\t\"ThumbnailBase3\" : {\n\t\t\t\"loop\" : -1,\n\n\t\t\t\"phases\" : {\n\t\t\t\t\"p1\" : {\n\t\t\t\t\t\"suspend\" : \"ThumbnailBase3\",\n\t\t\t\t\t\"run\" : 7361\n\t\t\t\t}\n\t\t\t}\n\t\t},\n\n\t\t\"SensorService\" : {\n\t\t\t\"loop\" : -1,\n\t\t\t\"phases\" : {\n\t\t\t\t\"p1\" : {\n\t\t\t\t\t\"run\" : 300,\n\t\t\t\t\t\"resume\" : \"m_camera\",\n\t\t\t\t\t\"timer\" : { \"ref\" : \"timerB\", \"period\" : 3000 }\n\t\t\t\t},\n\n\t\t\t\t\"p2\" : {\n\t\t\t\t\t\"run\" : 300,\n\t\t\t\t\t\"timer\" : { \"ref\" : \"timerB\", \"period\" : 3000 }\n\t\t\t\t},\n\n\t\t\t\t\"p3\" : {\n\t\t\t\t\t\"run\" : 300,\n\t\t\t\t\t\"timer\" : { \"ref\" : \"timerB\", \"period\" : 3000 }\n\t\t\t\t},\n\n\t\t\t\t\"p4\" : {\n\t\t\t\t\t\"run\" : 300,\n\t\t\t\t\t\"resume\" : \"m_camera\",\n\t\t\t\t\t\"timer\" : { \"ref\" : \"timerB\", \"period\" : 3000 }\n\t\t\t\t}\n\t\t\t}\n\t\t},\n\n\t\t\"DisplaySync\" : {\n\t\t\t\"loop\" : -1,\n\t\t\t\"phases\" : {\n\t\t\t\t\"p1\" : {\n\t\t\t\t\t\"run\" : 180\n\t\t\t\t},\n\n\t\t\t\t\"p2\" : {\n\t\t\t\t\t\"resume\" : \"EventThread1\"\n\t\t\t\t},\n\n\t\t\t\t\"p3\" : {\n\t\t\t\t\t\"resume\" : \"EventThread2\"\n\t\t\t\t},\n\n\t\t\t\t\"p4\" : {\n\t\t\t\t\t\"sleep\" : 30,\n\t\t\t\t\t\"run\" : 120,\n\t\t\t\t\t\"timer\" : { \"ref\" : \"timerB\", \"period\" : 16667 }\n\t\t\t\t}\n\t\t\t}\n\t\t},\n\n\n\t\t\"EventThread1\" : {\n\t\t\t\"loop\" : -1,\n\n\t\t\t\"phases\" : {\n\n\t\t\t\t\"p1\" : {\n\t\t\t\t\t\"suspend\" : \"EventThread1\",\n\t\t\t\t\t\"run\" : 200,\n\t\t\t\t\t\"resume\" : \"m_camera\",\n\t\t\t\t\t\"run1\" : 280,\n\t\t\t\t\t\"resume\": \"surfaceflinger\",\n\t\t\t\t},\n\n\t\t\t\t\"p2\" : {\n\t\t\t\t\t\"sleep\" : 660,\n\t\t\t\t\t\"run\" : 300,\n\t\t\t\t\t\"sleep1\" : 60,\n\t\t\t\t\t\"run1\" : 
150\n\t\t\t\t}\n\t\t\t}\n\t\t},\n\n\t\t\"EventThread2\" : {\n\t\t\t\"loop\" : -1,\n\n\t\t\t\"phases\" : {\n\t\t\t\t\"p1\" : {\n\t\t\t\t\t\"suspend\" : \"EventThread2\",\n\t\t\t\t\t\"run\" : 150\n\t\t\t\t}\n\t\t\t}\n\t\t},\n\n\t\t\"m_camera\" : {\n\t\t\t\"loop\" : -1,\n\n\t\t\t\"phases\" : {\n\t\t\t\t\"p1\" : {\n\t\t\t\t\t\"suspend\" : \"m_camera\",\n\t\t\t\t\t\"run\" : 660\n\t\t\t\t},\n\n\t\t\t\t\"p2\" : {\n\t\t\t\t\t\"lock\" : \"render_thread_mutex\",\n\t\t\t\t\t\"signal\" : \"render_thread_queue\",\n\t\t\t\t\t\"unlock\" : \"render_thread_mutex\"\n\t\t\t\t}\n\t\t\t}\n\t\t},\n\n\t\t\"RenderThread\" : {\n\t\t\t\"loop\" : -1,\n\n\t\t\t\"lock\" : \"render_thread_mutex\",\n\t\t\t\"wait\" : { \"ref\" : \"render_thread_queue\", \"mutex\": \"render_thread_mutex\" },\n\t\t\t\"unlock\" : \"render_thread_mutex\",\n\n\t\t\t\"run\" : 300,\n\t\t\t\"resume\" :  \"mali-render\",\n\t\t\t\"sleep\" : 240,\n\t\t\t\"run1\" : 1000,\n\t\t\t\"sleep1\" : 210,\n\t\t\t\"run2\" : 1040,\n\t\t\t\"sleep2\" : 580,\n\t\t\t\"run3\" : 350\n\n\t\t},\n\n\t\t\"MaliRender\" : {\n\t\t\t\"loop\" : -1,\n\n\t\t\t\"suspend\" : \"mali-render\",\n\t\t\t\"run\" : 250,\n\t\t\t\"resume\" :  \"mali-render-hnd\",\n\t\t\t\"sleep\" : 20,\n\t\t\t\"run1\" : 160,\n\t\t\t\"sleep1\" : 1373,\n\t\t\t\"run2\" : 250,\n\t\t\t\"resume2\" :  \"mali-render-hnd\",\n\t\t\t\"sleep2\" : 20,\n\t\t\t\"run3\" : 250,\n\t\t\t\"sleep3\" : 568,\n\t\t\t\"run4\" : 500,\n\t\t\t\"sleep4\" : 30,\n\t\t\t\"run5\" : 300,\n\t\t\t\"resume5\" :  \"mali-render-hnd\",\n\t\t\t\"sleep5\" : 200,\n\t\t\t\"run6\" : 120,\n\n\t\t\t\"resume\": \"surfaceflinger\",\n\t\t},\n\n\t\t\"MaliRenderHnd\" : {\n\t\t\t\"loop\" : -1,\n\t\t\t\"suspend\" : \"mali-render-hnd\",\n\t\t\t\"run\" : 150\n\t\t},\n\n\t\t\"AudioTick\" : {\n\t\t\t\"priority\" : -19,\n\t\t\t\"loop\" : -1,\n\t\t\t\"phases\" : {\n\t\t\t\t\"p1\" : {\n\t\t\t\t\t\"loop\" : 1,\n\t\t\t\t\t\"run\" : 150,\n\t\t\t\t\t\"resume\" : \"AudioIn\",\n\t\t\t\t\t\"timer\" :  { \"ref\" : \"tick\", \"period\": 20000 
}\n\t\t\t\t}\n\t\t\t}\n\t\t},\n\n\t\t\"AudioIn\" : {\n\t\t\t\"loop\" : -1,\n\t\t\t\"suspend\" : \"AudioIn\",\n\t\t\t\"run\" : 2730,\n\t\t\t\"resume\" : \"AudioRecord\"\n\t\t},\n\n\t\t\"AudioRecord\" : {\n\t\t\t\"loop\" : -1,\n\t\t\t\"suspend\" : \"AudioRecord\",\n\t\t\t\"resume\" : \"pull_looper\",\n\t\t\t\"sleep\" : 2600,\n\t\t\t\"resume1\" : \"pull_looper\"\n\t\t},\n\n\t\t\"pull_looper\" : {\n\t\t\t\"loop\" : -1,\n\t\t\t\"suspend\" : \"pull_looper\",\n\n\t\t\t\"lock\" : \"mutex\",\n\t\t\t\"signal\" : \"queue\",\n\t\t\t\"unlock\" : \"mutex\"\n\t\t},\n\n\t\t\"recoder_looper\" : {\n\t\t\t\"loop\" : -1,\n\t\t\t\"lock\" : \"mutex\",\n\t\t\t\"signal\" : \"queue\",\n\t\t\t\"wait\" : { \"ref\" : \"queue\", \"mutex\": \"mutex\" },\n\t\t\t\"unlock\" : \"mutex\",\n\t\t\t\"run\" : 180,\n\n\t\t\t\"lock1\" : \"mutex\",\n\t\t\t\"signal1\" : \"queue\",\n\t\t\t\"wait1\" : { \"ref\" : \"queue\", \"mutex\": \"mutex\" },\n\t\t\t\"unlock1\" : \"mutex\",\n\t\t\t\"run1\" : 130,\n\n\t\t\t\"resume\" : \"gle.acc.encoder\",\n\n\t\t\t\"lock2\" : \"mutex\",\n\t\t\t\"signal2\" : \"queue\",\n\t\t\t\"wait2\" : { \"ref\" : \"queue\", \"mutex\": \"mutex\" },\n\t\t\t\"unlock2\" : \"mutex\",\n\t\t\t\"run2\" : 130\n\t\t},\n\n\t\t\"codec_looper\" : {\n\t\t\t\"loop\" : -1,\n\t\t\t\"lock\" : \"mutex\",\n\t\t\t\"wait\" : { \"ref\" : \"queue\", \"mutex\": \"mutex\" },\n\t\t\t\"unlock\" : \"mutex\",\n\t\t\t\"run\" :  130,\n\n\t\t\t\"lock1\" : \"mutex\",\n\t\t\t\"signal\" : \"queue\",\n\t\t\t\"unlock1\" : \"mutex\",\n\t\t\t\"run1\" :  180,\n\n\t\t\t\"suspend\" : \"codec_looper\",\n\t\t\t\"run2\" :  160,\n\t\t\t\"lock2\" : \"mutex\",\n\t\t\t\"signal2\" : \"queue\",\n\t\t\t\"unlock2\" : \"mutex\"\n\t\t},\n\n\t\t\"gle.acc.encoder\" : {\n\t\t\t\"loop\" : -1,\n\t\t\t\"suspend\" : \"gle.acc.encoder\",\n\t\t\t\"run\" : 20,\n\t\t\t\"resume\" : \"OMXCall\",\n\n\t\t\t\"suspend1\" : \"gle.acc.encoder\",\n\t\t\t\"run1\" : 800,\n\t\t\t\"resume1\" : \"OMXCall\"\n\t\t},\n\n\t\t\"OMXCall\" : {\n\t\t\t\"loop\" : 
-1,\n\t\t\t\"suspend\" : \"OMXCall\",\n\t\t\t\"run\" :  130,\n\t\t\t\"resume\" : \"codec_looper\"\n\t\t},\n\n\t\t\"CameraData\" : {\n\t\t\t\"loop\" : -1,\n\t\t\t\"suspend\" : \"CameraData\",\n\t\t\t\"run\" :  2000,\n\t\t\t\"lock\" : \"camera_data_mutex\",\n\t\t\t\"signal\" : \"camera_data_queue\",\n\t\t\t\"unlock\" : \"camera_data_mutex\",\n\n\t\t\t\"resume\" : \"Binder1\",\n\t\t\t\"resume1\" : \"Binder2\",\n\t\t\t\"run1\" :  2080,\n\n\t\t\t\"lock1\" : \"camera_data_mutex\",\n\t\t\t\"signal1\" : \"camera_data_queue\",\n\t\t\t\"unlock1\" : \"camera_data_mutex\",\n\n\t\t\t\"resume2\" : \"Binder3\"\n\t\t},\n\n\t\t\"CameraDataProc\" : {\n\t\t\t\"loop\" : -1,\n\t\t\t\"lock\" : \"camera_data_mutex\",\n\t\t\t\"wait\" : { \"ref\" : \"camera_data_queue\", \"mutex\": \"camera_data_mutex\" },\n\t\t\t\"unlock\" : \"camera_data_mutex\",\n\t\t\t\"run\" :  150,\n\n\t\t\t\"lock1\" : \"camera_stream1_mutex\",\n\t\t\t\"signal\" : \"camera_stream1_queue\",\n\t\t\t\"unlock1\" : \"camera_stream1_mutex\",\n\n\t\t\t\"lock2\" : \"camera_data_mutex\",\n\t\t\t\"wait2\" : { \"ref\" : \"camera_data_queue\", \"mutex\": \"camera_data_mutex\" },\n\t\t\t\"unlock2\" : \"camera_data_mutex\",\n\t\t\t\"run2\" :  1000,\n\n\t\t\t\"resume\" : \"Binder1\"\n\t\t},\n\n\t\t\"CameraStream1\" : {\n\t\t\t\"loop\" : -1,\n\t\t\t\"lock\" : \"camera_stream1_mutex\",\n\t\t\t\"wait\" : { \"ref\" : \"camera_stream1_queue\", \"mutex\": \"camera_stream1_mutex\" },\n\t\t\t\"unlock\" : \"camera_stream1_mutex\",\n\t\t\t\"run\" :  240,\n\n\t\t\t\"resume\" : \"Binder1\",\n\n\t\t\t\"lock1\" : \"camera_stream2_mutex\",\n\t\t\t\"signal\" : \"camera_stream2_queue\",\n\t\t\t\"unlock1\" : \"camera_stream2_mutex\",\n\n\t\t\t\"sleep\" : 2500,\n\t\t\t\"run1\" :  240,\n\n\t\t\t\"lock2\" : \"camera_stream3_mutex\",\n\t\t\t\"signal2\" : \"camera_stream3_queue\",\n\t\t\t\"unlock2\" : \"camera_stream3_mutex\",\n\n\t\t\t\"resume\" : \"LogdWriter\",\n\n\t\t\t\"lock3\" : \"camera_req_mutex\",\n\t\t\t\"signal3\" : 
\"camera_req_queue\",\n\t\t\t\"unlock3\" : \"camera_req_mutex\"\n\t\t},\n\n\t\t\"CameraStream2\" : {\n\t\t\t\"loop\" : -1,\n\t\t\t\"lock\" : \"camera_stream2_mutex\",\n\t\t\t\"wait\" : { \"ref\" : \"camera_stream2_queue\", \"mutex\": \"camera_stream2_mutex\" },\n\t\t\t\"unlock\" : \"camera_stream2_mutex\",\n\t\t\t\"run\" :  180,\n\t\t\t\"sleep\" : 2500,\n\t\t\t\"run1\" :  240,\n\t\t\t\"sleep1\" : 850,\n\t\t\t\"run2\" :  90,\n\n\t\t\t\"resume\" : \"Binder1\",\n\n\t\t\t\"lock2\" : \"camera_req_mutex\",\n\t\t\t\"signal\" : \"camera_req_queue\",\n\t\t\t\"unlock2\" : \"camera_req_mutex\",\n\n\t\t\t\"resume\" : \"LogdWriter\",\n\t\t},\n\n\t\t\"CameraStream3\" : {\n\t\t\t\"loop\" : -1,\n\t\t\t\"lock\" : \"camera_stream3_mutex\",\n\t\t\t\"wait\" : { \"ref\" : \"camera_stream3_queue\", \"mutex\": \"camera_stream3_mutex\" },\n\t\t\t\"unlock\" : \"camera_stream3_mutex\",\n\t\t\t\"run\" :  90,\n\n\t\t\t\"lock1\" : \"eb_mutex\",\n\t\t\t\"signal\" : \"eb_queue\",\n\t\t\t\"unlock1\" : \"eb_mutex\",\n\n\t\t\t\"resume\" : \"LogdWriter\",\n\t\t},\n\n\t\t\"CameraReqQueue\" : {\n\t\t\t\"loop\" : -1,\n\t\t\t\"lock\" : \"camera_req_mutex\",\n\t\t\t\"wait\" : { \"ref\" : \"camera_req_queue\", \"mutex\": \"camera_req_mutex\" },\n\t\t\t\"unlock\" : \"camera_req_mutex\",\n\t\t\t\"run\" :  200,\n\n\t\t\t\"lock1\" : \"camera_req_mutex\",\n\t\t\t\"wait1\" : { \"ref\" : \"camera_req_queue\", \"mutex\": \"camera_req_mutex\" },\n\t\t\t\"unlock1\" : \"camera_req_mutex\",\n\t\t\t\"run1\" :  200,\n\n\t\t\t\"resume\" : \"Binder3\",\n\n\t\t\t\"sleep\" : 120,\n\t\t\t\"run2\" : 200,\n\n\t\t\t\"resume2\" : \"Binder2\",\n\n\t\t\t\"sleep2\" : 1900,\n\t\t\t\"run3\" : 270,\n\t\t\t\"lock3\" : \"camera_s0_mutex\",\n\t\t\t\"signal\" : \"camera_s0_queue\",\n\t\t\t\"unlock3\" : \"camera_s0_mutex\",\n\n\t\t\t\"resume3\" : \"Binder1\",\n\n\t\t\t\"sleep3\" : 560,\n\t\t\t\"run4\" : 700,\n\t\t\t\"lock4\" : \"camera_s1_mutex\",\n\t\t\t\"signal4\" : \"camera_s1_queue\",\n\t\t\t\"unlock4\" : 
\"camera_s1_mutex\",\n\n\t\t\t\"resume\" : \"LogdWriter\",\n\n\t\t\t\"sleep4\" : 533,\n\t\t\t\"run5\" : 300,\n\t\t\t\"lock5\" : \"camera_s2_mutex\",\n\t\t\t\"signal5\" : \"camera_s2_queue\",\n\t\t\t\"unlock5\" : \"camera_s2_mutex\"\n\t\t},\n\n\t\t\"CameraS0\" : {\n\t\t\t\"loop\" : -1,\n\t\t\t\"lock\" : \"camera_s0_mutex\",\n\t\t\t\"wait\" : { \"ref\" : \"camera_s0_queue\", \"mutex\": \"camera_s0_mutex\" },\n\t\t\t\"unlock\" : \"camera_s0_mutex\",\n\t\t\t\"run\" :  300,\n\n\t\t\t\"lock1\" : \"camera_s1_mutex\",\n\t\t\t\"wait1\" : { \"ref\" : \"camera_s1_queue\", \"mutex\": \"camera_s1_mutex\" },\n\t\t\t\"unlock1\" : \"camera_s1_mutex\",\n\t\t\t\"run1\" :  300,\n\n\t\t\t\"lock2\" : \"camera_s2_mutex\",\n\t\t\t\"wait2\" : { \"ref\" : \"camera_s2_queue\", \"mutex\": \"camera_s2_mutex\" },\n\t\t\t\"unlock2\" : \"camera_s2_mutex\",\n\t\t\t\"run2\" :  400,\n\n\t\t\t\"sleep\" : 900,\n\t\t\t\"run3\" :  380,\n\t\t\t\"sleep3\" : 250,\n\t\t\t\"run4\" :  278\n\t\t},\n\n\t\t\"EmptyBuffer\" : {\n\t\t\t\"loop\" : -1,\n\t\t\t\"lock\" : \"eb_mutex\",\n\t\t\t\"wait\" : { \"ref\" : \"eb_queue\", \"mutex\": \"eb_mutex\" },\n\t\t\t\"unlock\" : \"eb_mutex\",\n\t\t\t\"run\" :  240,\n\n\t\t\t\"lock1\" : \"encb_mutex\",\n\t\t\t\"signal\" : \"encb_queue\",\n\t\t\t\"unlock1\" : \"encb_mutex\"\n\t\t},\n\n\t\t\"EncodeBuffer\" : {\n\t\t\t\"loop\" : -1,\n\t\t\t\"lock\" : \"encb_mutex\",\n\t\t\t\"wait\" : { \"ref\" : \"encb_queue\", \"mutex\": \"encb_mutex\" },\n\t\t\t\"unlock\" : \"encb_mutex\",\n\t\t\t\"run\" :  370,\n\n\t\t\t\"lock1\" : \"fb_mutex\",\n\t\t\t\"signal\" : \"fb_queue\",\n\t\t\t\"wait1\" : { \"ref\" : \"fb_queue\", \"mutex\": \"fb_mutex\" },\n\t\t\t\"unlock1\" : \"fb_mutex\",\n\t\t\t\"run1\" :  350\n\t\t},\n\n\t\t\"FillBuffer\" : {\n\t\t\t\"loop\" : -1,\n\t\t\t\"lock\" : \"fb_mutex\",\n\t\t\t\"wait\" : { \"ref\" : \"fb_queue\", \"mutex\": \"fb_mutex\" },\n\t\t\t\"unlock\" : \"fb_mutex\",\n\t\t\t\"run\" :  200,\n\n\t\t\t\"sleep\" : 14800,\n\t\t\t\"run1\" :  2400,\n\n\t\t\t\"lock1\" 
: \"fb_mutex\",\n\t\t\t\"signal\" : \"fb_queue\",\n\t\t\t\"unlock1\" : \"fb_mutex\"\n\t\t},\n\n\t\t\"Binder1\" : {\n\t\t\t\"loop\" : -1,\n\t\t\t\"suspend\" : \"Binder1\",\n\t\t\t\"run\" :  350\n\t\t},\n\n\t\t\"Binder2\" : {\n\t\t\t\"loop\" : -1,\n\t\t\t\"suspend\" : \"Binder2\",\n\t\t\t\"run\" :  365\n\t\t},\n\n\t\t\"Binder3\" : {\n\t\t\t\"loop\" : -1,\n\t\t\t\"suspend\" : \"Binder3\",\n\t\t\t\"run\" :  369,\n\t\t},\n\n\t\t\"LogdWriter\" : {\n\t\t\t\"loop\" : -1,\n\t\t\t\"suspend\" : \"LogdWriter\",\n\t\t\t\"run\" :  300,\n\t\t\t\"resume\" : \"LogdReader\",\n\t\t},\n\n\t\t\"LogdReader\" : {\n\t\t\t\"loop\" : -1,\n\t\t\t\"suspend\" : \"LogdReader\",\n\t\t\t\"run\" :  30,\n\t\t}\n\n\t},\n\n\t\"global\" : {\n\t\t\"default_policy\" : \"SCHED_OTHER\",\n\t\t\"duration\" : 600,\n\t\t\"ftrace\" : false,\n\t\t\"gnuplot\" : false,\n\t\t\"logdir\" : \"./\",\n\t\t\"log_basename\" : \"camera\",\n\t\t\"lock_pages\" : true,\n\t\t\"frag\" : 1,\n\t\t\"calibration\" : \"CPU0\"\n\t}\n}\n\n"
  },
  {
    "path": "wa/workloads/rt_app/use_cases/camera-short.json",
    "content": "{\n\t\"tasks\" : {\n\t\t\"surfaceflinger\" : {\n\t\t\t\"priority\" : -7,\n\t\t\t\"loop\" : -1,\n\t\t\t\"suspend\": \"surfaceflinger\",\n\t\t\t\"run\" : 1500,\n\t\t\t\"resume\" : \"Binder1\"\n\t\t},\n\n\t\t\"emulate_irq\" : {\n\t\t\t\"priority\" : -19,\n\t\t\t\"loop\" : -1,\n\t\t\t\"resume\" : \"rpmsg_tx_tsk\",\n\t\t\t\"run\" : 50,\n\t\t\t\"timer\" : { \"ref\" : \"timerA\", \"period\" : 33333 }\n\t\t},\n\n\t\t\"rpmsg_tx_tsk\" : {\n\t\t\t\"priority\" : -19,\n\t\t\t\"loop\" : -1,\n\t\t\t\"phases\" : {\n\n\t\t\t\t\"p1\" : {\n\t\t\t\t\t\"run\" : 50,\n\t\t\t\t\t\"resume\" : \"ispack\",\n\t\t\t\t\t\"sleep\" : 3500\n\t\t\t\t},\n\n\t\t\t\t\"p2\" : {\n\t\t\t\t\t\"run\" : 50,\n\t\t\t\t\t\"resume\" : \"ispack\",\n\t\t\t\t\t\"sleep\" : 4000\n\t\t\t\t},\n\n\t\t\t\t\"p3\" : {\n\t\t\t\t\t\"run\" : 50,\n\t\t\t\t\t\"resume\" : \"ispack\",\n\t\t\t\t\t\"suspend\" : \"rpmsg_tx_tsk\"\n\t\t\t\t}\n\t\t\t}\n\t\t},\n\n\t\t\"ispack\" : {\n\t\t\t\"priority\" : -20,\n\t\t\t\"loop\" : -1,\n\t\t\t\"phases\" : {\n\t\t\t\t\"p1\" : {\n\t\t\t\t\t\"suspend\" : \"ispack\",\n\t\t\t\t\t\"run\" : 400,\n\t\t\t\t\t\"resume\" : \"ispout_1\",\n\t\t\t\t\t\"resume\" : \"ispin_1\",\n\t\t\t\t\t\"resume\" : \"LogdWriter\",\n\t\t\t\t},\n\n\t\t\t\t\"p2\" : {\n\t\t\t\t\t\"resume\" : \"ispout_2\"\n\t\t\t\t},\n\n\t\t\t\t\"p3\" : {\n\t\t\t\t\t\"resume\" : \"ispout_8\"\n\t\t\t\t},\n\n\t\t\t\t\"p4\": {\n\t\t\t\t\t\"resume\" : \"ispout_6\",\n\t\t\t\t\t\"run\" : 200\n\t\t\t\t},\n\n\t\t\t\t\"p5\" : {\n\t\t\t\t\t\"suspend\" : \"ispack\",\n\t\t\t\t\t\"run\" : 150\n\t\t\t\t},\n\n\t\t\t\t\"p6\" : {\n\t\t\t\t\t\"suspend\" : \"ispack\",\n\t\t\t\t\t\"run\" : 150\n\t\t\t\t}\n\t\t\t}\n\t\t},\n\n\t\t\"ispout_1\" : {\n\t\t\t\"loop\" : -1,\n\t\t\t\"phases\" : {\n\t\t\t\t\"p1\" : {\n\t\t\t\t\t\"suspend\" :  \"ispout_1\",\n\t\t\t\t\t\"run\" : 140\n\t\t\t\t},\n\n\t\t\t\t\"p2\" : {\n\t\t\t\t\t\"lock\" : \"ispout_1_mutex\",\n\t\t\t\t\t\"wait\" : { \"ref\" : \"ispout_1_queue\", \"mutex\": \"ispout_1_mutex\" 
},\n\t\t\t\t\t\"unlock\" : \"ispout_1_mutex\",\n\t\t\t\t\t\"sleep\" : 30,\n\t\t\t\t\t\"lock1\" : \"ispout_1_mutex\",\n\t\t\t\t\t\"wait1\" : { \"ref\" : \"ispout_1_queue\", \"mutex\": \"ispout_1_mutex\" },\n\t\t\t\t\t\"unlock1\" : \"ispout_1_mutex\",\n\t\t\t\t\t\"run\" : 1500\n\t\t\t\t},\n\n\t\t\t\t\"p3\" : {\n\t\t\t\t\t\"resume\" : \"fork1\"\n\t\t\t\t},\n\n\t\t\t\t\"p4\" : {\n\t\t\t\t\t\"resume\" : \"fork2\"\n\t\t\t\t},\n\n\t\t\t\t\"p5\" : {\n\t\t\t\t\t\"resume\" : \"fork3\"\n\t\t\t\t}\n\t\t\t}\n\t\t},\n\n\t\t\"ispout_2\" : {\n\t\t\t\"loop\" : -1,\n\t\t\t\"phases\" : {\n\t\t\t\t\"p1\" : {\n\t\t\t\t\t\"suspend\" :  \"ispout_2\",\n\t\t\t\t\t\"run\" : 140\n\t\t\t\t},\n\n\t\t\t\t\"p2\" : {\n\t\t\t\t\t\"lock\" : \"ispout_2_mutex\",\n\t\t\t\t\t\"wait\" : { \"ref\" : \"ispout_2_queue\", \"mutex\": \"ispout_2_mutex\" },\n\t\t\t\t\t\"unlock\" : \"ispout_2_mutex\",\n\t\t\t\t\t\"run\" : 350,\n\t\t\t\t\t\"resume\" : \"SceneDetection\",\n\t\t\t\t\t\"run1\" : 200\n\t\t\t\t},\n\n\t\t\t\t\"p3\" : {\n\t\t\t\t\t\"resume\" : \"FaceDetection\"\n\t\t\t\t},\n\n\t\t\t\t\"p4\" : {\n\t\t\t\t\t\"resume\" : \"Misc\",\n\t\t\t\t\t\"resume\" : \"LogdWriter\",\n\t\t\t\t},\n\n\t\t\t\t\"p5\" : {\n\t\t\t\t\t\"resume\" : \"AssitAF\",\n\t\t\t\t\t\"sleep\" : 20,\n\t\t\t\t\t\"run\" : 600\n\t\t\t\t}\n\t\t\t}\n\t\t},\n\n\t\t\"ispout_6\" : {\n\t\t\t\"loop\" : -1,\n\t\t\t\"phases\" : {\n\t\t\t\t\"p1\" : {\n\t\t\t\t\t\"suspend\" :  \"ispout_6\",\n\t\t\t\t\t\"run\" : 1000\n\t\t\t\t},\n\n\t\t\t\t\"p2\" : {\n\t\t\t\t\t\"resume\" : \"ev2\",\n\t\t\t\t\t\"run\" : 400\n\t\t\t\t},\n\n\t\t\t\t\"p3\" : {\n\t\t\t\t\t\"lock\" : \"ispout_8_mutex\",\n\t\t\t\t\t\"signal\" : \"ispout_8_queue\",\n\t\t\t\t\t\"unlock\" : \"ispout_8_mutex\"\n\t\t\t\t},\n\n\t\t\t\t\"p4\" : {\n\t\t\t\t\t\"lock\" : \"ispout_2_mutex\",\n\t\t\t\t\t\"signal\" : \"ispout_2_queue\",\n\t\t\t\t\t\"unlock\" : \"ispout_2_mutex\",\n\t\t\t\t\t\"resume\" : \"pl2\"\n\t\t\t\t},\n\n\t\t\t\t\"p5\" : {\n\t\t\t\t\t\"lock\" : 
\"ispout_1_mutex\",\n\t\t\t\t\t\"signal\" : \"ispout_1_queue\",\n\t\t\t\t\t\"unlock\" : \"ispout_1_mutex\",\n\t\t\t\t\t\"run\" : 150,\n\t\t\t\t\t\"lock1\" : \"ispout_1_mutex\",\n\t\t\t\t\t\"signal1\" : \"ispout_1_queue\",\n\t\t\t\t\t\"unlock1\" : \"ispout_1_mutex\",\n\t\t\t\t\t\"resume\" : \"LogdWriter\",\n\t\t\t\t}\n\t\t\t}\n\t\t},\n\n\t\t\"ispout_8\" : {\n\t\t\t\"loop\" : -1,\n\t\t\t\"phases\" : {\n\t\t\t\t\"p1\" : {\n\t\t\t\t\t\"suspend\" :  \"ispout_8\",\n\t\t\t\t\t\"run\" : 140\n\t\t\t\t},\n\n\t\t\t\t\"p2\" : {\n\t\t\t\t\t\"lock\" : \"ispout_8_mutex\",\n\t\t\t\t\t\"wait\" : { \"ref\" : \"ispout_8_queue\", \"mutex\": \"ispout_8_mutex\" },\n\t\t\t\t\t\"unlock\" : \"ispout_8_mutex\",\n\t\t\t\t\t\"run\" : 300,\n\t\t\t\t\t\"resume\" : \"ispreproc\"\n\t\t\t\t}\n\t\t\t}\n\t\t},\n\n\t\t\"ispin_1\" : {\n\t\t\t\"loop\" : -1,\n\t\t\t\"suspend\" :  \"ispin_1\",\n\t\t\t\"run\" : 180,\n\t\t\t\"resume\" : \"isp_0\",\n\n\t\t\t\"lock\" : \"ispin_1_mutex\",\n\t\t\t\"wait\" : { \"ref\" : \"ispin_1_queue\", \"mutex\": \"ispin_1_mutex\" },\n\t\t\t\"unlock\" : \"ispin_1_mutex\",\n\t\t\t\"run\" : 50,\n\t\t},\n\n\t\t\"ispin_2\" : {\n\t\t\t\"loop\" : -1,\n\t\t\t\"suspend\" :  \"ispin_2\",\n\t\t\t\"run\" : 50,\n\t\t},\n\n\n\t\t\"ispin_8\" : {\n\t\t\t\"loop\" : -1,\n\t\t\t\"suspend\" :  \"ispin_8\",\n\t\t\t\"run\" : 125,\n\t\t},\n\n\t\t\"isp_0\" : {\n\t\t\t\"loop\" : -1,\n\t\t\t\"suspend\" :  \"ispin_1\",\n\t\t\t\"run\" : 1500,\n\n\t\t\t\"lock\" : \"ispin_1_mutex\",\n\t\t\t\"signal\" : \"ispin_1_queue\",\n\t\t\t\"unlock\" : \"ispin_1_mutex\",\n\n\t\t\t\"resume\" : \"ispin_2\",\n\n\n\t\t\t\"run\" : 50,\n\n\t\t\t\"resume\" : \"ispin_8\",\n\t\t},\n\n\t\t\"SceneDetection\" : {\n\t\t\t\"loop\" : -1,\n\t\t\t\"phases\" : {\n\t\t\t\t\"p1\" : {\n\t\t\t\t\t\"suspend\" :  \"SceneDetection\",\n\t\t\t\t\t\"run\" : 3500,\n\t\t\t\t},\n\n\t\t\t\t\"p2\" : {\n\t\t\t\t\t\"resume\" : \"LogdWriter\",\n\t\t\t\t\t\"lock\" : \"ev2_mutex\",\n\t\t\t\t\t\"signal\" : \"ev2_queue\",\n\t\t\t\t\t\"unlock\" : 
\"ev2_mutex\"\n\t\t\t\t},\n\n\t\t\t\t\"p3\" : {\n\t\t\t\t\t\"suspend\" :  \"SceneDetection\",\n\t\t\t\t\t\"run\" : 6500,\n\t\t\t\t},\n\n\t\t\t\t\"p4\" : {\n\t\t\t\t\t\"resume\" : \"LogdWriter\",\n\t\t\t\t\t\"lock\" : \"ev2_mutex\",\n\t\t\t\t\t\"signal\" : \"ev2_queue\",\n\t\t\t\t\t\"unlock\" : \"ev2_mutex\"\n\t\t\t\t}\n\t\t\t}\n\t\t},\n\n\t\t\"FaceDetection\" : {\n\t\t\t\"loop\" : -1,\n\n\t\t\t\"phases\" : {\n\t\t\t\t\"p1\" : {\n\t\t\t\t\t\"suspend\" : \"FaceDetection\",\n\t\t\t\t\t\"run\" : 5736,\n\t\t\t\t\t\"resume\" : \"LogdWriter\",\n\t\t\t\t},\n\n\t\t\t\t\"p2\" : {\n\t\t\t\t\t\"suspend\" : \"FaceDetection\",\n\t\t\t\t\t\"run\" : 7626,\n\t\t\t\t\t\"resume\" : \"LogdWriter\",\n\t\t\t\t},\n\n\t\t\t\t\"p3\" : {\n\t\t\t\t\t\"suspend\" : \"FaceDetection\",\n\t\t\t\t\t\"run\" : 2405,\n\t\t\t\t\t\"resume\" : \"LogdWriter\",\n\t\t\t\t},\n\n\t\t\t\t\"p4\" : {\n\t\t\t\t\t\"suspend\" : \"FaceDetection\",\n\t\t\t\t\t\"run\" : 8184,\n\t\t\t\t\t\"resume\" : \"LogdWriter\",\n\t\t\t\t}\n\t\t\t}\n\t\t},\n\n\t\t\"ispreproc\" : {\n\t\t\t\"loop\" : -1,\n\t\t\t\"phases\" : {\n\t\t\t\t\"p1\" : {\n\t\t\t\t\t\"suspend\" : \"ispreproc\",\n\t\t\t\t\t\"run\" : 150\n\t\t\t\t},\n\n\t\t\t\t\"p2\" : {\n\t\t\t\t\t\"lock\" : \"ispreproc_mutex\",\n\t\t\t\t\t\"wait\" : { \"ref\" : \"ispreproc_queue\", \"mutex\": \"ispreproc_mutex\" },\n\t\t\t\t\t\"unlock\" : \"ispreproc_mutex\",\n\t\t\t\t\t\"run\" : 150\n\t\t\t\t}\n\t\t\t}\n\t\t},\n\n\t\t\"ev2\" : {\n\t\t\t\"loop\" : -1,\n\n\t\t\t\"phases\" : {\n\t\t\t\t\"p1\" : {\n\t\t\t\t\t\"suspend\" : \"ev2\",\n\t\t\t\t\t\"run\" : 260\n\t\t\t\t},\n\n\t\t\t\t\"p2\" : {\n\t\t\t\t\t\"resume\" : \"ispreq\",\n\t\t\t\t\t\"run\" : 260,\n\t\t\t\t\t\"resume\" : \"LogdWriter\",\n\t\t\t\t},\n\n\t\t\t\t\"p3\" : {\n\t\t\t\t\t\"lock\" : \"ev2_mutex\",\n\t\t\t\t\t\"wait\" : { \"ref\" : \"ev2_queue\", \"mutex\": \"ev2_mutex\" },\n\t\t\t\t\t\"unlock\" : \"ev2_mutex\",\n\t\t\t\t\t\"run\" : 140\n\t\t\t\t},\n\n\t\t\t\t\"p3\" : {\n\t\t\t\t\t\"lock\" : 
\"ispreproc_mutex\",\n\t\t\t\t\t\"signal\" : \"ispreproc_queue\",\n\t\t\t\t\t\"unlock\" : \"ispreproc_mutex\",\n\t\t\t\t\t\"run\" : 110\n\t\t\t\t}\n\t\t\t}\n\t\t},\n\n\t\t\"Misc\" : {\n\t\t\t\"loop\" : -1,\n\n\t\t\t\"phases\" : {\n\t\t\t\t\"p1\" : {\n\t\t\t\t\t\"suspend\" : \"Misc\",\n\t\t\t\t       \t\"run\" : 178\n\t\t\t\t}\n\t\t\t}\n\t\t},\n\n\t\t\"AssitAF\" : {\n\t\t\t\"loop\" : -1,\n\n\t\t\t\"phases\" : {\n\t\t\t\t\"p1\" : {\n\t\t\t\t\t\"suspend\" : \"AssitAF\",\n\t\t\t\t\t\"run\" : 178\n\t\t\t\t}\n\t\t\t}\n\t\t},\n\n\t\t\"ispreq\" : {\n\t\t\t\"loop\" : -1,\n\n\t\t\t\"phases\" : {\n\t\t\t\t\"p1\" : {\n\t\t\t\t\t\"suspend\" : \"ispreq\",\n\t\t\t\t\t\"run\" : 180\n\t\t\t\t}\n\t\t\t}\n\t\t},\n\n\t\t\"pl2\" : {\n\t\t\t\"loop\" : -1,\n\n\t\t\t\"phases\" : {\n\t\t\t\t\"p1\" : {\n\t\t\t\t\t\"suspend\" : \"pl2\",\n\t\t\t\t\t\"run\" : 285,\n\t\t\t\t\t\"resume\" : \"CameraData\"\n\t\t\t\t},\n\n\t\t\t\t\"p2\" : {\n\t\t\t\t\t\"sleep\" : 11848,\n\t\t\t\t\t\"run\" : 896\n\t\t\t\t}\n\t\t\t}\n\t\t},\n\n\t\t\"fork1\" : {\n\t\t\t\"loop\" : -1,\n\n\t\t\t\"phases\" : {\n\t\t\t\t\"p1\" : {\n\t\t\t\t\t\"suspend\" : \"fork1\",\n\t\t\t\t\t\"run\" : 182\n\t\t\t\t},\n\n\t\t\t\t\"p2\" : {\n\t\t\t\t\t\"resume\" : \"ThumbnailBase1\"\n\t\t\t\t}\n\t\t\t}\n\t\t},\n\n\t\t\"ThumbnailBase1\" : {\n\t\t\t\"loop\" : -1,\n\n\t\t\t\"phases\" : {\n\t\t\t\t\"p1\" : {\n\t\t\t\t\t\"suspend\" : \"ThumbnailBase1\",\n\t\t\t\t\t\"run\" : 7000,\n\t\t\t\t\t\"resume\" : \"LogdWriter\",\n\t\t\t\t}\n\t\t\t}\n\t\t},\n\n\t\t\"fork2\" : {\n\t\t\t\"loop\" : -1,\n\n\t\t\t\"phases\" : {\n\t\t\t\t\"p1\" : {\n\t\t\t\t\t\"suspend\" : \"fork2\",\n\t\t\t\t\t\"run\" : 82\n\t\t\t\t},\n\n\t\t\t\t\"p2\" : {\n\t\t\t\t\t\"resume\" : \"ThumbnailBase2\"\n\t\t\t\t}\n\t\t\t}\n\t\t},\n\n\t\t\"ThumbnailBase2\" : {\n\t\t\t\"loop\" : -1,\n\n\t\t\t\"phases\" : {\n\t\t\t\t\"p1\" : {\n\t\t\t\t\t\"suspend\" : \"ThumbnailBase2\",\n\t\t\t\t\t\"run\" : 6400,\n\t\t\t\t\t\"resume\" : \"LogdWriter\",\n\t\t\t\t}\n\t\t\t}\n\t\t},\n\n\t\t\"fork3\" : 
{\n\t\t\t\"loop\" : -1,\n\n\t\t\t\"phases\" : {\n\t\t\t\t\"p1\" : {\n\t\t\t\t\t\"suspend\" : \"fork3\",\n\t\t\t\t\t\"run\" : 82\n\t\t\t\t},\n\n\t\t\t\t\"p2\" : {\n\t\t\t\t\t\"resume\" : \"ThumbnailBase3\"\n\t\t\t\t}\n\t\t\t}\n\t\t},\n\n\t\t\"ThumbnailBase3\" : {\n\t\t\t\"loop\" : -1,\n\n\t\t\t\"phases\" : {\n\t\t\t\t\"p1\" : {\n\t\t\t\t\t\"suspend\" : \"ThumbnailBase3\",\n\t\t\t\t\t\"run\" : 7361\n\t\t\t\t}\n\t\t\t}\n\t\t},\n\n\t\t\"SensorService\" : {\n\t\t\t\"loop\" : -1,\n\t\t\t\"phases\" : {\n\t\t\t\t\"p1\" : {\n\t\t\t\t\t\"run\" : 300,\n\t\t\t\t\t\"resume\" : \"m_camera\",\n\t\t\t\t\t\"timer\" : { \"ref\" : \"timerB\", \"period\" : 3000 }\n\t\t\t\t},\n\n\t\t\t\t\"p2\" : {\n\t\t\t\t\t\"run\" : 300,\n\t\t\t\t\t\"timer\" : { \"ref\" : \"timerB\", \"period\" : 3000 }\n\t\t\t\t},\n\n\t\t\t\t\"p3\" : {\n\t\t\t\t\t\"run\" : 300,\n\t\t\t\t\t\"timer\" : { \"ref\" : \"timerB\", \"period\" : 3000 }\n\t\t\t\t},\n\n\t\t\t\t\"p4\" : {\n\t\t\t\t\t\"run\" : 300,\n\t\t\t\t\t\"resume\" : \"m_camera\",\n\t\t\t\t\t\"timer\" : { \"ref\" : \"timerB\", \"period\" : 3000 }\n\t\t\t\t}\n\t\t\t}\n\t\t},\n\n\t\t\"DisplaySync\" : {\n\t\t\t\"loop\" : -1,\n\t\t\t\"phases\" : {\n\t\t\t\t\"p1\" : {\n\t\t\t\t\t\"run\" : 180\n\t\t\t\t},\n\n\t\t\t\t\"p2\" : {\n\t\t\t\t\t\"resume\" : \"EventThread1\"\n\t\t\t\t},\n\n\t\t\t\t\"p3\" : {\n\t\t\t\t\t\"resume\" : \"EventThread2\"\n\t\t\t\t},\n\n\t\t\t\t\"p4\" : {\n\t\t\t\t\t\"sleep\" : 30,\n\t\t\t\t\t\"run\" : 120,\n\t\t\t\t\t\"timer\" : { \"ref\" : \"timerB\", \"period\" : 16667 }\n\t\t\t\t}\n\t\t\t}\n\t\t},\n\n\n\t\t\"EventThread1\" : {\n\t\t\t\"loop\" : -1,\n\n\t\t\t\"phases\" : {\n\n\t\t\t\t\"p1\" : {\n\t\t\t\t\t\"suspend\" : \"EventThread1\",\n\t\t\t\t\t\"run\" : 200,\n\t\t\t\t\t\"resume\" : \"m_camera\",\n\t\t\t\t\t\"run1\" : 280,\n\t\t\t\t\t\"resume\": \"surfaceflinger\",\n\t\t\t\t},\n\n\t\t\t\t\"p2\" : {\n\t\t\t\t\t\"sleep\" : 660,\n\t\t\t\t\t\"run\" : 300,\n\t\t\t\t\t\"sleep1\" : 60,\n\t\t\t\t\t\"run1\" : 
150\n\t\t\t\t}\n\t\t\t}\n\t\t},\n\n\t\t\"EventThread2\" : {\n\t\t\t\"loop\" : -1,\n\n\t\t\t\"phases\" : {\n\t\t\t\t\"p1\" : {\n\t\t\t\t\t\"suspend\" : \"EventThread2\",\n\t\t\t\t\t\"run\" : 150\n\t\t\t\t}\n\t\t\t}\n\t\t},\n\n\t\t\"m_camera\" : {\n\t\t\t\"loop\" : -1,\n\n\t\t\t\"phases\" : {\n\t\t\t\t\"p1\" : {\n\t\t\t\t\t\"suspend\" : \"m_camera\",\n\t\t\t\t\t\"run\" : 660\n\t\t\t\t},\n\n\t\t\t\t\"p2\" : {\n\t\t\t\t\t\"lock\" : \"render_thread_mutex\",\n\t\t\t\t\t\"signal\" : \"render_thread_queue\",\n\t\t\t\t\t\"unlock\" : \"render_thread_mutex\"\n\t\t\t\t}\n\t\t\t}\n\t\t},\n\n\t\t\"RenderThread\" : {\n\t\t\t\"loop\" : -1,\n\n\t\t\t\"lock\" : \"render_thread_mutex\",\n\t\t\t\"wait\" : { \"ref\" : \"render_thread_queue\", \"mutex\": \"render_thread_mutex\" },\n\t\t\t\"unlock\" : \"render_thread_mutex\",\n\n\t\t\t\"run\" : 300,\n\t\t\t\"resume\" :  \"mali-render\",\n\t\t\t\"sleep\" : 240,\n\t\t\t\"run1\" : 1000,\n\t\t\t\"sleep1\" : 210,\n\t\t\t\"run2\" : 1040,\n\t\t\t\"sleep2\" : 580,\n\t\t\t\"run3\" : 350\n\n\t\t},\n\n\t\t\"MaliRender\" : {\n\t\t\t\"loop\" : -1,\n\n\t\t\t\"suspend\" : \"mali-render\",\n\t\t\t\"run\" : 250,\n\t\t\t\"resume\" :  \"mali-render-hnd\",\n\t\t\t\"sleep\" : 20,\n\t\t\t\"run1\" : 160,\n\t\t\t\"sleep1\" : 1373,\n\t\t\t\"run2\" : 250,\n\t\t\t\"resume2\" :  \"mali-render-hnd\",\n\t\t\t\"sleep2\" : 20,\n\t\t\t\"run3\" : 250,\n\t\t\t\"sleep3\" : 568,\n\t\t\t\"run4\" : 500,\n\t\t\t\"sleep4\" : 30,\n\t\t\t\"run5\" : 300,\n\t\t\t\"resume5\" :  \"mali-render-hnd\",\n\t\t\t\"sleep5\" : 200,\n\t\t\t\"run6\" : 120,\n\n\t\t\t\"resume\": \"surfaceflinger\",\n\t\t},\n\n\t\t\"MaliRenderHnd\" : {\n\t\t\t\"loop\" : -1,\n\t\t\t\"suspend\" : \"mali-render-hnd\",\n\t\t\t\"run\" : 150\n\t\t},\n\n\t\t\"AudioTick\" : {\n\t\t\t\"priority\" : -19,\n\t\t\t\"loop\" : -1,\n\t\t\t\"phases\" : {\n\t\t\t\t\"p1\" : {\n\t\t\t\t\t\"loop\" : 1,\n\t\t\t\t\t\"run\" : 150,\n\t\t\t\t\t\"resume\" : \"AudioIn\",\n\t\t\t\t\t\"timer\" :  { \"ref\" : \"tick\", \"period\": 20000 
}\n\t\t\t\t}\n\t\t\t}\n\t\t},\n\n\t\t\"AudioIn\" : {\n\t\t\t\"loop\" : -1,\n\t\t\t\"suspend\" : \"AudioIn\",\n\t\t\t\"run\" : 2730,\n\t\t\t\"resume\" : \"AudioRecord\"\n\t\t},\n\n\t\t\"AudioRecord\" : {\n\t\t\t\"loop\" : -1,\n\t\t\t\"suspend\" : \"AudioRecord\",\n\t\t\t\"resume\" : \"pull_looper\",\n\t\t\t\"sleep\" : 2600,\n\t\t\t\"resume1\" : \"pull_looper\"\n\t\t},\n\n\t\t\"pull_looper\" : {\n\t\t\t\"loop\" : -1,\n\t\t\t\"suspend\" : \"pull_looper\",\n\n\t\t\t\"lock\" : \"mutex\",\n\t\t\t\"signal\" : \"queue\",\n\t\t\t\"unlock\" : \"mutex\"\n\t\t},\n\n\t\t\"recoder_looper\" : {\n\t\t\t\"loop\" : -1,\n\t\t\t\"lock\" : \"mutex\",\n\t\t\t\"signal\" : \"queue\",\n\t\t\t\"wait\" : { \"ref\" : \"queue\", \"mutex\": \"mutex\" },\n\t\t\t\"unlock\" : \"mutex\",\n\t\t\t\"run\" : 180,\n\n\t\t\t\"lock1\" : \"mutex\",\n\t\t\t\"signal1\" : \"queue\",\n\t\t\t\"wait1\" : { \"ref\" : \"queue\", \"mutex\": \"mutex\" },\n\t\t\t\"unlock1\" : \"mutex\",\n\t\t\t\"run1\" : 130,\n\n\t\t\t\"resume\" : \"gle.acc.encoder\",\n\n\t\t\t\"lock2\" : \"mutex\",\n\t\t\t\"signal2\" : \"queue\",\n\t\t\t\"wait2\" : { \"ref\" : \"queue\", \"mutex\": \"mutex\" },\n\t\t\t\"unlock2\" : \"mutex\",\n\t\t\t\"run2\" : 130\n\t\t},\n\n\t\t\"codec_looper\" : {\n\t\t\t\"loop\" : -1,\n\t\t\t\"lock\" : \"mutex\",\n\t\t\t\"wait\" : { \"ref\" : \"queue\", \"mutex\": \"mutex\" },\n\t\t\t\"unlock\" : \"mutex\",\n\t\t\t\"run\" :  130,\n\n\t\t\t\"lock1\" : \"mutex\",\n\t\t\t\"signal\" : \"queue\",\n\t\t\t\"unlock1\" : \"mutex\",\n\t\t\t\"run1\" :  180,\n\n\t\t\t\"suspend\" : \"codec_looper\",\n\t\t\t\"run2\" :  160,\n\t\t\t\"lock2\" : \"mutex\",\n\t\t\t\"signal2\" : \"queue\",\n\t\t\t\"unlock2\" : \"mutex\"\n\t\t},\n\n\t\t\"gle.acc.encoder\" : {\n\t\t\t\"loop\" : -1,\n\t\t\t\"suspend\" : \"gle.acc.encoder\",\n\t\t\t\"run\" : 20,\n\t\t\t\"resume\" : \"OMXCall\",\n\n\t\t\t\"suspend1\" : \"gle.acc.encoder\",\n\t\t\t\"run1\" : 800,\n\t\t\t\"resume1\" : \"OMXCall\"\n\t\t},\n\n\t\t\"OMXCall\" : {\n\t\t\t\"loop\" : 
-1,\n\t\t\t\"suspend\" : \"OMXCall\",\n\t\t\t\"run\" :  130,\n\t\t\t\"resume\" : \"codec_looper\"\n\t\t},\n\n\t\t\"CameraData\" : {\n\t\t\t\"loop\" : -1,\n\t\t\t\"suspend\" : \"CameraData\",\n\t\t\t\"run\" :  2000,\n\t\t\t\"lock\" : \"camera_data_mutex\",\n\t\t\t\"signal\" : \"camera_data_queue\",\n\t\t\t\"unlock\" : \"camera_data_mutex\",\n\n\t\t\t\"resume\" : \"Binder1\",\n\t\t\t\"resume1\" : \"Binder2\",\n\t\t\t\"run1\" :  2080,\n\n\t\t\t\"lock1\" : \"camera_data_mutex\",\n\t\t\t\"signal1\" : \"camera_data_queue\",\n\t\t\t\"unlock1\" : \"camera_data_mutex\",\n\n\t\t\t\"resume2\" : \"Binder3\"\n\t\t},\n\n\t\t\"CameraDataProc\" : {\n\t\t\t\"loop\" : -1,\n\t\t\t\"lock\" : \"camera_data_mutex\",\n\t\t\t\"wait\" : { \"ref\" : \"camera_data_queue\", \"mutex\": \"camera_data_mutex\" },\n\t\t\t\"unlock\" : \"camera_data_mutex\",\n\t\t\t\"run\" :  150,\n\n\t\t\t\"lock1\" : \"camera_stream1_mutex\",\n\t\t\t\"signal\" : \"camera_stream1_queue\",\n\t\t\t\"unlock1\" : \"camera_stream1_mutex\",\n\n\t\t\t\"lock2\" : \"camera_data_mutex\",\n\t\t\t\"wait2\" : { \"ref\" : \"camera_data_queue\", \"mutex\": \"camera_data_mutex\" },\n\t\t\t\"unlock2\" : \"camera_data_mutex\",\n\t\t\t\"run2\" :  1000,\n\n\t\t\t\"resume\" : \"Binder1\"\n\t\t},\n\n\t\t\"CameraStream1\" : {\n\t\t\t\"loop\" : -1,\n\t\t\t\"lock\" : \"camera_stream1_mutex\",\n\t\t\t\"wait\" : { \"ref\" : \"camera_stream1_queue\", \"mutex\": \"camera_stream1_mutex\" },\n\t\t\t\"unlock\" : \"camera_stream1_mutex\",\n\t\t\t\"run\" :  240,\n\n\t\t\t\"resume\" : \"Binder1\",\n\n\t\t\t\"lock1\" : \"camera_stream2_mutex\",\n\t\t\t\"signal\" : \"camera_stream2_queue\",\n\t\t\t\"unlock1\" : \"camera_stream2_mutex\",\n\n\t\t\t\"sleep\" : 2500,\n\t\t\t\"run1\" :  240,\n\n\t\t\t\"lock2\" : \"camera_stream3_mutex\",\n\t\t\t\"signal2\" : \"camera_stream3_queue\",\n\t\t\t\"unlock2\" : \"camera_stream3_mutex\",\n\n\t\t\t\"resume\" : \"LogdWriter\",\n\n\t\t\t\"lock3\" : \"camera_req_mutex\",\n\t\t\t\"signal3\" : 
\"camera_req_queue\",\n\t\t\t\"unlock3\" : \"camera_req_mutex\"\n\t\t},\n\n\t\t\"CameraStream2\" : {\n\t\t\t\"loop\" : -1,\n\t\t\t\"lock\" : \"camera_stream2_mutex\",\n\t\t\t\"wait\" : { \"ref\" : \"camera_stream2_queue\", \"mutex\": \"camera_stream2_mutex\" },\n\t\t\t\"unlock\" : \"camera_stream2_mutex\",\n\t\t\t\"run\" :  180,\n\t\t\t\"sleep\" : 2500,\n\t\t\t\"run1\" :  240,\n\t\t\t\"sleep1\" : 850,\n\t\t\t\"run2\" :  90,\n\n\t\t\t\"resume\" : \"Binder1\",\n\n\t\t\t\"lock2\" : \"camera_req_mutex\",\n\t\t\t\"signal\" : \"camera_req_queue\",\n\t\t\t\"unlock2\" : \"camera_req_mutex\",\n\n\t\t\t\"resume\" : \"LogdWriter\",\n\t\t},\n\n\t\t\"CameraStream3\" : {\n\t\t\t\"loop\" : -1,\n\t\t\t\"lock\" : \"camera_stream3_mutex\",\n\t\t\t\"wait\" : { \"ref\" : \"camera_stream3_queue\", \"mutex\": \"camera_stream3_mutex\" },\n\t\t\t\"unlock\" : \"camera_stream3_mutex\",\n\t\t\t\"run\" :  90,\n\n\t\t\t\"lock1\" : \"eb_mutex\",\n\t\t\t\"signal\" : \"eb_queue\",\n\t\t\t\"unlock1\" : \"eb_mutex\",\n\n\t\t\t\"resume\" : \"LogdWriter\",\n\t\t},\n\n\t\t\"CameraReqQueue\" : {\n\t\t\t\"loop\" : -1,\n\t\t\t\"lock\" : \"camera_req_mutex\",\n\t\t\t\"wait\" : { \"ref\" : \"camera_req_queue\", \"mutex\": \"camera_req_mutex\" },\n\t\t\t\"unlock\" : \"camera_req_mutex\",\n\t\t\t\"run\" :  200,\n\n\t\t\t\"lock1\" : \"camera_req_mutex\",\n\t\t\t\"wait1\" : { \"ref\" : \"camera_req_queue\", \"mutex\": \"camera_req_mutex\" },\n\t\t\t\"unlock1\" : \"camera_req_mutex\",\n\t\t\t\"run1\" :  200,\n\n\t\t\t\"resume\" : \"Binder3\",\n\n\t\t\t\"sleep\" : 120,\n\t\t\t\"run2\" : 200,\n\n\t\t\t\"resume2\" : \"Binder2\",\n\n\t\t\t\"sleep2\" : 1900,\n\t\t\t\"run3\" : 270,\n\t\t\t\"lock3\" : \"camera_s0_mutex\",\n\t\t\t\"signal\" : \"camera_s0_queue\",\n\t\t\t\"unlock3\" : \"camera_s0_mutex\",\n\n\t\t\t\"resume3\" : \"Binder1\",\n\n\t\t\t\"sleep3\" : 560,\n\t\t\t\"run4\" : 700,\n\t\t\t\"lock4\" : \"camera_s1_mutex\",\n\t\t\t\"signal4\" : \"camera_s1_queue\",\n\t\t\t\"unlock4\" : 
\"camera_s1_mutex\",\n\n\t\t\t\"resume\" : \"LogdWriter\",\n\n\t\t\t\"sleep4\" : 533,\n\t\t\t\"run5\" : 300,\n\t\t\t\"lock5\" : \"camera_s2_mutex\",\n\t\t\t\"signal5\" : \"camera_s2_queue\",\n\t\t\t\"unlock5\" : \"camera_s2_mutex\"\n\t\t},\n\n\t\t\"CameraS0\" : {\n\t\t\t\"loop\" : -1,\n\t\t\t\"lock\" : \"camera_s0_mutex\",\n\t\t\t\"wait\" : { \"ref\" : \"camera_s0_queue\", \"mutex\": \"camera_s0_mutex\" },\n\t\t\t\"unlock\" : \"camera_s0_mutex\",\n\t\t\t\"run\" :  300,\n\n\t\t\t\"lock1\" : \"camera_s1_mutex\",\n\t\t\t\"wait1\" : { \"ref\" : \"camera_s1_queue\", \"mutex\": \"camera_s1_mutex\" },\n\t\t\t\"unlock1\" : \"camera_s1_mutex\",\n\t\t\t\"run1\" :  300,\n\n\t\t\t\"lock2\" : \"camera_s2_mutex\",\n\t\t\t\"wait2\" : { \"ref\" : \"camera_s2_queue\", \"mutex\": \"camera_s2_mutex\" },\n\t\t\t\"unlock2\" : \"camera_s2_mutex\",\n\t\t\t\"run2\" :  400,\n\n\t\t\t\"sleep\" : 900,\n\t\t\t\"run3\" :  380,\n\t\t\t\"sleep3\" : 250,\n\t\t\t\"run4\" :  278\n\t\t},\n\n\t\t\"EmptyBuffer\" : {\n\t\t\t\"loop\" : -1,\n\t\t\t\"lock\" : \"eb_mutex\",\n\t\t\t\"wait\" : { \"ref\" : \"eb_queue\", \"mutex\": \"eb_mutex\" },\n\t\t\t\"unlock\" : \"eb_mutex\",\n\t\t\t\"run\" :  240,\n\n\t\t\t\"lock1\" : \"encb_mutex\",\n\t\t\t\"signal\" : \"encb_queue\",\n\t\t\t\"unlock1\" : \"encb_mutex\"\n\t\t},\n\n\t\t\"EncodeBuffer\" : {\n\t\t\t\"loop\" : -1,\n\t\t\t\"lock\" : \"encb_mutex\",\n\t\t\t\"wait\" : { \"ref\" : \"encb_queue\", \"mutex\": \"encb_mutex\" },\n\t\t\t\"unlock\" : \"encb_mutex\",\n\t\t\t\"run\" :  370,\n\n\t\t\t\"lock1\" : \"fb_mutex\",\n\t\t\t\"signal\" : \"fb_queue\",\n\t\t\t\"wait1\" : { \"ref\" : \"fb_queue\", \"mutex\": \"fb_mutex\" },\n\t\t\t\"unlock1\" : \"fb_mutex\",\n\t\t\t\"run1\" :  350\n\t\t},\n\n\t\t\"FillBuffer\" : {\n\t\t\t\"loop\" : -1,\n\t\t\t\"lock\" : \"fb_mutex\",\n\t\t\t\"wait\" : { \"ref\" : \"fb_queue\", \"mutex\": \"fb_mutex\" },\n\t\t\t\"unlock\" : \"fb_mutex\",\n\t\t\t\"run\" :  200,\n\n\t\t\t\"sleep\" : 14800,\n\t\t\t\"run1\" :  2400,\n\n\t\t\t\"lock1\" 
: \"fb_mutex\",\n\t\t\t\"signal\" : \"fb_queue\",\n\t\t\t\"unlock1\" : \"fb_mutex\"\n\t\t},\n\n\t\t\"Binder1\" : {\n\t\t\t\"loop\" : -1,\n\t\t\t\"suspend\" : \"Binder1\",\n\t\t\t\"run\" :  350\n\t\t},\n\n\t\t\"Binder2\" : {\n\t\t\t\"loop\" : -1,\n\t\t\t\"suspend\" : \"Binder2\",\n\t\t\t\"run\" :  365\n\t\t},\n\n\t\t\"Binder3\" : {\n\t\t\t\"loop\" : -1,\n\t\t\t\"suspend\" : \"Binder3\",\n\t\t\t\"run\" :  369,\n\t\t},\n\n\t\t\"LogdWriter\" : {\n\t\t\t\"loop\" : -1,\n\t\t\t\"suspend\" : \"LogdWriter\",\n\t\t\t\"run\" :  300,\n\t\t\t\"resume\" : \"LogdReader\",\n\t\t},\n\n\t\t\"LogdReader\" : {\n\t\t\t\"loop\" : -1,\n\t\t\t\"suspend\" : \"LogdReader\",\n\t\t\t\"run\" :  30,\n\t\t}\n\n\t},\n\n\t\"global\" : {\n\t\t\"default_policy\" : \"SCHED_OTHER\",\n\t\t\"duration\" : 6,\n\t\t\"ftrace\" : false,\n\t\t\"gnuplot\" : false,\n\t\t\"logdir\" : \"./\",\n\t\t\"log_basename\" : \"camera\",\n\t\t\"lock_pages\" : true,\n\t\t\"frag\" : 1,\n\t\t\"calibration\" : \"CPU0\"\n\t}\n}\n\n"
  },
  {
    "path": "wa/workloads/rt_app/use_cases/mp3-long.json",
    "content": "{\n\t\"tasks\" : {\n\t\t\"AudioTick\" : {\n\t\t\t\"priority\" : -19,\n\t\t\t\"loop\" : -1,\n\t\t\t\"cpus\" : [0],\n\t\t\t\"phases\" : {\n\t\t\t\t\"p1\" : {\n\t\t\t\t\t\"loop\" : 1,\n\t\t\t\t\t\"resume\" : \"AudioOut\",\n\t\t\t\t\t\"timer\" :  { \"ref\" : \"tick\", \"period\": 6000 }\n\t\t\t\t},\n\t\t\t\t\"p2\" : {\n\t\t\t\t\t\"loop\" : 4,\n\t\t\t\t\t\"timer\" :  { \"ref\" : \"tick\", \"period\": 6000 }\n\t\t\t\t}\n\t\t\t}\n\t\t},\n\t\t\"AudioOut\" : {\n\t\t\t\"priority\" : -19,\n\t\t\t\"loop\" : -1,\n\t\t\t\"run\" :  275,\n\t\t\t\"resume\" : \"AudioTrack\",\n\t\t\t\"run\" : 4725,\n\t\t\t\"suspend\" : \"AudioOut\"\n\t\t},\n\t\t\"AudioTrack\" : { \n\t\t\t\"priority\" : -16,\n\t\t\t\"loop\" : -1,\n\t\t\t\t\"suspend\" : \"AudioTrack\",\n\t\t\t\t\"run\" :  300,\n\t\t\t\t\"resume\" : \"mp3.decoder\"\n\t\t},\n\t\t\"mp3.decoder\" : {\n\t\t\t\"priority\" : -2,\n\t\t\t\"loop\" : -1,\n\t\t\t\t\"suspend\" : \"mp3.decoder\",\n\t\t\t\t\"run\" :  1000,\n\t\t\t\t\"lock\" : \"mutex\",\n\t\t\t\t\"signal\" : \"queue\",\n\t\t\t\t\"wait\" : { \"ref\" : \"queue\", \"mutex\": \"mutex\" },\n\t\t\t\t\"unlock\" : \"mutex\",\n\t\t\t\t\"run\" :  150\n\t\t},\n\t\t\"OMXCall\" : {\n\t\t\t\"priority\" : -2,\n\t\t\t\"loop\" : -1,\n\t\t\t\"lock\" : \"mutex\",\n\t\t\t\"wait\" : { \"ref\" : \"queue\", \"mutex\": \"mutex\" },\n\t\t\t\"unlock\" : \"mutex\",\n\t\t\t\"run\" :  300,\n\t\t\t\"lock\" : \"mutex\",\n\t\t\t\"signal\" : \"queue\",\n\t\t\t\"unlock\" : \"mutex\"\n\t\t}\n\t},\n\t\"global\" : {\n\t\t\"default_policy\" : \"SCHED_OTHER\",\n\t\t\"duration\" : 600,\n\t\t\"ftrace\" : false,\n\t\t\"gnuplot\" : false,\n\t\t\"logdir\" : \"./\",\n\t\t\"log_basename\" : \"mp3\",\n\t\t\"lock_pages\" : true,\n\t\t\"frag\" : 1,\n\t\t\"calibration\" : \"CPU0\"\n\t}\n}\n"
  },
  {
    "path": "wa/workloads/rt_app/use_cases/mp3-short.json",
    "content": "{\n\t\"tasks\" : {\n\t\t\"AudioTick\" : {\n\t\t\t\"priority\" : -19,\n\t\t\t\"loop\" : -1,\n\t\t\t\"cpus\" : [0],\n\t\t\t\"phases\" : {\n\t\t\t\t\"p1\" : {\n\t\t\t\t\t\"loop\" : 1,\n\t\t\t\t\t\"resume\" : \"AudioOut\",\n\t\t\t\t\t\"timer\" :  { \"ref\" : \"tick\", \"period\": 6000 }\n\t\t\t\t},\n\t\t\t\t\"p2\" : {\n\t\t\t\t\t\"loop\" : 4,\n\t\t\t\t\t\"timer\" :  { \"ref\" : \"tick\", \"period\": 6000 }\n\t\t\t\t}\n\t\t\t}\n\t\t},\n\t\t\"AudioOut\" : {\n\t\t\t\"priority\" : -19,\n\t\t\t\"loop\" : -1,\n\t\t\t\"run\" :  275,\n\t\t\t\"resume\" : \"AudioTrack\",\n\t\t\t\"run\" : 4725,\n\t\t\t\"suspend\" : \"AudioOut\"\n\t\t},\n\t\t\"AudioTrack\" : { \n\t\t\t\"priority\" : -16,\n\t\t\t\"loop\" : -1,\n\t\t\t\t\"suspend\" : \"AudioTrack\",\n\t\t\t\t\"run\" :  300,\n\t\t\t\t\"resume\" : \"mp3.decoder\"\n\t\t},\n\t\t\"mp3.decoder\" : {\n\t\t\t\"priority\" : -2,\n\t\t\t\"loop\" : -1,\n\t\t\t\t\"suspend\" : \"mp3.decoder\",\n\t\t\t\t\"run\" :  1000,\n\t\t\t\t\"lock\" : \"mutex\",\n\t\t\t\t\"signal\" : \"queue\",\n\t\t\t\t\"wait\" : { \"ref\" : \"queue\", \"mutex\": \"mutex\" },\n\t\t\t\t\"unlock\" : \"mutex\",\n\t\t\t\t\"run\" :  150\n\t\t},\n\t\t\"OMXCall\" : {\n\t\t\t\"priority\" : -2,\n\t\t\t\"loop\" : -1,\n\t\t\t\"lock\" : \"mutex\",\n\t\t\t\"wait\" : { \"ref\" : \"queue\", \"mutex\": \"mutex\" },\n\t\t\t\"unlock\" : \"mutex\",\n\t\t\t\"run\" :  300,\n\t\t\t\"lock\" : \"mutex\",\n\t\t\t\"signal\" : \"queue\",\n\t\t\t\"unlock\" : \"mutex\"\n\t\t}\n\t},\n\t\"global\" : {\n\t\t\"default_policy\" : \"SCHED_OTHER\",\n\t\t\"duration\" : 6,\n\t\t\"ftrace\" : false,\n\t\t\"gnuplot\" : false,\n\t\t\"logdir\" : \"./\",\n\t\t\"log_basename\" : \"mp3\",\n\t\t\"lock_pages\" : true,\n\t\t\"frag\" : 1,\n\t\t\"calibration\" : \"CPU0\"\n\t}\n}\n"
  },
  {
    "path": "wa/workloads/rt_app/use_cases/spreading-tasks.json",
    "content": "{\n\t\"tasks\" : {\n\t\t\"thread1\" : {\n\t\t\t\"instance\" : 1,\n\t\t\t\"loop\" : -1,\n\t\t\t\"phases\" : {\n\t\t\t\t\"light\" : {\n\t\t\t\t\t\"loop\" : 300,\n\t\t\t\t\t\"run\" : 1000,\n\t\t\t\t\t\"timer\" : { \"ref\" : \"unique\", \"period\" : 10000 }\n\t\t\t\t},\n\t\t\t\t\"heavy\" : {\n\t\t\t\t\t\"loop\" : 300,\n\t\t\t\t\t\"run\" : 7000,\n\t\t\t\t\t\"timer\" : { \"ref\" : \"unique\", \"period\" : 10000 }\n\t\t\t\t}\n\t\t\t}\n\t\t},\n\t\t\"thread2\" : {\n\t\t\t\"instance\" : 1,\n\t\t\t\"loop\" : -1,\n\t\t\t\"phases\" : {\n\t\t\t\t\"light1\" : {\n\t\t\t\t\t\"loop\" : 900,\n\t\t\t\t\t\"run\" : 1000,\n\t\t\t\t\t\"timer\" : { \"ref\" : \"unique\", \"period\" : 10000 }\n\t\t\t\t},\n\t\t\t\t\"heavy1\" : {\n\t\t\t\t\t\"loop\" : 600,\n\t\t\t\t\t\"run\" : 7000,\n\t\t\t\t\t\"timer\" : { \"ref\" : \"unique\", \"period\" : 10000 }\n\t\t\t\t},\n                                \"light2\" : {\n                                        \"loop\" : 300,\n                                        \"run\" : 1000,\n                                        \"timer\" : { \"ref\" : \"unique\", \"period\" : 10000 }\n                                },\n                                \"heavy2\" : {\n                                        \"loop\" : 600,\n                                        \"run\" : 7000,\n                                        \"timer\" : { \"ref\" : \"unique\", \"period\" : 10000 }\n                                }\n\t\t\t}\n\t\t}\n\t},\n\t\"global\" : {\n\t\t\"duration\" : 60,\n\t\t\"default_policy\" : \"SCHED_OTHER\",\n\t\t\"calibration\" : \"CPU0\"\n\t}\n}\n\n"
  },
  {
    "path": "wa/workloads/rt_app/use_cases/taskset.json",
    "content": "{\n    \"tasks\": {\n        \"ThreadA\": {\n            \"exec\": 5000,\n            \"period\": 24000,\n            \"priority\": -19,\n            \"cpus\": [\n                0\n            ],\n            \"lock_order\": [\n                \"r0\",\n                \"trig1\"\n            ],\n            \"resources\": {\n                \"r0\": {\n                    \"duration\": 1000\n                },\n                \"trig1\": {\n                    \"duration\": 0\n                }\n            }\n        },\n        \"ThreadB\": {\n            \"priority\": -16,\n            \"phases\": {\n                \"phase1\": {\n                    \"exec\": 300,\n                    \"period\": 24000,\n                    \"sleep\": false,\n                    \"loop\": 1,\n                    \"lock_order\": [\n                        \"wait1\",\n                        \"r0\",\n                        \"trig2\"\n                    ],\n                    \"resources\": {\n                        \"wait1\": {\n                            \"duration\": 0,\n                            \"access\": [\n                                \"trig1_mutex\"\n                            ]\n                        },\n                        \"r0\": {\n                            \"duration\": 300\n                        },\n                        \"trig2\": {\n                            \"duration\": 0\n                        }\n                    }\n                },\n                \"phase2\": {\n                    \"exec\": 4000,\n                    \"period\": 24000,\n                    \"loop\": 2,\n                    \"sleep\": false,\n                    \"lock_order\": [\n                        \"wait1\",\n                        \"r0\",\n                        \"trig2\"\n                    ],\n                    \"resources\": {\n                        \"wait1\": {\n                            \"duration\": 0,\n                     
       \"access\": [\n                                \"trig1_mutex\"\n                            ]\n                        },\n                        \"r0\": {\n                            \"duration\": 300\n                        },\n                        \"trig2\": {\n                            \"duration\": 0\n                        }\n                    }\n                }\n            }\n        },\n        \"ThreadC\": {\n            \"exec\": 1150,\n            \"period\": 24000,\n            \"priority\": -2,\n            \"sleep\": false,\n            \"lock_order\": [\n                \"wait2\",\n                \"r0\",\n                \"sync3\"\n            ],\n            \"resources\": {\n                \"wait2\": {\n                    \"duration\": 0,\n                    \"access\": [\n                        \"trig2_mutex\"\n                    ]\n                },\n                \"r0\": {\n                    \"duration\": 1000\n                },\n                \"sync3\": {\n                    \"duration\": 0,\n                    \"access\": [\n                        \"trig3_mutex\"\n                    ]\n                }\n            }\n        },\n        \"ThreadD\": {\n            \"exec\": 300,\n            \"period\": 24000,\n            \"deadline\": 24000,\n            \"priority\": -2,\n            \"sleep\": false,\n            \"lock_order\": [\n                \"wait3\",\n                \"r0\",\n                \"trig3\"\n            ],\n            \"resources\": {\n                \"wait3\": {\n                    \"duration\": 0,\n                    \"access\": [\n                        \"trig3_mutex\"\n                    ]\n                },\n                \"r0\": {\n                    \"duration\": 300\n                },\n                \"trig3\": {\n                    \"duration\": 0,\n                    \"access\": [\n                        \"trig3_mutex\"\n                    ]\n             
   }\n            }\n        }\n    },\n    \"resources\": {\n        \"trig1_mutex\": {\n            \"type\": \"mutex\"\n        },\n        \"wait1\": {\n            \"type\": \"wait\"\n        },\n        \"trig1\": {\n            \"type\": \"signal\",\n            \"target\": \"wait1\"\n        },\n        \"trig2_mutex\": {\n            \"type\": \"mutex\"\n        },\n        \"wait2\": {\n            \"type\": \"wait\"\n        },\n        \"trig2\": {\n            \"type\": \"signal\",\n            \"target\": \"wait2\"\n        },\n        \"trig3_mutex\": {\n            \"type\": \"mutex\"\n        },\n        \"wait3\": {\n            \"type\": \"wait\"\n        },\n        \"trig3\": {\n            \"type\": \"signal\",\n            \"target\": \"wait3\"\n        },\n        \"sync3\": {\n            \"type\": \"sync\",\n            \"target\": \"wait3\"\n        },\n        \"r0\": {\n            \"type\": \"run\"\n        }\n    },\n    \"global\": {\n        \"default_policy\": \"SCHED_OTHER\",\n        \"duration\": 5,\n        \"ftrace\": true,\n        \"gnuplot\": false,\n        \"logdir\": \"/root/wa\",\n        \"log_basename\": \"rt-app\",\n        \"lock_pages\": true,\n        \"frag\": 1,\n        \"calibration\": \"CPU1\"\n    }\n}\n"
  },
  {
    "path": "wa/workloads/rt_app/use_cases/video-long.json",
    "content": "{\n\t\"tasks\" : {\n\t\t\"surfaceflinger\" : {\n\t\t\t\"priority\" : -7,\n\t\t\t\"loop\" : -1,\n\t\t\t\"suspend\",\n\t\t\t\"run\" : 1500\n\t\t},\n\n\t\t\"DispSync\" : {\n\t\t\t\"priority\" : -7,\n\t\t\t\"loop\" : -1,\n\t\t\t\"phases\" : {\n\t\t\t\t\"p1\" : {\n\t\t\t\t\t\"suspend\",\n\t\t\t\t\t\"run\" : 35,\n\t\t\t\t\t\"resume\" : \"EventThread\",\n\t\t\t\t\t\"run\" : 40,\n\t\t\t\t},\n\n\t\t\t\t\"p2\" : {\n\t\t\t\t\t\"loop\" : 2,\n\t\t\t\t\t\"suspend\",\n\t\t\t\t\t\"run\" : 30\n\t\t\t\t}\n\t\t\t},\n\t\t},\n\n\t\t\"hwc_eventmon\" : {\n\t\t\t\"priority\" : -19,\n\t\t\t\"loop\" : -1,\n\t\t\t\"resume\" : \"DispSync\",\n\t\t\t\"run\" : 115,\n\t\t\t\"timer\" : { \"ref\" : \"timerA\", \"period\" : 16667 }\n\t\t},\n\n\t\t\"EventThread1\" : {\n\t\t\t\"priority\" : -8,\n\t\t\t\"loop\" : -1,\n\t\t\t\"phases\" : {\n\t\t\t\t\"p1\" : {\n\t\t\t\t\t\"suspend\" : \"EventThread\",\n\t\t\t\t\t\"run\" : 25,\n\t\t\t\t\t\"resume\" : \"DispSync\",\n\t\t\t\t\t\"sleep\" : 9650,\n\t\t\t\t\t\"run\" : 70,\n\t\t\t\t\t\"resume\" : \"DispSync\",\n\t\t\t\t\t\"run\" : 80\n\t\t\t\t},\n\n\t\t\t\t\"p2\" : {\n\t\t\t\t\t\"suspend\" : \"EventThread\",\n\t\t\t\t\t\"run\" : 90,\n\t\t\t\t\t\"resume\" : \"DispSync\"\n\t\t\t\t}\n\t\t\t}\n\t\t},\n\n\t\t\"EventThread2\" : {\n\t\t\t\"priority\" : -8,\n\t\t\t\"loop\" : -1,\n\t\t\t\"phases\" : {\n\t\t\t\t\"p1\" : {\n\t\t\t\t\t\"suspend\" : \"EventThread\",\n\t\t\t\t\t\"run\" : 30,\n\t\t\t\t\t\"resume\" : \"surfaceflinger\"\n\t\t\t\t},\n\n\t\t\t\t\"p2\" : {\n\t\t\t\t\t\"suspend\" : \"EventThread\",\n\t\t\t\t\t\"run\" : 35,\n\t\t\t\t\t\"sleep\" : 2000,\n\t\t\t\t\t\"run\" : 110,\n\t\t\t\t\t\"resume\" : \"DispSync\",\n\t\t\t\t\t\"run\" : 60\n\t\t\t\t}\n\t\t\t}\n\t\t},\n\n\t\t\"waker\" : {\n\t\t\t\"priority\" : -19,\n\t\t\t\"loop\" : -1,\n\t\t\t\"resume\" : \"NuPlayerRenderer\",\n\t\t\t\"timer\" : { \"ref\" : \"timerB\", \"period\" : 33333 }\n\t\t},\n\n\t\t\"NuPlayerRenderer\" : {\n\t\t\t\"priority\" : -15,\n\t\t\t\"loop\" : -1,\n\t\t\t\"phases\" : 
{\n\t\t\t\t\"p1\" : {\n\t\t\t\t\t\"loop\" : 3,\n\t\t\t\t\t\"suspend\" : \"NuPlayerRenderer\",\n\t\t\t\t\t\"run\" : 140,\n\t\t\t\t\t\"resume\" : \"NuPlayerDriver1\",\n\t\t\t\t\t\"run\" : 95\n\t\t\t\t},\n\n\t\t\t\t\"p2\" : {\n\t\t\t\t\t\"sleep\" : 27000,\n\t\t\t\t\t\"run\" : 580,\n\t\t\t\t\t\"resume\" : \"NPDecoder\",\n\t\t\t\t\t\"resume\" : \"NPDecoder-CL\",\n\t\t\t\t\t\"resume\" : \"gle.aac.decoder\"\n\t\t\t\t}\n\t\t\t}\n\t\t},\n\n\t\t\"NuPlayerDriver1\" : {\n\t\t\t\"priority\" : -15,\n\t\t\t\"loop\" : -1,\n\t\t\t\"suspend\",\n\t\t\t\"run\" : 100,\n\t\t\t\"lock\" : \"NuPlayerDriver\",\n\t\t\t\"sync\" : { \"ref\" : \"NuPlayerDriver\", \"mutex\" : \"NuPlayerDriver\" },\n\t\t\t\"unlock\" : \"NuPlayerDriver\",\n\t\t\t\"run\" : 50,\n\t\t\t\"suspend\" : \"NuPlayerDriver\",\n\t\t\t\"run\" : 80,\n\t\t\t\"lock\" : \"NuPlayerDriver\",\n\t\t\t\"sync\" : { \"ref\" : \"NuPlayerDriver\", \"mutex\" : \"NuPlayerDriver\" },\n\t\t\t\"unlock\" : \"NuPlayerDriver\",\n\t\t\t\"run\" : 370,\n\t\t\t\"lock\" : \"NuPlayerDriver\",\n\t\t\t\"sync\" : { \"ref\" : \"NuPlayerDriver\", \"mutex\" : \"NuPlayerDriver\" },\n\t\t\t\"unlock\" : \"NuPlayerDriver\",\n\t\t\t\"run\" : 135,\n\t\t\t\"resume\" : \"NuPlayerDriver\"\n\t\t},\n\n\t\t\"NuPlayerDriver2\" : {\n\t\t\t\"priority\" : -15,\n\t\t\t\"loop\" : -1,\n\t\t\t\"suspend\" : \"NuPlayerDriver\",\n\t\t\t\"run\" : 110,\n\t\t\t\"resume\" : \"NuPlayerDriver\",\n\t\t\t\"resume\" : \"CodecLooper1\",\n\t\t\t\"sleep\" : 2500,\n\t\t\t\"run\" : 80,\n\t\t\t\"lock\" : \"NuPlayerDriver\",\n\t\t\t\"sync\" : { \"ref\" : \"NuPlayerDriver\", \"mutex\" : \"NuPlayerDriver\" },\n\t\t\t\"unlock\" : \"NuPlayerDriver\",\n\t\t\t\"run\" : 50,\n\t\t\t\"lock\" : \"NuPlayerDriver\",\n\t\t\t\"sync\" : { \"ref\" : \"NuPlayerDriver\", \"mutex\" : \"NuPlayerDriver\" },\n\t\t\t\"unlock\" : \"NuPlayerDriver\",\n\t\t\t\"run\" : 70,\n\t\t\t\"lock\" : \"NuPlayerDriver\",\n\t\t\t\"sync\" : { \"ref\" : \"NuPlayerDriver\", \"mutex\" : \"NuPlayerDriver\" },\n\t\t\t\"unlock\" : 
\"NuPlayerDriver\",\n\t\t\t\"run\" : 35\n\t\t},\n\n\t\t\"CodecLooper1\" : {\n\t\t\t\"priority\" : -15,\n\t\t\t\"loop\" : -1,\n\t\t\t\"suspend\",\n\t\t\t\"run\" : 230,\n\t\t\t\"sleep\" : 80,\n\t\t\t\"run\" : 150,\n\t\t\t\"sleep\" : 210,\n\t\t\t\"run\" : 330,\n\t\t\t\"resume\" : \"CodecLooper2\",\n\t\t\t\"sleep\" : 900,\n\t\t\t\"run\" : 170,\n\t\t\t\"sleep\" : 670,\n\t\t\t\"run\" : 125,\n\t\t\t\"resume\" : \"CodecLooper2\"\n\t\t},\n\n\t\t\"CodecLooper2\" : {\n\t\t\t\"priority\" : -1,\n\t\t\t\"loop\" : -1,\n\t\t\t\"suspend\",\n\t\t\t\"run\" : 160,\n\t\t\t\"resume\" : \"CodecLooper3\",\n\t\t\t\"sleep\" : 590,\n\t\t\t\"resume\" : \"OMXCallbackDisp2\",\n\t\t\t\"run\" : 75,\n\t\t\t\"suspend\",\n\t\t\t\"run\" : 260\n\t\t},\n\n\t\t\"OMXCallbackDisp2\" : {\n\t\t\t\"priority\" : -1,\n\t\t\t\"loop\" : -1,\n\t\t\t\"suspend\",\n\t\t\t\"run\" : 180\n\t\t},\n\n\t\t\"CodecLooper3\" : {\n\t\t\t\"priority\" : -1,\n\t\t\t\"loop\" : -1,\n\t\t\t\"suspend\",\n\t\t\t\"run\" : 1000\n\t\t},\n\n\t\t\"NPDecoder\" : {\n\t\t\t\"priority\" : -15,\n\t\t\t\"loop\" : -1,\n\t\t\t\"suspend\",\n\t\t\t\"run\" : 500,\n\t\t\t\"sleep\" : 680,\n\t\t\t\"resume\" : \"OMXCallbackDisp1\",\n\t\t\t\"run\" : 2000\n\t\t},\n\n\t\t\"NPDecoder-CL\" : {\n\t\t\t\"priority\" : -15,\n\t\t\t\"loop\" : -1,\n\t\t\t\"suspend\",\n\t\t\t\"run\" : 570,\n\t\t\t\"sleep\" : 570,\n\t\t\t\"run\" : 2100\n\t\t},\n\n\t\t\"gle.aac.decoder\" : {\n\t\t\t\"priority\" : -1,\n\t\t\t\"loop\" : -1,\n\t\t\t\"suspend\",\n\t\t\t\"run\" : 2400,\n\t\t\t\"sleep\" : 430,\n\t\t\t\"run\" : 45\n\t\t},\n\n\t\t\"OMXCallbackDisp1\" : {\n\t\t\t\"priority\" : -1,\n\t\t\t\"loop\" : -1,\n\t\t\t\"suspend\",\n\t\t\t\"run\" : 135,\n\t\t\t\"sleep\" : 230,\n\t\t\t\"run\" : 140,\n\t\t\t\"sleep\" : 330,\n\t\t\t\"run\" : 190,\n\t\t\t\"sleep\" : 550,\n\t\t\t\"run\" : 160\n\t\t}\n\t},\n\n\t\"global\" : {\n\t\t\"default_policy\" : \"SCHED_OTHER\",\n\t\t\"duration\" : 600,\n\t\t\"ftrace\" : false,\n\t\t\"gnuplot\" : false,\n\t\t\"logdir\" : \"./\",\n\t\t\"log_basename\" : 
\"video\",\n\t\t\"lock_pages\" : true,\n\t\t\"frag\" : 1,\n\t\t\"calibration\" : \"CPU0\"\n\t}\n}\n\n"
  },
  {
    "path": "wa/workloads/rt_app/use_cases/video-short.json",
    "content": "{\n\t\"tasks\" : {\n\t\t\"surfaceflinger\" : {\n\t\t\t\"priority\" : -7,\n\t\t\t\"loop\" : -1,\n\t\t\t\"suspend\",\n\t\t\t\"run\" : 1500\n\t\t},\n\n\t\t\"DispSync\" : {\n\t\t\t\"priority\" : -7,\n\t\t\t\"loop\" : -1,\n\t\t\t\"phases\" : {\n\t\t\t\t\"p1\" : {\n\t\t\t\t\t\"suspend\",\n\t\t\t\t\t\"run\" : 35,\n\t\t\t\t\t\"resume\" : \"EventThread\",\n\t\t\t\t\t\"run\" : 40,\n\t\t\t\t},\n\n\t\t\t\t\"p2\" : {\n\t\t\t\t\t\"loop\" : 2,\n\t\t\t\t\t\"suspend\",\n\t\t\t\t\t\"run\" : 30\n\t\t\t\t}\n\t\t\t},\n\t\t},\n\n\t\t\"hwc_eventmon\" : {\n\t\t\t\"priority\" : -19,\n\t\t\t\"loop\" : -1,\n\t\t\t\"resume\" : \"DispSync\",\n\t\t\t\"run\" : 115,\n\t\t\t\"timer\" : { \"ref\" : \"timerA\", \"period\" : 16667 }\n\t\t},\n\n\t\t\"EventThread1\" : {\n\t\t\t\"priority\" : -8,\n\t\t\t\"loop\" : -1,\n\t\t\t\"phases\" : {\n\t\t\t\t\"p1\" : {\n\t\t\t\t\t\"suspend\" : \"EventThread\",\n\t\t\t\t\t\"run\" : 25,\n\t\t\t\t\t\"resume\" : \"DispSync\",\n\t\t\t\t\t\"sleep\" : 9650,\n\t\t\t\t\t\"run\" : 70,\n\t\t\t\t\t\"resume\" : \"DispSync\",\n\t\t\t\t\t\"run\" : 80\n\t\t\t\t},\n\n\t\t\t\t\"p2\" : {\n\t\t\t\t\t\"suspend\" : \"EventThread\",\n\t\t\t\t\t\"run\" : 90,\n\t\t\t\t\t\"resume\" : \"DispSync\"\n\t\t\t\t}\n\t\t\t}\n\t\t},\n\n\t\t\"EventThread2\" : {\n\t\t\t\"priority\" : -8,\n\t\t\t\"loop\" : -1,\n\t\t\t\"phases\" : {\n\t\t\t\t\"p1\" : {\n\t\t\t\t\t\"suspend\" : \"EventThread\",\n\t\t\t\t\t\"run\" : 30,\n\t\t\t\t\t\"resume\" : \"surfaceflinger\"\n\t\t\t\t},\n\n\t\t\t\t\"p2\" : {\n\t\t\t\t\t\"suspend\" : \"EventThread\",\n\t\t\t\t\t\"run\" : 35,\n\t\t\t\t\t\"sleep\" : 2000,\n\t\t\t\t\t\"run\" : 110,\n\t\t\t\t\t\"resume\" : \"DispSync\",\n\t\t\t\t\t\"run\" : 60\n\t\t\t\t}\n\t\t\t}\n\t\t},\n\n\t\t\"waker\" : {\n\t\t\t\"priority\" : -19,\n\t\t\t\"loop\" : -1,\n\t\t\t\"resume\" : \"NuPlayerRenderer\",\n\t\t\t\"timer\" : { \"ref\" : \"timerB\", \"period\" : 33333 }\n\t\t},\n\n\t\t\"NuPlayerRenderer\" : {\n\t\t\t\"priority\" : -15,\n\t\t\t\"loop\" : -1,\n\t\t\t\"phases\" : 
{\n\t\t\t\t\"p1\" : {\n\t\t\t\t\t\"loop\" : 3,\n\t\t\t\t\t\"suspend\" : \"NuPlayerRenderer\",\n\t\t\t\t\t\"run\" : 140,\n\t\t\t\t\t\"resume\" : \"NuPlayerDriver1\",\n\t\t\t\t\t\"run\" : 95\n\t\t\t\t},\n\n\t\t\t\t\"p2\" : {\n\t\t\t\t\t\"sleep\" : 27000,\n\t\t\t\t\t\"run\" : 580,\n\t\t\t\t\t\"resume\" : \"NPDecoder\",\n\t\t\t\t\t\"resume\" : \"NPDecoder-CL\",\n\t\t\t\t\t\"resume\" : \"gle.aac.decoder\"\n\t\t\t\t}\n\t\t\t}\n\t\t},\n\n\t\t\"NuPlayerDriver1\" : {\n\t\t\t\"priority\" : -15,\n\t\t\t\"loop\" : -1,\n\t\t\t\"suspend\",\n\t\t\t\"run\" : 100,\n\t\t\t\"lock\" : \"NuPlayerDriver\",\n\t\t\t\"sync\" : { \"ref\" : \"NuPlayerDriver\", \"mutex\" : \"NuPlayerDriver\" },\n\t\t\t\"unlock\" : \"NuPlayerDriver\",\n\t\t\t\"run\" : 50,\n\t\t\t\"suspend\" : \"NuPlayerDriver\",\n\t\t\t\"run\" : 80,\n\t\t\t\"lock\" : \"NuPlayerDriver\",\n\t\t\t\"sync\" : { \"ref\" : \"NuPlayerDriver\", \"mutex\" : \"NuPlayerDriver\" },\n\t\t\t\"unlock\" : \"NuPlayerDriver\",\n\t\t\t\"run\" : 370,\n\t\t\t\"lock\" : \"NuPlayerDriver\",\n\t\t\t\"sync\" : { \"ref\" : \"NuPlayerDriver\", \"mutex\" : \"NuPlayerDriver\" },\n\t\t\t\"unlock\" : \"NuPlayerDriver\",\n\t\t\t\"run\" : 135,\n\t\t\t\"resume\" : \"NuPlayerDriver\"\n\t\t},\n\n\t\t\"NuPlayerDriver2\" : {\n\t\t\t\"priority\" : -15,\n\t\t\t\"loop\" : -1,\n\t\t\t\"suspend\" : \"NuPlayerDriver\",\n\t\t\t\"run\" : 110,\n\t\t\t\"resume\" : \"NuPlayerDriver\",\n\t\t\t\"resume\" : \"CodecLooper1\",\n\t\t\t\"sleep\" : 2500,\n\t\t\t\"run\" : 80,\n\t\t\t\"lock\" : \"NuPlayerDriver\",\n\t\t\t\"sync\" : { \"ref\" : \"NuPlayerDriver\", \"mutex\" : \"NuPlayerDriver\" },\n\t\t\t\"unlock\" : \"NuPlayerDriver\",\n\t\t\t\"run\" : 50,\n\t\t\t\"lock\" : \"NuPlayerDriver\",\n\t\t\t\"sync\" : { \"ref\" : \"NuPlayerDriver\", \"mutex\" : \"NuPlayerDriver\" },\n\t\t\t\"unlock\" : \"NuPlayerDriver\",\n\t\t\t\"run\" : 70,\n\t\t\t\"lock\" : \"NuPlayerDriver\",\n\t\t\t\"sync\" : { \"ref\" : \"NuPlayerDriver\", \"mutex\" : \"NuPlayerDriver\" },\n\t\t\t\"unlock\" : 
\"NuPlayerDriver\",\n\t\t\t\"run\" : 35\n\t\t},\n\n\t\t\"CodecLooper1\" : {\n\t\t\t\"priority\" : -15,\n\t\t\t\"loop\" : -1,\n\t\t\t\"suspend\",\n\t\t\t\"run\" : 230,\n\t\t\t\"sleep\" : 80,\n\t\t\t\"run\" : 150,\n\t\t\t\"sleep\" : 210,\n\t\t\t\"run\" : 330,\n\t\t\t\"resume\" : \"CodecLooper2\",\n\t\t\t\"sleep\" : 900,\n\t\t\t\"run\" : 170,\n\t\t\t\"sleep\" : 670,\n\t\t\t\"run\" : 125,\n\t\t\t\"resume\" : \"CodecLooper2\"\n\t\t},\n\n\t\t\"CodecLooper2\" : {\n\t\t\t\"priority\" : -1,\n\t\t\t\"loop\" : -1,\n\t\t\t\"suspend\",\n\t\t\t\"run\" : 160,\n\t\t\t\"resume\" : \"CodecLooper3\",\n\t\t\t\"sleep\" : 590,\n\t\t\t\"resume\" : \"OMXCallbackDisp2\",\n\t\t\t\"run\" : 75,\n\t\t\t\"suspend\",\n\t\t\t\"run\" : 260\n\t\t},\n\n\t\t\"OMXCallbackDisp2\" : {\n\t\t\t\"priority\" : -1,\n\t\t\t\"loop\" : -1,\n\t\t\t\"suspend\",\n\t\t\t\"run\" : 180\n\t\t},\n\n\t\t\"CodecLooper3\" : {\n\t\t\t\"priority\" : -1,\n\t\t\t\"loop\" : -1,\n\t\t\t\"suspend\",\n\t\t\t\"run\" : 1000\n\t\t},\n\n\t\t\"NPDecoder\" : {\n\t\t\t\"priority\" : -15,\n\t\t\t\"loop\" : -1,\n\t\t\t\"suspend\",\n\t\t\t\"run\" : 500,\n\t\t\t\"sleep\" : 680,\n\t\t\t\"resume\" : \"OMXCallbackDisp1\",\n\t\t\t\"run\" : 2000\n\t\t},\n\n\t\t\"NPDecoder-CL\" : {\n\t\t\t\"priority\" : -15,\n\t\t\t\"loop\" : -1,\n\t\t\t\"suspend\",\n\t\t\t\"run\" : 570,\n\t\t\t\"sleep\" : 570,\n\t\t\t\"run\" : 2100\n\t\t},\n\n\t\t\"gle.aac.decoder\" : {\n\t\t\t\"priority\" : -1,\n\t\t\t\"loop\" : -1,\n\t\t\t\"suspend\",\n\t\t\t\"run\" : 2400,\n\t\t\t\"sleep\" : 430,\n\t\t\t\"run\" : 45\n\t\t},\n\n\t\t\"OMXCallbackDisp1\" : {\n\t\t\t\"priority\" : -1,\n\t\t\t\"loop\" : -1,\n\t\t\t\"suspend\",\n\t\t\t\"run\" : 135,\n\t\t\t\"sleep\" : 230,\n\t\t\t\"run\" : 140,\n\t\t\t\"sleep\" : 330,\n\t\t\t\"run\" : 190,\n\t\t\t\"sleep\" : 550,\n\t\t\t\"run\" : 160\n\t\t}\n\t},\n\n\t\"global\" : {\n\t\t\"default_policy\" : \"SCHED_OTHER\",\n\t\t\"duration\" : 6,\n\t\t\"ftrace\" : false,\n\t\t\"gnuplot\" : false,\n\t\t\"logdir\" : \"./\",\n\t\t\"log_basename\" : 
\"video\",\n\t\t\"lock_pages\" : true,\n\t\t\"frag\" : 1,\n\t\t\"calibration\" : \"CPU0\"\n\t}\n}\n\n"
  },
  {
    "path": "wa/workloads/rt_app/workgen",
    "content": "#!/usr/bin/env python\n\nimport os\nimport sys\nimport getopt\nimport subprocess\nimport signal\nimport re\n\ndef check_unikid_json(infile, outfile, verbose=0):\n    if not os.path.exists(infile):\n        print(\"WARN: %s does not exist\", infile)\n\n    try:\n        fi = open(infile, \"r\")\n    except IOError:\n        print(\"WARN: Unable to open %s\", infile)\n        sys.exit(2)\n\n    lines = fi.readlines()\n    fi.close()\n\n    try:\n        fo = open(outfile, \"w+\")\n    except IOError:\n        print(\"WARN: Unable to open %s\", f)\n        sys.exit(2)\n\n    curid = 1\n    refcount = 0\n    idlist = {}\n    myid = []\n    for myline in lines:\n        if \"{\" in myline:\n            refcount += 1\n            myid.append(curid)\n            curid = 1\n            idlist[refcount] = {}\n\n        if \"}\" in myline:\n            del idlist[refcount]\n            curid = myid.pop()\n            refcount -= 1\n\n        try:\n            key_id, value = myline.split(\":\", 1)\n        except ValueError:\n            fo.write(myline)\n            continue\n\n        key_id = key_id.strip('\\\"\\t\\n\\r ')\n        value = value.strip(',\\\"\\t\\n\\r ')\n\n        if key_id in idlist[refcount]:\n            newkey_id = key_id + str(curid)\n            while newkey_id in idlist[refcount]:\n                curid += 1\n                newkey_id = key_id + str(curid)\n\n            if verbose:\n                print(\"level \", refcount, \" : key \", key_id, \" changed into \", newkey_id)\n\n            myline = myline.replace(key_id, newkey_id, 1)\n            key_id = newkey_id\n\n        idlist[refcount][key_id] = value\n        fo.write(myline)\n\n    fo.close()\n\n    return\n\ndef check_suspend_json(infile, outfile, verbose=0):\n    if not os.path.exists(infile):\n        print(\"WARN: %s does not exist\", infile)\n\n    try:\n        fi = open(infile, \"r\")\n    except IOError:\n        print(\"WARN: Unable to open %s\", infile)\n      
  sys.exit(2)\n\n    lines = fi.readlines()\n    fi.close()\n\n    try:\n        fo = open(outfile, \"w+\")\n    except IOError:\n        print(\"WARN: Unable to open %s\", f)\n        sys.exit(2)\n\n\n    taskobj = 0\n    curid = \"\"\n    for myline in lines:\n\n        exception = 0\n        key_id = \"exception\"\n\n        try:\n            key_id, value = myline.split(\":\", 1)\n        except ValueError:\n            if \"suspend\" in myline:\n                key_id = \"suspend\"\n                exception = 1\n\n        key_id = key_id.strip('\\\"\\t\\n\\r ')\n\n        if not \"tasks\" in key_id and \\\n           taskobj == 0:\n            fo.write(myline)\n            continue\n\n        if \"{\" in myline:\n            taskobj += 1\n            if taskobj == 2:\n                curid = key_id\n\n        if \"}\" in myline:\n            taskobj -= 1\n\n        if \"suspend\" in key_id and \\\n           exception == 1:\n\n            if verbose:\n                print(\"value \", curid, \" added to suspend key\")\n\n            if \",\" in myline:\n                myline = myline.replace(\",\", \" : \" + \"\\\"\" + curid + \"\\\",\", 1)\n            else:\n                myline = myline.replace(\"\\n\", \" : \" + \"\\\"\" + curid + \"\\\"\\n\", 1)\n\n        fo.write(myline)\n\n    fo.close()\n\n    return\n\n# remove trailing commas that may appear after closing\n# brackets and last entries in every section\ndef remove_trailing_commas(outfile):\n    try:\n        f = open(outfile, 'r+')\n    except IOError:\n        print(\"WARN: Unable to open %s\", f)\n        sys.exit(2)\n\n    lines = f.read()\n    check_last_entry_regex = r'(.),(\\n\\s*})'\n    check_end_bracket_regex = r'(}),(\\n\\s*})'\n\n    lines = re.sub(check_last_entry_regex, r'\\g<1>\\g<2>', lines)\n    lines = re.sub(check_end_bracket_regex, r'\\g<1>\\g<2>', lines)\n\n    f.seek(0)\n    f.write(lines)\n    f.truncate()\n    f.close()\n\n    return\n\n\n# Search for comments to remove\ndef 
comment_remover(text):\n    def replacer(match):\n        s = match.group(0)\n        if s.startswith('/'):\n            return \" \" # note: a space and not an empty string\n        else:\n            return s\n\n    pattern = re.compile(\n              r'//.*?$|/\\*.*?\\*/|\\'(?:\\\\.|[^\\\\\\'])*\\'|\"(?:\\\\.|[^\\\\\"])*\"',\n                re.DOTALL | re.MULTILINE)\n\n    return re.sub(pattern, replacer, text)\n\n# Remove all comments inside the file\ndef remove_all_comments(outfile):\n    try:\n        f = open(outfile, 'r+')\n    except IOError:\n        print(\"WARN: Unable to open %s\", f)\n        sys.exit(2)\n\n    lines = f.read()\n\n    lines = comment_remover(lines)\n    f.seek(0)\n    f.write(lines)\n    f.truncate()\n\n    f.close()\n\n    return\n\nif __name__ == '__main__':\n\n    def handleSigTERM(signum, frame):\n        sys.exit()\n\n    signal.signal(signal.SIGTERM, handleSigTERM)\n    signal.signal(signal.SIGINT, handleSigTERM)\n\n    outfile = \"unikid.json\"\n    selfupdate = 0\n    verbose = 0\n    dry_run = False\n\n    try:\n        opts, args = getopt.getopt(sys.argv[1:], \"o:avd\")\n    except getopt.GetoptError as err:\n        print(str(err)) # will print something like \"option -a not recognized\"\n        sys.exit(2)\n\n    for o, a in opts:\n        if o == \"-o\":\n            outfile = a\n        if o == \"-a\":\n            selfupdate = 1\n        if o == \"-v\":\n            verbose = 1\n        if o == \"-d\":\n            dry_run = True\n\n    for f in args:\n        if selfupdate:\n            outfile = f\n\n        check_suspend_json(f, outfile)\n        check_unikid_json(outfile, outfile)\n        remove_trailing_commas(outfile)\n        remove_all_comments(outfile)\n\n        if not dry_run:\n            subprocess.call([\"rt-app\", outfile])\n"
  },
  {
    "path": "wa/workloads/schbench/__init__.py",
    "content": "#    Copyright 2022 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport os\nimport re\n\nfrom wa import Workload, Parameter, Executable\nfrom wa.utils.exec_control import once\n\ntimeout_buffer = 10\n\nregex_map = {\"50.0th\": re.compile(r'50.0th: (\\d+)'),\n             \"75.0th\": re.compile(r'75.0th: (\\d+)'),\n             \"90.0th\": re.compile(r'90.0th: (\\d+)'),\n             \"95.0th\": re.compile(r'95.0th: (\\d+)'),\n             \"*99.0th\": re.compile(r'\\*99.0th: (\\d+)'),\n             \"99.5th\": re.compile(r'99.5th: (\\d+)'),\n             \"99.9th\": re.compile(r'99.9th: (\\d+)'),\n             \"min\": re.compile(r'min=(\\d+)'),\n             \"max\": re.compile(r'max=(\\d+)')\n             }\n\n\nclass Schbench(Workload):\n    name = 'schbench'\n    description = \"\"\"\n    Benchmark providing detailed latency distribution statistics for scheduler\n    wakeups.\n    \"\"\"\n\n    parameters = [\n        Parameter('runtime', kind=int, default=30, aliases=['timeout'],\n                  description='How long to run before exiting (seconds)'),\n        Parameter('message_threads', kind=int, default=2,\n                  description='number of message threads'),\n        Parameter('threads', kind=int, default=16,\n                  description='worker threads per message thread'),\n        Parameter('sleeptime', kind=int, default=30000,\n                  description='Message thread latency (usec)'),\n        
Parameter('cputime', kind=int, default=30000,\n                  description='How long to think during loop'),\n        Parameter('auto', kind=bool, default=False,\n                  description='grow thread count until latencies hurt'),\n        Parameter('pipe', kind=int, default=0,\n                  description='transfer size bytes to simulate a pipe test'),\n        Parameter('rps', kind=int, default=0,\n                  description='requests per second mode (count)'),\n        Parameter('intervaltime', kind=int,\n                  description='interval for printing latencies (seconds)'),\n    ]\n\n    binary_name = 'schbench'\n    schbench_results_txt = 'schbench_results.txt'\n    output_unit = 'usec'\n\n    @once\n    def initialize(self, context):\n        host_binary = context.get_resource(\n            Executable(self, self.target.abi, self.binary_name))\n        Schbench.target_binary = self.target.install(host_binary)\n\n    def setup(self, context):\n        self.target_output_file = self.target.get_workpath(\n            self.schbench_results_txt)\n        self.run_timeout = self.runtime + timeout_buffer\n        self.command = \"{} -m {} -t {} -r {} -s {} -c {} -p {} -R {} -i {} {}\"\n        self.command = self.command.format(\n            self.target_binary, self.message_threads, self.threads,\n            self.runtime, self.sleeptime, self.cputime, self.pipe, self.rps,\n            self.runtime if not self.intervaltime else self.intervaltime,\n            '-a' if self.auto else '',\n        )\n\n    def run(self, context):\n        self.output = self.target.execute(\n            self.command, timeout=self.run_timeout)\n\n    def extract_results(self, context):\n        host_output_file = os.path.join(\n            context.output_directory, self.schbench_results_txt)\n        with open(host_output_file, \"w\") as f:\n            f.write(self.output)\n        context.add_artifact('schbench-results', host_output_file, kind='raw')\n\n    def 
update_output(self, context):\n        results_file = context.get_artifact_path('schbench-results')\n        with open(results_file) as fh:\n            for line in fh:\n                for label, regex in regex_map.items():\n                    match = regex.search(line)\n                    if match:\n                        context.add_metric(label, float(match.group(1)),\n                                           self.output_unit)\n\n    @once\n    def finalize(self, context):\n        if self.uninstall:\n            self.target.uninstall(self.binary_name)\n"
  },
  {
    "path": "wa/workloads/schbench/src/LICENSE",
    "content": "schbench copyright notice\n\nCopyright (C) 2016 Facebook\nChris Mason <clm@fb.com>\n\nGPLv2, portions copied from the kernel and from Jens Axboe's fio\n\nSources can be obtained from:\n\nhttps://git.kernel.org/pub/scm/linux/kernel/git/mason/schbench.git/\n"
  },
  {
    "path": "wa/workloads/shellscript/__init__.py",
    "content": "#    Copyright 2014-2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\n# pylint: disable=E1101,W0201,E0203\n\nimport os\n\nfrom wa import Workload, Parameter\nfrom wa.framework.exception import ConfigError, WorkloadError\n\n\nclass ShellScript(Workload):\n\n    name = 'shellscript'\n    description = \"\"\"\n    Runs an arbitrary shellscript on the target.\n\n    \"\"\"\n\n    parameters = [\n        Parameter('script_file', mandatory=True,\n                  description='''\n                  The path (on the host) to the shell script file. 
This must be\n                  an absolute path (though it may contain ~).\n                  '''),\n        Parameter('argstring', default='',\n                  description='A string that should contain arguments passed to the script.'),\n        Parameter('as_root', kind=bool, default=False,\n                  description='Specify whether the script should be run as root.'),\n        Parameter('timeout', kind=int, default=60,\n                  description='Timeout, in seconds, for the script run time.'),\n    ]\n\n    def initialize(self, context):\n        if self.as_root and not self.target.is_rooted:\n            raise WorkloadError('Cannot run script as root -- target appears to be unrooted.')\n\n        self.script_file = os.path.expanduser(self.script_file)\n        if not os.path.isfile(self.script_file):\n            raise ConfigError('Can\\'t access file (is the path correct?): {}'.format(self.script_file))\n        self.output = None\n        self.command = None\n        self.on_target_script_file = None\n\n    def setup(self, context):\n        self.on_target_script_file = self.target.get_workpath(os.path.basename(self.script_file))\n        self.target.push(self.script_file, self.on_target_script_file)\n        self.command = 'sh {} {}'.format(self.on_target_script_file, self.argstring)\n\n    def run(self, context):\n        self.output = self.target.execute(self.command, timeout=self.timeout, as_root=self.as_root)\n\n    def extract_results(self, context):\n        with open(os.path.join(context.output_directory, 'output.txt'), 'w') as wfh:\n            wfh.write(self.output)\n\n    def teardown(self, context):\n        if self.cleanup_assets:\n            self.target.remove(self.on_target_script_file)\n"
  },
  {
    "path": "wa/workloads/speedometer/LICENSE",
    "content": "The speedometer_archive.tgz file is a tarball containing the following archives from WebKit:\n\n  the PerformanceTests/Speedometer{,2.1} directory state taken from https://github.com/WebKit/webkit as of:\n  2.0:\n    commit 5f402692d5f3406527dc107b5d20cc47dac929e8 Tue Jul 14 14:06:17 2020 +0000\n  2.1:\n    commit 7a7acedd6de93309eae61d40394f0fbbb886541e Wed Aug 13 00:53:31 2025 -0700\n  3.0:\n    commit 734c49b3d075dcc33f56becf3bde8aca5245b719 Mon Feb 24 09:00:53 2025 -0800\n\nWebKit is open source software with portions licensed under the LGPL and BSD\nlicenses available at https://webkit.org/licensing-webkit/\n"
  },
  {
    "path": "wa/workloads/speedometer/__init__.py",
    "content": "#    Copyright 2014-2025 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\nfrom collections import defaultdict\nfrom http.server import SimpleHTTPRequestHandler, HTTPServer\nfrom urllib.parse import urlencode\nimport lzma\nimport os\nimport re\nimport tarfile\nimport tempfile\nimport threading\nimport time\nimport uuid\n\nfrom wa import Parameter, Workload, File\nfrom wa.framework.exception import WorkloadError\nfrom wa.utils.exec_control import once\nfrom wa.utils.misc import safe_extract\n\nfrom devlib.utils.android import adb_command\n\n\nclass Speedometer(Workload):\n\n    name = \"speedometer\"\n    description = \"\"\"\n    A workload to execute the speedometer web based benchmark. Requires device to be rooted.\n    This workload will only with Android 9+ devices if connected via TCP, or Android 5+ if\n    connected via USB.\n\n    Test description:\n\n    1. Host a local copy of the Speedometer website, and make it visible to the device via ADB.\n    2. Open chrome via an intent to access the local copy.\n    3. Execute the benchmark - the copy has been modified to trigger the start of the benchmark.\n    4. The benchmark will write to the browser's sandboxed local storage to signal the benchmark\n       has completed. This local storage is monitored by this workload.\n\n    Known working chrome version 83.0.4103.106\n\n    To modify the archived speedometer workload:\n\n    1. 
Run 'git clone https://github.com/WebKit/webkit'\n\n    2. Copy PerformanceTests/Speedometer to a directory called document_root, renaming Speedometer\n      to Speedometer<version>. For example, Speedometer2.0.\n\n    3. Modify document_root/Speedometer<version>/index.html:\n\n      3a. (Skip for v3.0) Remove the 'defer' attribute from the <script> tags within the <head> section.\n      3b. Add '<script>startTest();</script>' to the very end of the <body> section.\n\n    4. Modify document_root/Speedometer<version>/resources/main.js (it's main.mjs for 3.0):\n\n      4a. Add the listed code after this line:\n\n            document.getElementById('result-number').textContent = results.formattedMean;\n\n        Code to add::\n\n                if (location.search.length > 1) {\n                    var parts = location.search.substring(1).split('&');\n                    for (var i = 0; i < parts.length; i++) {\n                        var keyValue = parts[i].split('=');\n                        var key = keyValue[0];\n                        var value = keyValue[1];\n                        if (key === \"reportEndId\") {\n                            window.localStorage.setItem('reportEndId', value);\n                        }\n                    }\n                }\n\n    5. Run 'tar -cpzf speedometer_archive-<version>.tar document_root; xz --format=lzma -9 -e speedometer_archive-<version>.tar'\n\n    6. Copy the tarball into the workloads/speedometer directory\n\n    7. If appropriate, update the commit info in the LICENSE file.\n    \"\"\"\n    supported_platforms = [\"android\"]\n\n    package_names = [\"org.chromium.chrome\", \"com.android.chrome\", \"org.bromite.chromium\"]\n    # This regex finds a single XML tag where property 1 and 2 are true:\n    #  1. contains the attribute text=\"XXX\" or content-desc=\"XXX\"\n    #  2. and exclusively either 2a or 2b is true:\n    #   2a. 
there exists a index=\"3\" or resource-id=\"result-number\" to that attribute's left\n    #   2b. there exists a resource-id=\"result-number\" to that attribute's right\n    # The regex stores the XXX value of that attribute in the named group 'value'.\n    #\n    # Just in case someone wants to learn something:\n    #  If you use (?P<tag>regex)? to match 'regex', and then afterwards you\n    #  have (?(tag)A|B), then regex A will be used if the 'tag' group captured\n    #  something and B will be used if nothing was captured. This is how we\n    #  search for only 'resource-id=\"result-number\"' after the text/content-desc\n    #  _only_ in the case we didn't see it before.\n    #  Since 'index=\"3\"' is always on the left side of the value.\n    regex = re.compile(\n        r'<[^>]*(?P<Z>index=\"3\"|resource-id=\"result-number\")?[^>]*'\n        r'(?:text|content-desc)=\"(?P<value>\\d+.\\d+)\"[^>]*'\n        r'(?(Z)|resource-id=\"result-number\")[^>]*\\/>'\n    )\n\n    parameters = [\n        Parameter(\n            \"chrome_package\",\n            allowed_values=package_names,\n            kind=str,\n            default=\"com.android.chrome\",\n            description=\"\"\"\n                  The app package for the browser that will be launched.\n                  \"\"\",\n        ),\n        Parameter(\n            \"version\",\n            allowed_values=[\"2.0\", \"2.1\", \"3.0\"],\n            kind=str,\n            default=\"2.0\",\n            description=\"\"\"\n                  Speedometer version to run. 
Currently supports 2.0, 2.1 and 3.0.\n                  \"\"\",\n        ),\n    ]\n\n    def __init__(self, target, **kwargs):\n        super(Speedometer, self).__init__(target, **kwargs)\n        self.target_file_was_seen = defaultdict(lambda: False)\n        self.ui_dump_loc = None\n\n    @once\n    def initialize(self, context):\n        super(Speedometer, self).initialize(context)\n        Speedometer.archive_server = ArchiveServer()\n        if not self.target.is_rooted:\n            raise WorkloadError(\n                \"Device must be rooted for the speedometer workload currently\"\n            )\n\n        if not self.target.package_is_installed(self.chrome_package):\n            raise WorkloadError(\n                \"Could not find '{}' on the device. Please ensure it is installed, \"\n                \"or specify the correct package name using 'chrome_package' \"\n                \"parameter.\".format(self.chrome_package))\n\n        if self.target.adb_server is not None:\n            raise WorkloadError(\n                \"Workload does not support the adb_server parameter, due to the webpage \"\n                \"hosting mechanism.\"\n            )\n\n        # Temporary directory used for storing the Speedometer files, uiautomator\n        # dumps, and modified XML chrome config files.\n        Speedometer.temp_dir = tempfile.TemporaryDirectory()\n        Speedometer.document_root = os.path.join(self.temp_dir.name, \"document_root\")\n\n        # Host a copy of Speedometer locally\n        tarball = context.get_resource(File(self, f\"speedometer_archive-{self.version}.tar.lzma\"))\n        with lzma.open(tarball) as lzma_handle:\n            with tarfile.open(fileobj=lzma_handle) as handle:\n                safe_extract(handle, self.temp_dir.name)\n\n        self.archive_server.start(self.document_root)\n\n        Speedometer.speedometer_url = \"http://localhost:{}/Speedometer{}/index.html\".format(\n            self.archive_server.get_port(),\n     
       self.version,\n        )\n\n    def setup(self, context):\n        super(Speedometer, self).setup(context)\n\n        # We are making sure we start with a 'fresh' browser - no other tabs,\n        # nothing in the page cache, etc.\n\n        # Clear the application's cache.\n        self.target.execute(\"pm clear {}\".format(self.chrome_package), as_root=True)\n\n        # Launch the browser for the first time and then stop it. Since the\n        # cache has just been cleared, this forces it to recreate its\n        # preferences file, that we need to modify.\n        browser_launch_cmd = \"am start -a android.intent.action.VIEW -d {} {}\".format(\n            self.speedometer_url, self.chrome_package\n        )\n        self.target.execute(browser_launch_cmd)\n        time.sleep(1)\n        self.target.execute(\"am force-stop {}\".format(self.chrome_package))\n        time.sleep(1)\n\n        # Pull the preferences file from the device, modify it, and push it\n        # back.  This is done to bypass the 'first launch' screen of the\n        # browser we see after the cache is cleared.\n        self.preferences_xml = \"{}_preferences.xml\".format(self.chrome_package)\n\n        file_to_modify = \"/data/data/{}/shared_prefs/{}\".format(\n            self.chrome_package, self.preferences_xml\n        )\n\n        self.target.pull(file_to_modify, self.temp_dir.name, as_root=True)\n\n        with open(os.path.join(self.temp_dir.name, self.preferences_xml)) as read_fh:\n            lines = read_fh.readlines()\n\n            # Add additional elements for the preferences XML to the\n            # _second-last_ line\n            for line in [\n                '<boolean name=\"first_run_flow\" value=\"true\" />\\n',\n                '<boolean name=\"first_run_tos_accepted\" value=\"true\" />\\n',\n                '<boolean name=\"first_run_signin_complete\" value=\"true\" />\\n',\n                '<boolean name=\"displayed_data_reduction_promo\" value=\"true\" 
/>\\n',\n                # Add a 'request count' value to dismiss the pop-up window on the screen.\n                # If the value is greater than 1, pop-up window will be dismissed.\n                '<int name=\"Chrome.NotificationPermission.RequestCount\" value=\"2\" />\\n',\n            ]:\n                lines.insert(len(lines) - 1, line)\n\n            with open(\n                os.path.join(self.temp_dir.name, self.preferences_xml + \".new\"), \"w\",\n            ) as write_fh:\n                for line in lines:\n                    write_fh.write(line)\n\n        # Make sure ownership of the original file is preserved.\n        user_owner, group_owner = self.target.execute(\n            \"ls -l {}\".format(file_to_modify), as_root=True,\n        ).split()[2:4]\n\n        self.target.push(\n            os.path.join(self.temp_dir.name, self.preferences_xml + \".new\"),\n            file_to_modify,\n            as_root=True,\n        )\n\n        self.target.execute(\n            \"chown {}.{} {}\".format(user_owner, group_owner, file_to_modify),\n            as_root=True,\n        )\n\n    def run(self, context):\n        super(Speedometer, self).run(context)\n\n        self.archive_server.expose_to_device(self.target)\n\n        # Generate a UUID to search for in the browser's local storage to find out\n        # when the workload has ended.\n        report_end_id = uuid.uuid4().hex\n\n        query_params = {\"reportEndId\": report_end_id}\n        # Speedometer 3.0 does not start the test automatically, so we need to\n        # pass the \"startAutomatically=true\" parameter.\n        if self.version == \"3.0\":\n            query_params[\"startAutomatically\"] = \"true\"\n\n        url_with_unique_id = f\"{self.speedometer_url}?{urlencode(query_params)}\"\n        browser_launch_cmd = \"am start -a android.intent.action.VIEW -d '{}' {}\".format(\n            url_with_unique_id, self.chrome_package\n        )\n        
self.target.execute(browser_launch_cmd)\n\n        self.wait_for_benchmark_to_complete(report_end_id)\n\n        self.archive_server.hide_from_device(self.target)\n\n    def target_file_was_created(self, f):\n        \"\"\"Assume that once self.target.file_exists(f) returns True, it will\n        always be True from that point forward, so cache the response into the\n        self.target_file_was_seen dict.\"\"\"\n        if not self.target_file_was_seen[f]:\n            self.target_file_was_seen[f] = self.target.file_exists(f)\n        return self.target_file_was_seen[f]\n\n    def wait_for_benchmark_to_complete(self, report_end_id):\n        local_storage = \"/data/data/{}/app_chrome/Default/Local Storage/leveldb\".format(\n            self.chrome_package\n        )\n\n        sleep_period_s = 5\n        find_period_s = 30\n        timeout_period_m = 15\n\n        iterations = 0\n        local_storage_seen = False\n        benchmark_complete = False\n        while not benchmark_complete:\n            if self.target_file_was_created(local_storage):\n                candidate_files = []\n                if (\n                    iterations % (find_period_s // sleep_period_s) == 0\n                    or not local_storage_seen\n                ):\n                    # There's a chance we don't see the localstorage file immediately, and there's a\n                    # chance more of them could be created later, so check for those files every ~30\n                    # seconds.\n                    find_cmd = '{} find \"{}\" -iname \"*.log\"'.format(\n                        self.target.busybox, local_storage\n                    )\n                    candidate_files = self.target.execute(find_cmd, as_root=True).split(\n                        \"\\n\"\n                    )\n\n                local_storage_seen = True\n\n                for ls_file in candidate_files:\n                    # Each local storage file is in a binary format. 
The busybox grep seems to\n                    # print out the line '[KEY][VALUE]' for a match, rather than just reporting\n                    # that 'binary file X matches', so just check the output for our generated ID.\n                    grep_cmd = '{} grep {} \"{}\"'.format(\n                        self.target.busybox, report_end_id, ls_file\n                    )\n                    output = self.target.execute(\n                        grep_cmd, as_root=True, check_exit_code=False\n                    )\n                    if report_end_id in output:\n                        benchmark_complete = True\n                        break\n\n            iterations += 1\n\n            if iterations > ((timeout_period_m * 60) // sleep_period_s):\n                # We've been waiting <timeout_period_m> minutes for Speedometer to finish running - give up.\n                if not local_storage_seen:\n                    raise WorkloadError(\n                        f\"Speedometer did not complete within {timeout_period_m} minutes - Local Storage wasn't found\"\n                    )\n                raise WorkloadError(f\"Speedometer did not complete within {timeout_period_m} minutes.\")\n\n            time.sleep(sleep_period_s)\n\n    def read_score(self):\n        self.target.execute(\n            \"uiautomator dump {}\".format(self.ui_dump_loc), as_root=True\n        )\n        self.target.pull(self.ui_dump_loc, self.temp_dir.name)\n\n        with open(os.path.join(self.temp_dir.name, \"ui_dump.xml\"), \"rb\") as fh:\n            dump = fh.read().decode(\"utf-8\")\n        match = self.regex.search(dump)\n        result = None\n        if match:\n            result = float(match.group(\"value\"))\n\n        return result\n\n    def update_output(self, context):\n        super(Speedometer, self).update_output(context)\n\n        self.ui_dump_loc = os.path.join(self.target.working_directory, \"ui_dump.xml\")\n\n        score_read = False\n        iterations = 0\n   
     while not score_read:\n            score = self.read_score()\n\n            if score is not None:\n                context.add_metric(\n                    \"Speedometer Score\", score, \"Runs per minute\", lower_is_better=False\n                )\n                score_read = True\n            else:\n                if iterations >= 10:\n                    raise WorkloadError(\n                        \"The Speedometer workload has failed. No score was obtainable.\"\n                    )\n                else:\n                    # Sleep and retry...\n                    time.sleep(2)\n                    iterations += 1\n\n    def teardown(self, context):\n        super(Speedometer, self).teardown(context)\n\n        # The browser's processes can stick around and have minor impact on\n        # other performance sensitive workloads, so make sure we clean up.\n        self.target.execute(\"am force-stop {}\".format(self.chrome_package))\n\n        if self.cleanup_assets:\n            if self.ui_dump_loc is not None and self.target_file_was_created(\n                self.ui_dump_loc\n            ):\n                # The only thing left on device was the UI dump created by uiautomator.\n                self.target.execute(\"rm {}\".format(self.ui_dump_loc), as_root=True)\n\n        # Clear the cache we used to check if the local storage directory exists.\n        self.target_file_was_seen.clear()\n        self.ui_dump_loc = None\n\n    @once\n    def finalize(self, context):\n        super(Speedometer, self).finalize(context)\n\n        # Shutdown the locally hosted version of Speedometer\n        self.archive_server.stop()\n\n\nclass ArchiveServerThread(threading.Thread):\n    \"\"\"Thread for running the HTTPServer\"\"\"\n\n    def __init__(self, httpd):\n        self._httpd = httpd\n        threading.Thread.__init__(self)\n\n    def run(self):\n        self._httpd.serve_forever()\n\n\nclass 
DifferentDirectoryHTTPRequestHandler(SimpleHTTPRequestHandler):\n    \"\"\"A version of SimpleHTTPRequestHandler that allows us to serve\n    relative files from a different directory than the current one.\n    This directory is captured in |document_root|. It also suppresses\n    logging.\"\"\"\n\n    def translate_path(self, path):\n        document_root = self.server.document_root\n        path = SimpleHTTPRequestHandler.translate_path(self, path)\n        requested_uri = os.path.relpath(path, os.getcwd())\n        return os.path.join(document_root, requested_uri)\n\n    # Disable the logging.\n    # pylint: disable=redefined-builtin\n    def log_message(self, format, *args):\n        pass\n\n\nclass ArchiveServer(object):\n    def __init__(self):\n        self._port = None\n\n    def start(self, document_root):\n        # Create the server, and find out the port we've been assigned...\n        self._httpd = HTTPServer((\"\", 0), DifferentDirectoryHTTPRequestHandler)\n        # (This property is expected to be read by the\n        #  DifferentDirectoryHTTPRequestHandler.translate_path method.)\n        self._httpd.document_root = document_root\n        _, self._port = self._httpd.server_address\n\n        self._thread = ArchiveServerThread(self._httpd)\n        self._thread.start()\n\n    def stop(self):\n        self._httpd.shutdown()\n        self._thread.join()\n\n    def expose_to_device(self, target):\n        adb_command(target.adb_name, \"reverse tcp:{0} tcp:{0}\".format(self._port))\n\n    def hide_from_device(self, target):\n        adb_command(target.adb_name, \"reverse --remove tcp:{}\".format(self._port))\n\n    def get_port(self):\n        return self._port\n"
  },
  {
    "path": "wa/workloads/stress_ng/LICENSE",
    "content": "                    GNU GENERAL PUBLIC LICENSE\n                       Version 2, June 1991\n\n Copyright (C) 1989, 1991 Free Software Foundation, Inc.,\n 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA\n Everyone is permitted to copy and distribute verbatim copies\n of this license document, but changing it is not allowed.\n\n                            Preamble\n\n  The licenses for most software are designed to take away your\nfreedom to share and change it.  By contrast, the GNU General Public\nLicense is intended to guarantee your freedom to share and change free\nsoftware--to make sure the software is free for all its users.  This\nGeneral Public License applies to most of the Free Software\nFoundation's software and to any other program whose authors commit to\nusing it.  (Some other Free Software Foundation software is covered by\nthe GNU Lesser General Public License instead.)  You can apply it to\nyour programs, too.\n\n  When we speak of free software, we are referring to freedom, not\nprice.  Our General Public Licenses are designed to make sure that you\nhave the freedom to distribute copies of free software (and charge for\nthis service if you wish), that you receive source code or can get it\nif you want it, that you can change the software or use pieces of it\nin new free programs; and that you know you can do these things.\n\n  To protect your rights, we need to make restrictions that forbid\nanyone to deny you these rights or to ask you to surrender the rights.\nThese restrictions translate to certain responsibilities for you if you\ndistribute copies of the software, or if you modify it.\n\n  For example, if you distribute copies of such a program, whether\ngratis or for a fee, you must give the recipients all the rights that\nyou have.  You must make sure that they, too, receive or can get the\nsource code.  
And you must show them these terms so they know their\nrights.\n\n  We protect your rights with two steps: (1) copyright the software, and\n(2) offer you this license which gives you legal permission to copy,\ndistribute and/or modify the software.\n\n  Also, for each author's protection and ours, we want to make certain\nthat everyone understands that there is no warranty for this free\nsoftware.  If the software is modified by someone else and passed on, we\nwant its recipients to know that what they have is not the original, so\nthat any problems introduced by others will not reflect on the original\nauthors' reputations.\n\n  Finally, any free program is threatened constantly by software\npatents.  We wish to avoid the danger that redistributors of a free\nprogram will individually obtain patent licenses, in effect making the\nprogram proprietary.  To prevent this, we have made it clear that any\npatent must be licensed for everyone's free use or not licensed at all.\n\n  The precise terms and conditions for copying, distribution and\nmodification follow.\n\n                    GNU GENERAL PUBLIC LICENSE\n   TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION\n\n  0. This License applies to any program or other work which contains\na notice placed by the copyright holder saying it may be distributed\nunder the terms of this General Public License.  The \"Program\", below,\nrefers to any such program or work, and a \"work based on the Program\"\nmeans either the Program or any derivative work under copyright law:\nthat is to say, a work containing the Program or a portion of it,\neither verbatim or with modifications and/or translated into another\nlanguage.  (Hereinafter, translation is included without limitation in\nthe term \"modification\".)  Each licensee is addressed as \"you\".\n\nActivities other than copying, distribution and modification are not\ncovered by this License; they are outside its scope.  
The act of\nrunning the Program is not restricted, and the output from the Program\nis covered only if its contents constitute a work based on the\nProgram (independent of having been made by running the Program).\nWhether that is true depends on what the Program does.\n\n  1. You may copy and distribute verbatim copies of the Program's\nsource code as you receive it, in any medium, provided that you\nconspicuously and appropriately publish on each copy an appropriate\ncopyright notice and disclaimer of warranty; keep intact all the\nnotices that refer to this License and to the absence of any warranty;\nand give any other recipients of the Program a copy of this License\nalong with the Program.\n\nYou may charge a fee for the physical act of transferring a copy, and\nyou may at your option offer warranty protection in exchange for a fee.\n\n  2. You may modify your copy or copies of the Program or any portion\nof it, thus forming a work based on the Program, and copy and\ndistribute such modifications or work under the terms of Section 1\nabove, provided that you also meet all of these conditions:\n\n    a) You must cause the modified files to carry prominent notices\n    stating that you changed the files and the date of any change.\n\n    b) You must cause any work that you distribute or publish, that in\n    whole or in part contains or is derived from the Program or any\n    part thereof, to be licensed as a whole at no charge to all third\n    parties under the terms of this License.\n\n    c) If the modified program normally reads commands interactively\n    when run, you must cause it, when started running for such\n    interactive use in the most ordinary way, to print or display an\n    announcement including an appropriate copyright notice and a\n    notice that there is no warranty (or else, saying that you provide\n    a warranty) and that users may redistribute the program under\n    these conditions, and telling the user how to view a copy of this\n  
  License.  (Exception: if the Program itself is interactive but\n    does not normally print such an announcement, your work based on\n    the Program is not required to print an announcement.)\n\nThese requirements apply to the modified work as a whole.  If\nidentifiable sections of that work are not derived from the Program,\nand can be reasonably considered independent and separate works in\nthemselves, then this License, and its terms, do not apply to those\nsections when you distribute them as separate works.  But when you\ndistribute the same sections as part of a whole which is a work based\non the Program, the distribution of the whole must be on the terms of\nthis License, whose permissions for other licensees extend to the\nentire whole, and thus to each and every part regardless of who wrote it.\n\nThus, it is not the intent of this section to claim rights or contest\nyour rights to work written entirely by you; rather, the intent is to\nexercise the right to control the distribution of derivative or\ncollective works based on the Program.\n\nIn addition, mere aggregation of another work not based on the Program\nwith the Program (or with a work based on the Program) on a volume of\na storage or distribution medium does not bring the other work under\nthe scope of this License.\n\n  3. 
You may copy and distribute the Program (or a work based on it,\nunder Section 2) in object code or executable form under the terms of\nSections 1 and 2 above provided that you also do one of the following:\n\n    a) Accompany it with the complete corresponding machine-readable\n    source code, which must be distributed under the terms of Sections\n    1 and 2 above on a medium customarily used for software interchange; or,\n\n    b) Accompany it with a written offer, valid for at least three\n    years, to give any third party, for a charge no more than your\n    cost of physically performing source distribution, a complete\n    machine-readable copy of the corresponding source code, to be\n    distributed under the terms of Sections 1 and 2 above on a medium\n    customarily used for software interchange; or,\n\n    c) Accompany it with the information you received as to the offer\n    to distribute corresponding source code.  (This alternative is\n    allowed only for noncommercial distribution and only if you\n    received the program in object code or executable form with such\n    an offer, in accord with Subsection b above.)\n\nThe source code for a work means the preferred form of the work for\nmaking modifications to it.  For an executable work, complete source\ncode means all the source code for all modules it contains, plus any\nassociated interface definition files, plus the scripts used to\ncontrol compilation and installation of the executable.  
However, as a\nspecial exception, the source code distributed need not include\nanything that is normally distributed (in either source or binary\nform) with the major components (compiler, kernel, and so on) of the\noperating system on which the executable runs, unless that component\nitself accompanies the executable.\n\nIf distribution of executable or object code is made by offering\naccess to copy from a designated place, then offering equivalent\naccess to copy the source code from the same place counts as\ndistribution of the source code, even though third parties are not\ncompelled to copy the source along with the object code.\n\n  4. You may not copy, modify, sublicense, or distribute the Program\nexcept as expressly provided under this License.  Any attempt\notherwise to copy, modify, sublicense or distribute the Program is\nvoid, and will automatically terminate your rights under this License.\nHowever, parties who have received copies, or rights, from you under\nthis License will not have their licenses terminated so long as such\nparties remain in full compliance.\n\n  5. You are not required to accept this License, since you have not\nsigned it.  However, nothing else grants you permission to modify or\ndistribute the Program or its derivative works.  These actions are\nprohibited by law if you do not accept this License.  Therefore, by\nmodifying or distributing the Program (or any work based on the\nProgram), you indicate your acceptance of this License to do so, and\nall its terms and conditions for copying, distributing or modifying\nthe Program or works based on it.\n\n  6. Each time you redistribute the Program (or any work based on the\nProgram), the recipient automatically receives a license from the\noriginal licensor to copy, distribute or modify the Program subject to\nthese terms and conditions.  
You may not impose any further\nrestrictions on the recipients' exercise of the rights granted herein.\nYou are not responsible for enforcing compliance by third parties to\nthis License.\n\n  7. If, as a consequence of a court judgment or allegation of patent\ninfringement or for any other reason (not limited to patent issues),\nconditions are imposed on you (whether by court order, agreement or\notherwise) that contradict the conditions of this License, they do not\nexcuse you from the conditions of this License.  If you cannot\ndistribute so as to satisfy simultaneously your obligations under this\nLicense and any other pertinent obligations, then as a consequence you\nmay not distribute the Program at all.  For example, if a patent\nlicense would not permit royalty-free redistribution of the Program by\nall those who receive copies directly or indirectly through you, then\nthe only way you could satisfy both it and this License would be to\nrefrain entirely from distribution of the Program.\n\nIf any portion of this section is held invalid or unenforceable under\nany particular circumstance, the balance of the section is intended to\napply and the section as a whole is intended to apply in other\ncircumstances.\n\nIt is not the purpose of this section to induce you to infringe any\npatents or other property right claims or to contest validity of any\nsuch claims; this section has the sole purpose of protecting the\nintegrity of the free software distribution system, which is\nimplemented by public license practices.  Many people have made\ngenerous contributions to the wide range of software distributed\nthrough that system in reliance on consistent application of that\nsystem; it is up to the author/donor to decide if he or she is willing\nto distribute software through any other system and a licensee cannot\nimpose that choice.\n\nThis section is intended to make thoroughly clear what is believed to\nbe a consequence of the rest of this License.\n\n  8. 
If the distribution and/or use of the Program is restricted in\ncertain countries either by patents or by copyrighted interfaces, the\noriginal copyright holder who places the Program under this License\nmay add an explicit geographical distribution limitation excluding\nthose countries, so that distribution is permitted only in or among\ncountries not thus excluded.  In such case, this License incorporates\nthe limitation as if written in the body of this License.\n\n  9. The Free Software Foundation may publish revised and/or new versions\nof the General Public License from time to time.  Such new versions will\nbe similar in spirit to the present version, but may differ in detail to\naddress new problems or concerns.\n\nEach version is given a distinguishing version number.  If the Program\nspecifies a version number of this License which applies to it and \"any\nlater version\", you have the option of following the terms and conditions\neither of that version or of any later version published by the Free\nSoftware Foundation.  If the Program does not specify a version number of\nthis License, you may choose any version ever published by the Free Software\nFoundation.\n\n  10. If you wish to incorporate parts of the Program into other free\nprograms whose distribution conditions are different, write to the author\nto ask for permission.  For software which is copyrighted by the Free\nSoftware Foundation, write to the Free Software Foundation; we sometimes\nmake exceptions for this.  Our decision will be guided by the two goals\nof preserving the free status of all derivatives of our free software and\nof promoting the sharing and reuse of software generally.\n\n                            NO WARRANTY\n\n  11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY\nFOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW.  
EXCEPT WHEN\nOTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES\nPROVIDE THE PROGRAM \"AS IS\" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED\nOR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF\nMERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.  THE ENTIRE RISK AS\nTO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU.  SHOULD THE\nPROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING,\nREPAIR OR CORRECTION.\n\n  12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING\nWILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR\nREDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES,\nINCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING\nOUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED\nTO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY\nYOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER\nPROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE\nPOSSIBILITY OF SUCH DAMAGES.\n\n                     END OF TERMS AND CONDITIONS\n\n            How to Apply These Terms to Your New Programs\n\n  If you develop a new program, and you want it to be of the greatest\npossible use to the public, the best way to achieve this is to make it\nfree software which everyone can redistribute and change under these terms.\n\n  To do so, attach the following notices to the program.  
It is safest\nto attach them to the start of each source file to most effectively\nconvey the exclusion of warranty; and each file should have at least\nthe \"copyright\" line and a pointer to where the full notice is found.\n\n    <one line to give the program's name and a brief idea of what it does.>\n    Copyright (C) <year>  <name of author>\n\n    This program is free software; you can redistribute it and/or modify\n    it under the terms of the GNU General Public License as published by\n    the Free Software Foundation; either version 2 of the License, or\n    (at your option) any later version.\n\n    This program is distributed in the hope that it will be useful,\n    but WITHOUT ANY WARRANTY; without even the implied warranty of\n    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n    GNU General Public License for more details.\n\n    You should have received a copy of the GNU General Public License along\n    with this program; if not, write to the Free Software Foundation, Inc.,\n    51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.\n\nAlso add information on how to contact you by electronic and paper mail.\n\nIf the program is interactive, make it output a short notice like this\nwhen it starts in an interactive mode:\n\n    Gnomovision version 69, Copyright (C) year name of author\n    Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'.\n    This is free software, and you are welcome to redistribute it\n    under certain conditions; type `show c' for details.\n\nThe hypothetical commands `show w' and `show c' should show the appropriate\nparts of the General Public License.  Of course, the commands you use may\nbe called something other than `show w' and `show c'; they could even be\nmouse-clicks or menu items--whatever suits your program.\n\nYou should also get your employer (if you work as a programmer) or your\nschool, if any, to sign a \"copyright disclaimer\" for the program, if\nnecessary.  
Here is a sample; alter the names:\n\n  Yoyodyne, Inc., hereby disclaims all copyright interest in the program\n  `Gnomovision' (which makes passes at compilers) written by James Hacker.\n\n  <signature of Ty Coon>, 1 April 1989\n  Ty Coon, President of Vice\n\nThis General Public License does not permit incorporating your program into\nproprietary programs.  If your program is a subroutine library, you may\nconsider it more useful to permit linking proprietary applications with the\nlibrary.  If this is what you want to do, use the GNU Lesser General\nPublic License instead of this License.\n"
  },
  {
    "path": "wa/workloads/stress_ng/__init__.py",
    "content": "#    Copyright 2015, 2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n# pylint: disable=attribute-defined-outside-init\n\nimport os\n\nfrom wa import Workload, Parameter, ConfigError, Executable\nfrom wa.framework.exception import WorkloadError\nfrom wa.utils.exec_control import once\nfrom wa.utils.serializer import yaml\n\n\nclass StressNg(Workload):\n\n    name = 'stress-ng'\n    description = \"\"\"\n    Run the stress-ng benchmark.\n\n    stress-ng will stress test a computer system in various selectable ways. It\n    was designed to exercise various physical subsystems of a computer as well\n    as the various operating system kernel interfaces.\n\n    stress-ng can also measure test throughput rates; this can be useful to\n    observe performance changes across different operating system releases or\n    types of hardware. 
However, it has never been intended to be used as a\n    precise benchmark test suite, so do NOT use it in this manner.\n\n    The official website for stress-ng is at:\n        http://kernel.ubuntu.com/~cking/stress-ng/\n\n    Source code are available from:\n        http://kernel.ubuntu.com/git/cking/stress-ng.git/\n    \"\"\"\n\n    parameters = [\n        Parameter('stressor', kind=str, default='cpu',\n                  allowed_values=['cpu', 'io', 'fork', 'switch', 'vm', 'pipe',\n                                  'yield', 'hdd', 'cache', 'sock', 'fallocate',\n                                  'flock', 'affinity', 'timer', 'dentry',\n                                  'urandom', 'sem', 'open', 'sigq', 'poll'],\n                  description='''\n                  Stress test case name. The cases listed in\n                  allowed values come from the stable release\n                  version 0.01.32. The binary included here\n                  compiled from dev version 0.06.01. Refer to\n                  man page for the definition of each stressor.\n                  '''),\n        Parameter('extra_args', kind=str, default=\"\",\n                  description='''\n                  Extra arguments to pass to the workload.\n\n                  Please note that these are not checked for validity.\n                  '''),\n        Parameter('threads', kind=int, default=0,\n                  description='''\n                  The number of workers to run. 
Specifying a negative\n                  or zero value will select the number of online\n                  processors.\n                  '''),\n        Parameter('duration', kind=int, default=60,\n                  description='''\n                  Timeout for test execution in seconds\n                  ''')\n    ]\n\n    @once\n    def initialize(self, context):\n        if not self.target.is_rooted:\n            raise WorkloadError('stress-ng requires root premissions to run')\n\n        resource = Executable(self, self.target.abi, 'stress-ng')\n        host_exe = context.get_resource(resource)\n        StressNg.binary = self.target.install(host_exe)\n\n    def setup(self, context):\n        self.log = self.target.path.join(self.target.working_directory,\n                                         'stress_ng_output.txt')\n        self.results = self.target.path.join(self.target.working_directory,\n                                             'stress_ng_results.yaml')\n        self.command = ('{} --{} {} {} --timeout {}s --log-file {} --yaml {} '\n                        '--metrics-brief --verbose'\n                        .format(self.binary, self.stressor, self.threads,\n                                self.extra_args, self.duration, self.log,\n                                self.results))\n        self.timeout = self.duration + 10\n\n    def run(self, context):\n        self.output = self.target.execute(self.command, timeout=self.timeout,\n                                          as_root=True)\n\n    def extract_results(self, context):\n        self.host_file_log = os.path.join(context.output_directory,\n                                          'stress_ng_output.txt')\n        self.host_file_results = os.path.join(context.output_directory,\n                                              'stress_ng_results.yaml')\n        self.target.pull(self.log, self.host_file_log)\n        self.target.pull(self.results, self.host_file_results)\n\n        
context.add_artifact('stress_ng_log', self.host_file_log, 'log', \"stress-ng's logfile\")\n        context.add_artifact('stress_ng_results', self.host_file_results, 'raw', \"stress-ng's results\")\n\n    def update_output(self, context):\n        with open(self.host_file_results, 'r') as stress_ng_results:\n            results = yaml.load(stress_ng_results)\n\n        try:\n            metric = results['metrics'][0]['stressor']\n            throughput = results['metrics'][0]['bogo-ops']\n            context.add_metric(metric, throughput, 'ops')\n        # For some stressors like vm, if test duration is too short, stress_ng\n        # may not be able to produce test throughput rate.\n        except TypeError:\n            msg = '{} test throughput rate not found. Please increase test duration and retry.'\n            self.logger.warning(msg.format(self.stressor))\n\n    def validate(self):\n        if self.stressor == 'vm' and self.duration < 60:\n            raise ConfigError('vm test duration needs to be >= 60s.')\n\n    @once\n    def finalize(self, context):\n        if self.uninstall:\n            self.target.uninstall('stress-ng')\n"
  },
  {
    "path": "wa/workloads/sysbench/LICENSE",
    "content": "Included sysbench binary is Free Software distributed under GPLv2:\n\n/* Copyright (C) 2004 MySQL AB\nThis program is free software; you can redistribute it and/or modify\nit under the terms of the GNU General Public License as published by\nthe Free Software Foundation; either version 2 of the License, or\n(at your option) any later version.\nThis program is distributed in the hope that it will be useful,\nbut WITHOUT ANY WARRANTY; without even the implied warranty of\nMERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\nGNU General Public License for more details.\nYou should have received a copy of the GNU General Public License\nalong with this program; if not, write to the Free Software\nFoundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA\n*/\n\nThe full text of the license may be viewed here:\n\nhttp://www.gnu.org/licenses/gpl-2.0.html\n\nSource code for sysbench may be obtained here:\n\nhttps://github.com/akopytov/sysbench\n\n"
  },
  {
    "path": "wa/workloads/sysbench/__init__.py",
    "content": "#    Copyright 2013-2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\n# pylint: disable=E1101,W0201,E0203\n\nimport os\n\nfrom wa import Workload, Parameter, Executable, WorkloadError, ConfigError\nfrom wa.utils.exec_control import once\nfrom wa.utils.misc import parse_value\nfrom wa.utils.types import numeric, cpu_mask\n\n\nclass Sysbench(Workload):\n\n    name = 'sysbench'\n    description = \"\"\"\n    A modular, cross-platform and multi-threaded benchmark tool for evaluating\n    OS parameters that are important for a system running a database under\n    intensive load.\n\n    The idea of this benchmark suite is to quickly get an impression about\n    system performance without setting up complex database benchmarks or\n    even without installing a database at all.\n\n    **Features of SysBench**\n\n       * file I/O performance\n       * scheduler performance\n       * memory allocation and transfer speed\n       * POSIX threads implementation performance\n       * database server performance\n\n\n    See: https://github.com/akopytov/sysbench\n\n    \"\"\"\n\n    parameters = [\n        Parameter('timeout', kind=int, default=300,\n                  description='''\n                  timeout for workload execution (adjust from default if\n                  running on a slow target and/or specifying a large value for\n                  ``max_requests``\n                  '''),\n        Parameter('test', kind=str, 
default='cpu',\n                  allowed_values=['fileio', 'cpu', 'memory', 'threads', 'mutex'],\n                  description='sysbench test to run'),\n        Parameter('threads', kind=int, default=8, aliases=['num_threads'],\n                  description='''\n                  The number of threads sysbench will launch.\n                  '''),\n        Parameter('max_requests', kind=int, default=None,\n                  description='The limit for the total number of requests.'),\n        Parameter('max_time', kind=int, default=None,\n                  description='''\n                  The limit for the total execution time. If neither this nor\n                  ``max_requests`` is specified, this will default to 30\n                  seconds.\n                  '''),\n        Parameter('file_test_mode', default=None,\n                  allowed_values=['seqwr', 'seqrewr', 'seqrd', 'rndrd', 'rndwr', 'rndrw'],\n                  description='''\n                  File test mode to use. This should only be specified if\n                  ``test`` is ``\"fileio\"``; if that is the case and\n                  ``file_test_mode`` is not specified, it will default to\n                  ``\"seqwr\"`` (please see sysbench documentation for\n                  explanation of various modes).\n                  '''),\n        Parameter('cmd_params', kind=str, default='',\n                  description='''\n                  Additional parameters to be passed to sysbench as a single\n                  string.\n                  '''),\n        Parameter('cpus', kind=cpu_mask, default=0, aliases=['taskset_mask'],\n                  description='''\n                  The processes spawned by sysbench will be\n                  pinned to cores as specified by this parameter. 
Can be\n                  provided as a mask, a list of cpus or a sysfs-style string.\n                  '''),\n    ]\n\n    def validate(self):\n        if (self.max_requests is None) and (self.max_time is None):\n            self.max_time = 30\n        if self.max_time and (self.max_time + 10) > self.timeout:\n            self.timeout = self.max_time + 10\n        if self.test == 'fileio' and not self.file_test_mode:\n            self.logger.debug('Test is \"fileio\" and no file_test_mode specified -- using default.')\n            self.file_test_mode = 'seqwr'\n        elif self.test != 'fileio' and self.file_test_mode:\n            raise ConfigError('file_test_mode must not be specified unless test is \"fileio\"')\n\n    @once\n    def initialize(self, context):\n        exe = Executable(self, self.target.abi, 'sysbench')\n        host_binary = context.get_resource(exe)\n        Sysbench.target_binary = self.target.install(host_binary)\n\n    def setup(self, context):\n        self.host_results_file = None\n        params = dict(test=self.test,\n                      num_threads=self.threads)\n        if self.max_requests:\n            params['max_requests'] = self.max_requests\n        if self.max_time:\n            params['max_time'] = self.max_time\n        self.target_results_file = self.target.get_workpath('sysbench_result.txt')\n        self.command = self._build_command(**params)\n\n    def run(self, context):\n        self.target.execute(self.command, timeout=self.timeout)\n\n    def extract_results(self, context):\n        self.host_results_file = os.path.join(context.output_directory, 'sysbench_result.txt')\n        self.target.pull(self.target_results_file, self.host_results_file)\n        context.add_artifact('sysbench_output', self.host_results_file, kind='raw')\n\n    def update_output(self, context):\n        if not os.path.exists(self.host_results_file):\n            self.logger.warning('No results file found.')\n            return\n\n        
with open(self.host_results_file) as fh:\n            find_line_with('General statistics:', fh)\n            extract_metric('total time', next(fh), context.output)\n            extract_metric('total number of events', next(fh), context.output, lower_is_better=False)\n            find_line_with('response time:', fh)\n            extract_metric('min', next(fh), context.output, 'response time ')\n            extract_metric('avg', next(fh), context.output, 'response time ')\n            extract_metric('max', next(fh), context.output, 'response time ')\n            extract_metric('approx.  95 percentile', next(fh), context.output)\n            find_line_with('Threads fairness:', fh)\n            extract_threads_fairness_metric('events', next(fh), context.output)\n            extract_threads_fairness_metric('execution time', next(fh), context.output)\n\n    def teardown(self, context):\n        if self.cleanup_assets:\n            self.target.remove(self.target_results_file)\n\n    @once\n    def finalize(self, context):\n        if self.uninstall:\n            self.target.uninstall('sysbench')\n\n    def _build_command(self, **parameters):\n        param_strings = ['--{}={}'.format(k.replace('_', '-'), v)\n                         for k, v in parameters.items()]\n        if self.file_test_mode:\n            param_strings.append('--file-test-mode={}'.format(self.file_test_mode))\n        sysbench_command = '{} {} {} run'.format(self.target_binary, ' '.join(param_strings), self.cmd_params)\n        if self.cpus:\n            taskset_string = '{} taskset {} '.format(self.target.busybox, self.cpus.mask())\n        else:\n            taskset_string = ''\n        return 'cd {} && {} {} > sysbench_result.txt'.format(self.target.working_directory, taskset_string, sysbench_command)\n\n\n# Utility functions\n\ndef find_line_with(text, fh):\n    for line in fh:\n        if text in line:\n            return\n    message = 'Could not extract sysbench results from {}; did not see 
\"{}\"'\n    raise WorkloadError(message.format(fh.name, text))\n\n\ndef extract_metric(metric, line, output, prefix='', lower_is_better=True):\n    try:\n        name, value_part = [part.strip() for part in line.split(':')]\n        if name != metric:\n            message = 'Name mismatch: expected \"{}\", got \"{}\"'\n            raise WorkloadError(message.format(metric, name.strip()))\n        if not value_part or not value_part[0].isdigit():\n            raise ValueError('value part does not start with a digit: {}'.format(value_part))\n        idx = -1\n        if not value_part[idx].isdigit():  # units detected at the end of the line\n            while not value_part[idx - 1].isdigit():\n                idx -= 1\n            value = numeric(value_part[:idx])\n            units = value_part[idx:]\n        else:\n            value = numeric(value_part)\n            units = None\n        output.add_metric(prefix + metric,\n                          value, units, lower_is_better=lower_is_better)\n    except Exception as e:\n        message = 'Could not extract sysbench metric \"{}\"; got \"{}\"'\n        raise WorkloadError(message.format(prefix + metric, e))\n\n\ndef extract_threads_fairness_metric(metric, line, output):\n    try:\n        name_part, value_part = [part.strip() for part in line.split(':')]\n        name = name_part.split('(')[0].strip()\n        if name != metric:\n            message = 'Name mismatch: expected \"{}\", got \"{}\"'\n            raise WorkloadError(message.format(metric, name))\n        avg, stddev = [numeric(v) for v in value_part.split('/')]\n        output.add_metric('thread fairness {} avg'.format(metric), avg)\n        output.add_metric('thread fairness {} stddev'.format(metric),\n                          stddev, lower_is_better=True)\n    except Exception as e:\n        message = 'Could not extract sysbench metric \"{}\"; got \"{}\"'\n        raise WorkloadError(message.format(metric, e))\n"
  },
  {
    "path": "wa/workloads/templerun2/__init__.py",
    "content": "#    Copyright 2013-2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\n\nfrom wa import ApkReventWorkload\n\n\nclass TempleRun2(ApkReventWorkload):\n\n    name = 'templerun2'\n    package_names = ['com.imangi.templerun2']\n    description = \"\"\"\n    Temple Run 2 game.\n\n    Sequel to Temple Run. 3D on-the-rails racer.\n    \"\"\"\n    view = 'SurfaceView - com.imangi.templerun2/com.imangi.unityactivity.ImangiUnityNativeActivity'\n"
  },
  {
    "path": "wa/workloads/the_chase/__init__.py",
    "content": "#    Copyright 2013-2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\n# pylint: disable=E1101\nfrom wa import ApkWorkload, Parameter\n\n\nclass TheChase(ApkWorkload):\n\n    name = 'thechase'\n    description = \"\"\"\n    The Chase demo showcasing the capabilities of Unity game engine.\n\n    This demo, is a static video-like game demo, that demonstrates advanced features\n    of the unity game engine. It loops continuously until terminated.\n\n    \"\"\"\n\n    package_names = ['com.unity3d.TheChase']\n    install_timeout = 200\n    view = 'SurfaceView - com.unity3d.TheChase/com.unity3d.player.UnityPlayerNativeActivity'\n\n    parameters = [\n        Parameter('duration', kind=int, default=70,\n                  description=('Duration, in seconds, note that the demo loops the same (roughly) 60 '\n                               'second scene until stopped.')),\n    ]\n\n    def run(self, context):\n        self.target.sleep(self.duration)\n"
  },
  {
    "path": "wa/workloads/uibench/__init__.py",
    "content": "#    Copyright 2013-2019 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nfrom wa import Parameter, ApkWorkload\n\n\nclass Uibench(ApkWorkload):\n\n    name = 'uibench'\n    description = \"\"\"\n        Runs a particular activity of the UIBench_ workload suite. The suite\n        is provided by Google as a testbench for the Android UI.\n\n        .. _UIBench: https://android.googlesource.com/platform/frameworks/base/+/refs/heads/master/tests/UiBench/\n    \"\"\"\n    package_names = ['com.android.test.uibench']\n    loading_time = 1\n\n    parameters = [\n        Parameter('activity', kind=str,\n                  description=\"\"\"\n                  The UIBench activity to be run. Each activity corresponds to\n                  a test. If this parameter is ignored, the application is\n                  launched in its main menu. Please note that the available\n                  activities vary between versions of UIBench (which follow\n                  AOSP versioning) and the availability of the services under\n                  test may depend on the version of the target Android. 
We\n                  recommend using the APK of UIBench corresponding to the\n                  Android version, enforced through the ``version`` parameter to\n                  this workload.\n                  \"\"\"),\n        Parameter('duration', kind=int, default=10,\n                  description=\"\"\"\n                  As activities do not finish, this workload will terminate\n                  UIBench after the given duration.\n                  \"\"\"),\n    ]\n\n    def run(self, context):\n        super(Uibench, self).run(context)\n        self.target.sleep(self.duration)\n"
  },
  {
    "path": "wa/workloads/uibenchjanktests/__init__.py",
    "content": "#    Copyright 2019 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\nimport re\n\nfrom wa import Parameter, ApkWorkload, PackageHandler, TestPackageHandler, ConfigError\nfrom wa.utils.types import list_or_string\nfrom wa.framework.exception import WorkloadError\n\n\nclass Uibenchjanktests(ApkWorkload):\n\n    name = 'uibenchjanktests'\n    description = \"\"\"\n        Runs a particular test (or list of tests) of the UIBench JankTests_\n        test suite. The suite is provided by Google as an automated version\n        of the UIBench testbench for the Android UI.\n        The workload supports running the default set of tests without\n        restarting the app or running an arbitrary set of tests with\n        restarting the app in between each test.\n\n        .. 
_JankTests: https://android.googlesource.com/platform/platform_testing/+/master/tests/jank/uibench/src/com/android/uibench/janktests\n    \"\"\"\n    package_names = ['com.android.uibench.janktests']\n    _DUT_PACKAGE = 'com.android.test.uibench'\n    _DEFAULT_CLASS = 'UiBenchJankTests'\n    _OUTPUT_SECTION_REGEX = re.compile(\n        r'(\\s*INSTRUMENTATION_STATUS: gfx-[\\w-]+=[-+\\d.]+\\n)+'\n        r'\\s*INSTRUMENTATION_STATUS_CODE: (?P<code>[-+\\d]+)\\n?', re.M)\n    _OUTPUT_GFXINFO_REGEX = re.compile(\n        r'INSTRUMENTATION_STATUS: (?P<name>[\\w-]+)=(?P<value>[-+\\d.]+)')\n\n    parameters = [\n        Parameter('tests', kind=list_or_string,\n                  description=\"\"\"\n                  Tests to be run. Defaults to running every available\n                  subtest in alphabetical order. The app will be restarted\n                  for each subtest, unlike when using full=True.\n                  \"\"\", default=None, aliases=['test']),\n        Parameter('full', kind=bool, default=False,\n                  description=\"\"\"\n                  Runs the full suite of tests that the app defaults to\n                  when no subtests are specified. The actual tests and their\n                  order might depend on the version of the app. The subtests\n                  will be run back to back without restarting the app in between.\n                  \"\"\"),\n        Parameter('wait', kind=bool, default=True,\n                  description='Forces am instrument to wait until the '\n                  'instrumentation terminates before terminating itself. The '\n                  'net effect is to keep the shell open until the tests have '\n                  'finished. This flag is not required, but if you do not use '\n                  'it, you will not see the results of your tests.'),\n        Parameter('raw', kind=bool, default=True,\n                  description='Outputs results in raw format. 
Use this flag '\n                  'when you want to collect performance measurements, so that '\n                  'they are not formatted as test results. This flag is '\n                  'designed for use with the flag -e perf true.'),\n        Parameter('instrument_args', kind=dict, default={},\n                  description='Extra arguments for am instrument.'),\n        Parameter('no_hidden_api_checks', kind=bool, default=False,\n                  description='Disables restrictions on the use of hidden '\n                  'APIs.'),\n    ]\n\n    def __init__(self, target, **kwargs):\n        super(Uibenchjanktests, self).__init__(target, **kwargs)\n\n        if 'iterations' not in self.instrument_args:\n            self.instrument_args['iterations'] = 1\n\n        self.dut_apk = PackageHandler(\n            self,\n            package_name=self._DUT_PACKAGE,\n            variant=self.variant,\n            strict=self.strict,\n            version=self.version,\n            force_install=self.force_install,\n            install_timeout=self.install_timeout,\n            uninstall=self.uninstall,\n            exact_abi=self.exact_abi,\n            prefer_host_package=self.prefer_host_package,\n            clear_data_on_reset=self.clear_data_on_reset)\n        self.apk = TestPackageHandler(\n            self,\n            package_name=self.package_name,\n            variant=self.variant,\n            strict=self.strict,\n            version=self.version,\n            force_install=self.force_install,\n            install_timeout=self.install_timeout,\n            uninstall=self.uninstall,\n            exact_abi=self.exact_abi,\n            prefer_host_package=self.prefer_host_package,\n            clear_data_on_reset=self.clear_data_on_reset,\n            instrument_args=self.instrument_args,\n            raw_output=self.raw,\n            instrument_wait=self.wait,\n            no_hidden_api_checks=self.no_hidden_api_checks)\n\n    def validate(self):\n        
if self.full and self.tests is not None:\n            raise ConfigError(\"Can't select subtests while 'full' is True\")\n\n    def initialize(self, context):\n        super(Uibenchjanktests, self).initialize(context)\n        self.dut_apk.initialize(context)\n        self.dut_apk.initialize_package(context)\n\n        self.output = {}\n\n        # Full run specified, don't select subtests\n        if self.full:\n            self.apk.args['class'] = '{}.{}'.format(\n                self.package_names[0], self._DEFAULT_CLASS\n            )\n            return\n\n        self.available_tests = {\n            test: cl for test, cl in self.apk.apk_info.methods\n            if test.startswith('test')\n        }\n\n        # default to running all tests in alphabetical order\n        # pylint: disable=access-member-before-definition\n        if not self.tests:\n            self.tests = sorted(self.available_tests.keys())\n        # raise error if any of the tests are not available\n        elif any([t not in self.available_tests for t in self.tests]):\n            msg = 'Unknown test(s) specified. 
Known tests: {}'\n            known_tests = '\\n'.join(self.available_tests.keys())\n            raise ValueError(msg.format(known_tests))\n\n    def run(self, context):\n        # Full run, just run the activity directly\n        if self.full:\n            self.apk.start_activity()\n            self.apk.wait_instrument_over()\n            self.output['full'] = self.apk.instrument_output\n            return\n\n        for test in self.tests:\n            self.apk.args['class'] = '{}.{}#{}'.format(\n                self.package_names[0],\n                self.available_tests[test], test\n            )\n            self.apk.setup(context)\n            self.apk.start_activity()\n            try:\n                self.apk.wait_instrument_over()\n            except WorkloadError as e:\n                self.logger.warning(str(e))\n            self.output[test] = self.apk.instrument_output\n\n    def update_output(self, context):\n        super(Uibenchjanktests, self).update_output(context)\n        for test, test_output in self.output.items():\n            for section in self._OUTPUT_SECTION_REGEX.finditer(test_output):\n                if int(section.group('code')) != -1:\n                    msg = 'Run failed (INSTRUMENTATION_STATUS_CODE: {}). See log.'\n                    raise RuntimeError(msg.format(section.group('code')))\n                for metric in self._OUTPUT_GFXINFO_REGEX.finditer(section.group()):\n                    context.add_metric(metric.group('name'), metric.group('value'),\n                                       classifiers={'test_name': test})\n\n    def teardown(self, context):\n        super(Uibenchjanktests, self).teardown(context)\n        self.dut_apk.teardown()\n"
  },
  {
    "path": "wa/workloads/vellamo/__init__.py",
    "content": "#    Copyright 2014-2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\nimport os\nimport json\nimport re\n\nfrom html.parser import HTMLParser\n\nfrom wa import ApkUiautoWorkload, Parameter\nfrom wa.utils.types import list_of_strs\nfrom wa.framework.exception import WorkloadError\n\n\nclass Vellamo(ApkUiautoWorkload):\n\n    name = 'vellamo'\n    description = \"\"\"\n    Android benchmark designed by Qualcomm.\n\n    Vellamo began as a mobile web benchmarking tool that today has expanded\n    to include three primary chapters. The Browser Chapter evaluates mobile\n    web browser performance, the Multicore chapter measures the synergy of\n    multiple CPU cores, and the Metal Chapter measures the CPU subsystem\n    performance of mobile processors. 
Through click-and-go test suites,\n    organized by chapter, Vellamo is designed to evaluate: UX, 3D graphics,\n    and memory read/write and peak bandwidth performance, and much more!\n\n    Note: Vellamo v3.0 fails to run on Juno\n\n    \"\"\"\n    package_names = ['com.quicinc.vellamo']\n    run_timeout = 15 * 60\n    benchmark_types = {\n        '2.0.3': ['html5', 'metal'],\n        '3.0': ['Browser', 'Metal', 'Multi'],\n        '3.2.4': ['Browser', 'Metal', 'Multi'],\n    }\n    supported_versions = list(benchmark_types.keys())\n    summary_metrics = None\n\n    parameters = [\n        Parameter('version', kind=str, allowed_values=supported_versions, override=True,\n                  description=('Specify the version of Vellamo to be run. '\n                               'If not specified, the latest available version will be used.')),\n        Parameter('benchmarks', kind=list_of_strs, allowed_values=benchmark_types['3.0'], default=benchmark_types['3.0'],\n                  description=('Specify which benchmark sections of Vellamo to be run. Only valid on version 3.0 and newer.'\n                               '\\nNOTE: Browser benchmark can be problematic and seem to hang,'\n                               'just wait and it will progress after ~5 minutes')),\n        Parameter('browser', kind=int, default=1,\n                  description=('Specify which of the installed browsers will be used for the tests. The number refers to '\n                               'the order in which browsers are listed by Vellamo. E.g. ``1`` will select the first browser '\n                               'listed, ``2`` -- the second, etc. 
Only valid for version ``3.0``.'))\n    ]\n\n    def setup(self, context):\n        self.gui.uiauto_params['version'] = self.version\n        self.gui.uiauto_params['browserToUse'] = self.browser\n        self.gui.uiauto_params['metal'] = 'Metal' in self.benchmarks\n        self.gui.uiauto_params['browser'] = 'Browser' in self.benchmarks\n        self.gui.uiauto_params['multicore'] = 'Multi' in self.benchmarks\n        super(Vellamo, self).setup(context)\n\n    def initialize(self, context):\n        super(Vellamo, self).initialize(context)\n        if self.version == '2.0.3' or not self.benchmarks:  # pylint: disable=access-member-before-definition\n            self.benchmarks = self.benchmark_types[self.version]  # pylint: disable=attribute-defined-outside-init\n        else:\n            for benchmark in self.benchmarks:\n                if benchmark not in self.benchmark_types[self.version]:\n                    raise WorkloadError('Version {} does not support {} benchmarks'.format(self.version, benchmark))\n\n    def update_output(self, context):\n        super(Vellamo, self).update_output(context)\n\n        # Get total scores from logcat\n        self.non_root_update_output(context)\n\n        if not self.target.is_rooted:\n            return\n        elif self.version == '3.0.0':\n            self.update_output_v3(context)\n        elif self.version == '3.2.4':\n            self.update_output_v3_2(context)\n\n    def update_output_v3(self, context):\n        for test in self.benchmarks:  # Get all scores from HTML files\n            filename = None\n            if test == \"Browser\":\n                result_folder = self.target.path.join(self.target.package_data_directory,\n                                                      self.apk.apk_info.package, 'files')\n                for result_file in self.target.listdir(result_folder, as_root=True):\n                    if result_file.startswith(\"Browser\"):\n                        filename = result_file\n  
          else:\n                filename = '{}_results.html'.format(test)\n\n            device_file = self.target.path.join(self.target.package_data_directory,\n                                                self.apk.apk_info.package, 'files', filename)\n            host_file = os.path.join(context.output_directory, filename)\n            self.target.pull(device_file, host_file, as_root=True)\n            with open(host_file) as fh:\n                parser = VellamoResultParser()\n                parser.feed(fh.read())\n                for benchmark in parser.benchmarks:\n                    benchmark.name = benchmark.name.replace(' ', '_')\n                    context.add_metric('{}_Total'.format(benchmark.name),\n                                       benchmark.score)\n                    for name, score in list(benchmark.metrics.items()):\n                        name = name.replace(' ', '_')\n                        context.add_metric('{}_{}'.format(benchmark.name,\n                                                          name), score)\n            context.add_artifact('vellamo_output', kind='raw',\n                                 path=filename)\n\n    def update_output_v3_2(self, context):\n        device_file = self.target.path.join(self.target.package_data_directory,\n                                            self.apk.apk_info.package,\n                                            'files',\n                                            'chapterscores.json')\n        host_file = os.path.join(context.output_directory, 'vellamo.json')\n        self.target.pull(device_file, host_file, as_root=True)\n        context.add_artifact('vellamo_output', kind='raw', path=host_file)\n        # context.add_iteration_artifact('vellamo_output', kind='raw', path=host_file)\n        with open(host_file) as results_file:\n            data = json.load(results_file)\n            for chapter in data:\n                for result in chapter['benchmark_results']:\n                
    name = result['id']\n                    score = result['score']\n                    context.add_metric(name, score)\n\n    def non_root_update_output(self, context):\n        failed = []\n        logcat_file = context.get_artifact_path('logcat')\n        with open(logcat_file, errors='replace') as fh:\n            iteration_result_regex = re.compile(\"VELLAMO RESULT: (Browser|Metal|Multicore) (\\d+)\")\n            for line in fh:\n                if 'VELLAMO ERROR:' in line:\n                    msg = \"Browser crashed during benchmark, results may not be accurate\"\n                    self.logger.warning(msg)\n                result = iteration_result_regex.findall(line)\n                if result:\n                    for (metric, score) in result:\n                        if not score:\n                            failed.append(metric)\n                        else:\n                            context.add_metric(metric, score)\n        if failed:\n            raise WorkloadError(\"The following benchmark groups failed: {}\".format(\", \".join(failed)))\n\n\nclass VellamoResult(object):\n\n    def __init__(self, name):\n        self.name = name\n        self.score = None\n        self.metrics = {}\n\n    def add_metric(self, data):\n        split_data = data.split(\":\")\n        name = split_data[0].strip()\n        score = split_data[1].strip()\n\n        if name in self.metrics:\n            raise KeyError(\"A metric of that name is already present\")\n        self.metrics[name] = float(score)\n\n\nclass VellamoResultParser(HTMLParser):\n\n    class StopParsingException(Exception):\n        pass\n\n    def __init__(self):\n        HTMLParser.__init__(self)\n        self.inside_div = False\n        self.inside_span = 0\n        self.inside_li = False\n        self.got_data = False\n        self.failed = False\n        self.benchmarks = []\n\n    def feed(self, data):\n        try:\n            HTMLParser.feed(self, data)\n        except 
self.StopParsingException:\n            pass\n\n    def handle_starttag(self, tag, attrs):\n        if tag == 'div':\n            self.inside_div = True\n        if tag == 'span':\n            self.inside_span += 1\n        if tag == 'li':\n            self.inside_li = True\n\n    def handle_endtag(self, tag):\n        if tag == 'div':\n            self.inside_div = False\n            self.inside_span = 0\n            self.got_data = False\n            self.failed = False\n        if tag == 'li':\n            self.inside_li = False\n\n    def handle_data(self, data):\n        if self.inside_div and not self.failed:\n            if \"Problem\" in data:\n                self.failed = True\n            elif self.inside_span == 1:\n                self.benchmarks.append(VellamoResult(data))\n            elif self.inside_span == 3 and not self.got_data:\n                self.benchmarks[-1].score = int(data)\n                self.got_data = True\n            elif self.inside_li and self.got_data:\n                if 'failed' not in data:\n                    self.benchmarks[-1].add_metric(data)\n                else:\n                    self.failed = True\n\n    def error(self, message):\n        raise WorkloadError('Error parsing raw output: {}'.format(message))\n"
  },
  {
    "path": "wa/workloads/vellamo/uiauto/app/build.gradle",
    "content": "apply plugin: 'com.android.application'\n\ndef packageName = \"com.arm.wa.uiauto.vellamo\"\n\nandroid {\n    compileSdkVersion 28\n    buildToolsVersion \"28.0.3\"\n    defaultConfig {\n        applicationId \"${packageName}\"\n        minSdkVersion 18\n        targetSdkVersion 28\n        testInstrumentationRunner \"android.support.test.runner.AndroidJUnitRunner\"\n    }\n    buildTypes {\n        applicationVariants.all { variant ->\n            variant.outputs.each { output ->\n                output.outputFileName = \"${packageName}.apk\"\n            }\n        }\n    }\n}\n\ndependencies {\n    implementation fileTree(dir: 'libs', include: ['*.jar'])\n    implementation 'com.android.support.test:runner:0.5'\n    implementation 'com.android.support.test:rules:0.5'\n    implementation 'com.android.support.test.uiautomator:uiautomator-v18:2.1.2'\n    implementation(name: 'uiauto', ext:'aar')\n}\n\nrepositories {\n    flatDir {\n        dirs 'libs'\n    }\n}\n"
  },
  {
    "path": "wa/workloads/vellamo/uiauto/app/src/main/AndroidManifest.xml",
    "content": "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n<manifest xmlns:android=\"http://schemas.android.com/apk/res/android\"\n    package=\"com.arm.wa.uiauto.vellamo\"\n    android:versionCode=\"1\"\n    android:versionName=\"1.0\">\n\n\n    <instrumentation\n        android:name=\"android.support.test.runner.AndroidJUnitRunner\"\n        android:targetPackage=\"${applicationId}\"/>\n\n</manifest>\n\n"
  },
  {
    "path": "wa/workloads/vellamo/uiauto/app/src/main/java/com/arm/wa/uiauto/vellamo/UiAutomation.java",
    "content": "\n/*    Copyright 2014-2017 ARM Limited\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n*/\n\n\npackage com.arm.wa.uiauto.vellamo;\n\nimport android.os.Bundle;\nimport android.support.test.runner.AndroidJUnit4;\nimport android.support.test.uiautomator.UiObject;\nimport android.support.test.uiautomator.UiObjectNotFoundException;\nimport android.support.test.uiautomator.UiSelector;\nimport android.support.test.uiautomator.UiWatcher;\nimport android.util.Log;\n\nimport com.arm.wa.uiauto.BaseUiAutomation;\n\nimport org.junit.Test;\nimport org.junit.Before;\nimport org.junit.runner.RunWith;\n\nimport java.util.ArrayList;\nimport java.util.concurrent.TimeUnit;\n\n@RunWith(AndroidJUnit4.class)\npublic class UiAutomation extends BaseUiAutomation {\n\n    public static String TAG = \"vellamo\";\n    public static ArrayList<String> scores = new ArrayList();\n    public static Boolean wasError = false;\n\n    protected Bundle parameters;\n    protected String version;\n    protected Boolean browser;\n    protected Boolean metal;\n    protected Boolean multicore;\n    protected Integer browserToUse;\n    protected String packageID;\n\n    @Before\n    public void initialize(){\n        parameters = getParams();\n        packageID = getPackageID(parameters);\n        version = parameters.getString(\"version\");\n        browser = parameters.getBoolean(\"browser\");\n        metal = parameters.getBoolean(\"metal\");\n        multicore = 
parameters.getBoolean(\"multicore\");\n        browserToUse = parameters.getInt(\"browserToUse\") - 1;\n    }\n\n    @Test\n    public void setup() throws Exception {\n        dismissAndroidVersionPopup();\n        dismissEULA();\n        if (version.equals(\"2.0.3\")) {\n            dissmissWelcomebanner();\n        } else {\n            dismissLetsRoll();\n            if (version.equals(\"3.2.4\")) {\n                dismissArrow();\n            }\n        }\n    }\n\n    @Test\n    public void runWorkload() throws Exception {\n        if (version.equals(\"2.0.3\")) {\n            startTest();\n            dismissNetworkConnectionDialogIfNecessary();\n            dismissExplanationDialogIfNecessary();\n            waitForTestCompletion(15 * 60, packageID + \"act_ba_results_btn_no\");\n        } else {\n             if (browser) {\n                 startBrowserTest(browserToUse, version);\n                 proccessTest(\"Browser\");\n             }\n             if (multicore) {\n                 startTestV3(1, version);\n                 proccessTest(\"Multicore\");\n             }\n            if (metal) {\n                startTestV3(2, version);\n                proccessTest(\"Metal\");\n            }\n        }\n    }\n\n    @Test\n    public void extractResults() throws Exception {\n        for(String result : scores){\n            Log.v(TAG, String.format(\"VELLAMO RESULT: %s\", result));\n        }\n        if (wasError) Log.v(\"vellamoWatcher\", \"VELLAMO ERROR: Something crashed while running browser benchmark\");\n    }\n\n    public void startTest() throws Exception {\n        UiSelector selector = new UiSelector();\n        UiObject runButton = mDevice.findObject(selector.textContains(\"Run All Chapters\"));\n\n        if (!runButton.waitForExists(TimeUnit.SECONDS.toMillis(5))) {\n            UiObject pager = mDevice.findObject(selector.className(\"android.support.v4.view.ViewPager\"));\n            pager.swipeLeft(2);\n            if 
(!runButton.exists()) {\n                throw new UiObjectNotFoundException(\"Could not find \\\"Run All Chapters\\\" button.\");\n            }\n        }\n        runButton.click();\n    }\n\n    public void startBrowserTest(int browserToUse, String version) throws Exception {\n        //Ensure chrome is selected as \"browser\" fails to run the benchmark\n        UiSelector selector = new UiSelector();\n        UiObject browserToUseButton = mDevice.findObject(selector.className(\"android.widget.ImageButton\")\n                                               .longClickable(true).instance(browserToUse));\n        UiObject browserButton = mDevice.findObject(selector.className(\"android.widget.ImageButton\")\n                                               .longClickable(true).selected(true));\n        //Disable browsers\n        while(browserButton.exists()) browserButton.click();\n        if (browserToUseButton.waitForExists(TimeUnit.SECONDS.toMillis(5))) {\n            if (browserToUseButton.exists()) {\n                browserToUseButton.click();\n            }\n        }\n\n        //enable a watcher to dismiss browser dialogs\n        UiWatcher stoppedWorkingDialogWatcher = new UiWatcher() {\n            @Override\n            public boolean checkForCondition() {\n                UiObject stoppedWorkingDialog = mDevice.findObject(new UiSelector().textStartsWith(\"Unfortunately\"));\n                if(stoppedWorkingDialog.exists()){\n                    wasError = true;\n                    UiObject okButton = mDevice.findObject(new UiSelector().className(\"android.widget.Button\").text(\"OK\"));\n                    try {\n                        okButton.click();\n                    } catch (UiObjectNotFoundException e) {\n                        // TODO Auto-generated catch block\n                        e.printStackTrace();\n                    }\n                    return (stoppedWorkingDialog.waitUntilGone(25000));\n                }\n                
return false;\n            }\n        };\n        // Register watcher\n        mDevice.registerWatcher(\"stoppedWorkingDialogWatcher\", stoppedWorkingDialogWatcher);\n\n        // Run watcher\n        mDevice.runWatchers();\n\n        startTestV3(0, version);\n    }\n\n    public void startTestV3(int run, String version) throws Exception {\n        UiSelector selector = new UiSelector();\n\n        UiObject thirdRunButton = mDevice.findObject(selector.resourceId(packageID + \"card_launcher_run_button\").instance(2));\n        if (!thirdRunButton.waitForExists(TimeUnit.SECONDS.toMillis(5))) {\n            if (!thirdRunButton.exists()) {\n                throw new UiObjectNotFoundException(\"Could not find three \\\"Run\\\" buttons.\");\n            }\n        }\n\n        //Run benchmarks\n        UiObject runButton = mDevice.findObject(selector.resourceId(packageID + \"card_launcher_run_button\").instance(run));\n        if (!runButton.waitForExists(TimeUnit.SECONDS.toMillis(5))) {\n            if (!runButton.exists()) {\n                throw new UiObjectNotFoundException(\"Could not find correct \\\"Run\\\" button.\");\n            }\n        }\n        runButton.click();\n\n        //Skip tutorial screen\n        if (version.equals(\"3.2.4\")) {\n            UiObject gotItButton = mDevice.findObject(selector.textContains(\"Got it\"));\n            if (!gotItButton.waitForExists(TimeUnit.SECONDS.toMillis(5))) {\n                if (!gotItButton.exists()) {\n                    throw new UiObjectNotFoundException(\"Could not find correct \\\"GOT IT\\\" button.\");\n                }\n            }\n            gotItButton.click();\n        }\n\n        else {\n            UiObject swipeScreen = mDevice.findObject(selector.textContains(\"Swipe left to continue\"));\n            if (!swipeScreen.waitForExists(TimeUnit.SECONDS.toMillis(5))) {\n                if (!swipeScreen.exists()) {\n                    throw new UiObjectNotFoundException(\"Could not find 
\\\"Swipe screen\\\".\");\n                }\n            }\n            sleep(1);\n            swipeScreen.swipeLeft(2);\n            sleep(1);\n            swipeScreen.swipeLeft(2);\n        }\n\n    }\n\n    public void proccessTest(String metric) throws Exception{\n        waitForTestCompletion(15 * 60, packageID + \"button_no\");\n\n        //Remove watcher\n        mDevice.removeWatcher(\"stoppedWorkingDialogWatcher\");\n\n        getScore(metric, packageID + \"card_score_score\");\n        mDevice.pressBack();\n        mDevice.pressBack();\n        mDevice.pressBack();\n    }\n\n    public void getScore(String metric, String resourceID) throws Exception {\n        UiSelector selector = new UiSelector();\n        UiObject score = mDevice.findObject(selector.resourceId(resourceID));\n        if (!score.waitForExists(TimeUnit.SECONDS.toMillis(5))) {\n            if (!score.exists()) {\n                throw new UiObjectNotFoundException(\"Could not find score on screen.\");\n            }\n        }\n        scores.add(metric + \" \" + score.getText().trim());\n    }\n\n    public void waitForTestCompletion(int timeout, String resourceID) throws Exception {\n        UiSelector selector = new UiSelector();\n        UiObject resultsNoButton = mDevice.findObject(selector.resourceId(resourceID));\n        if (!resultsNoButton.waitForExists(TimeUnit.SECONDS.toMillis(timeout))) {\n            throw new UiObjectNotFoundException(\"Did not see results screen.\");\n        }\n\n    }\n\n    public void dismissEULA() throws Exception {\n        UiSelector selector = new UiSelector();\n        waitText(\"Vellamo EULA\");\n        UiObject acceptButton = mDevice.findObject(selector.textMatches(\"Accept|ACCEPT\")\n                                                     .className(\"android.widget.Button\"));\n        if (acceptButton.exists()) {\n            acceptButton.click();\n        }\n    }\n\n    public void dissmissWelcomebanner() throws Exception {\n        
UiSelector selector = new UiSelector();\n        UiObject welcomeBanner = mDevice.findObject(selector.textContains(\"WELCOME\"));\n        if (welcomeBanner.waitForExists(TimeUnit.SECONDS.toMillis(5))) {\n            UiObject pager = mDevice.findObject(selector.className(\"android.support.v4.view.ViewPager\"));\n            pager.swipeLeft(2);\n            pager.swipeLeft(2);\n        }\n    }\n\n    public void dismissLetsRoll() throws Exception {\n        UiSelector selector = new UiSelector();\n        UiObject letsRollButton = mDevice.findObject(selector.className(\"android.widget.Button\")\n                                                       .textContains(\"LET'S ROLL\"));\n        if (!letsRollButton.waitForExists(TimeUnit.SECONDS.toMillis(5))) {\n            if (!letsRollButton.exists()) {\n            // As a fall-back look for the old capitalization\n            letsRollButton = mDevice.findObject(selector.className(\"android.widget.Button\")\n                              .textContains(\"Let's Roll\"));\n            if (!letsRollButton.exists()) {\n            throw new UiObjectNotFoundException(\"Could not find \\\"Let's Roll\\\" button.\");\n            }\n            }\n        }\n        letsRollButton.click();\n    }\n\n    public void dismissArrow() throws Exception {\n        UiSelector selector = new UiSelector();\n        UiObject cardContainer = mDevice.findObject(selector.resourceId(packageID + \"cards_container\")) ;\n        if (!cardContainer.waitForExists(TimeUnit.SECONDS.toMillis(5))) {\n            if (!cardContainer.exists()) {\n                throw new UiObjectNotFoundException(\"Could not find vellamo main screen\");\n            }\n        }\n    }\n\n    public void dismissNetworkConnectionDialogIfNecessary() throws Exception {\n        UiSelector selector = new UiSelector();\n        UiObject dialog = mDevice.findObject(selector.className(\"android.widget.TextView\")\n                                               
.textContains(\"No Network Connection\"));\n        if (dialog.exists()) {\n            UiObject yesButton = mDevice.findObject(selector.className(\"android.widget.Button\")\n                                                      .text(\"Yes\"));\n            yesButton.click();\n        }\n    }\n\n    public void dismissExplanationDialogIfNecessary() throws Exception {\n        UiSelector selector = new UiSelector();\n        UiObject dialog = mDevice.findObject(selector.className(\"android.widget.TextView\")\n                                               .textContains(\"Benchmarks Explanation\"));\n        if (dialog.exists()) {\n            UiObject noButton = mDevice.findObject(selector.className(\"android.widget.Button\")\n                                                     .text(\"No\"));\n            noButton.click();\n        }\n    }\n}\n"
  },
  {
    "path": "wa/workloads/vellamo/uiauto/build.gradle",
    "content": "// Top-level build file where you can add configuration options common to all sub-projects/modules.\n\nbuildscript {\n    repositories {\n        jcenter()\n        google()\n    }\n    dependencies {\n        classpath 'com.android.tools.build:gradle:7.2.1'\n\n        // NOTE: Do not place your application dependencies here; they belong\n        // in the individual module build.gradle files\n    }\n}\n\nallprojects {\n    repositories {\n        jcenter()\n        google()\n    }\n}\n\ntask clean(type: Delete) {\n    delete rootProject.buildDir\n}\n"
  },
  {
    "path": "wa/workloads/vellamo/uiauto/build.sh",
    "content": "#!/bin/bash\n#    Copyright 2013-2017 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\nset -e\n\n# CD into build dir if possible - allows building from any directory\nscript_path='.'\nif `readlink -f $0 &>/dev/null`; then\n    script_path=`readlink -f $0 2>/dev/null`\nfi\nscript_dir=`dirname $script_path`\ncd $script_dir\n\n# Ensure gradlew exists before starting\nif [[ ! -f gradlew ]]; then\n    echo 'gradlew file not found! Check that you are in the right directory.'\n    exit 9\nfi\n\n# Copy base class library from wa dist\nlibs_dir=app/libs\nbase_class=`python3 -c \"import os, wa; print(os.path.join(os.path.dirname(wa.__file__), 'framework', 'uiauto', 'uiauto.aar'))\"`\nmkdir -p $libs_dir\ncp $base_class $libs_dir\n\n# Build and return appropriate exit code if failed\n# gradle build\n./gradlew clean :app:assembleDebug\nexit_code=$?\nif [[ $exit_code -ne 0 ]]; then\n    echo \"ERROR: 'gradle build' exited with code $exit_code\"\n    exit $exit_code\nfi\n\n# If successful move APK file to workload folder (overwrite previous)\npackage=com.arm.wa.uiauto.vellamo\nrm -f ../$package\nif [[ -f app/build/outputs/apk/debug/$package.apk ]]; then\n    cp app/build/outputs/apk/debug/$package.apk ../$package.apk\nelse\n    echo 'ERROR: UiAutomator apk could not be found!'\n    exit 9\nfi\n"
  },
  {
    "path": "wa/workloads/vellamo/uiauto/gradle/wrapper/gradle-wrapper.properties",
    "content": "#Wed May 03 15:42:44 BST 2017\ndistributionBase=GRADLE_USER_HOME\ndistributionPath=wrapper/dists\nzipStoreBase=GRADLE_USER_HOME\nzipStorePath=wrapper/dists\ndistributionUrl=https\\://services.gradle.org/distributions/gradle-7.3.3-all.zip\n"
  },
  {
    "path": "wa/workloads/vellamo/uiauto/gradlew",
    "content": "#!/usr/bin/env bash\n\n##############################################################################\n##\n##  Gradle start up script for UN*X\n##\n##############################################################################\n\n# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.\nDEFAULT_JVM_OPTS=\"\"\n\nAPP_NAME=\"Gradle\"\nAPP_BASE_NAME=`basename \"$0\"`\n\n# Use the maximum available, or set MAX_FD != -1 to use that value.\nMAX_FD=\"maximum\"\n\nwarn ( ) {\n    echo \"$*\"\n}\n\ndie ( ) {\n    echo\n    echo \"$*\"\n    echo\n    exit 1\n}\n\n# OS specific support (must be 'true' or 'false').\ncygwin=false\nmsys=false\ndarwin=false\ncase \"`uname`\" in\n  CYGWIN* )\n    cygwin=true\n    ;;\n  Darwin* )\n    darwin=true\n    ;;\n  MINGW* )\n    msys=true\n    ;;\nesac\n\n# Attempt to set APP_HOME\n# Resolve links: $0 may be a link\nPRG=\"$0\"\n# Need this for relative symlinks.\nwhile [ -h \"$PRG\" ] ; do\n    ls=`ls -ld \"$PRG\"`\n    link=`expr \"$ls\" : '.*-> \\(.*\\)$'`\n    if expr \"$link\" : '/.*' > /dev/null; then\n        PRG=\"$link\"\n    else\n        PRG=`dirname \"$PRG\"`\"/$link\"\n    fi\ndone\nSAVED=\"`pwd`\"\ncd \"`dirname \\\"$PRG\\\"`/\" >/dev/null\nAPP_HOME=\"`pwd -P`\"\ncd \"$SAVED\" >/dev/null\n\nCLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar\n\n# Determine the Java command to use to start the JVM.\nif [ -n \"$JAVA_HOME\" ] ; then\n    if [ -x \"$JAVA_HOME/jre/sh/java\" ] ; then\n        # IBM's JDK on AIX uses strange locations for the executables\n        JAVACMD=\"$JAVA_HOME/jre/sh/java\"\n    else\n        JAVACMD=\"$JAVA_HOME/bin/java\"\n    fi\n    if [ ! 
-x \"$JAVACMD\" ] ; then\n        die \"ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME\n\nPlease set the JAVA_HOME variable in your environment to match the\nlocation of your Java installation.\"\n    fi\nelse\n    JAVACMD=\"java\"\n    which java >/dev/null 2>&1 || die \"ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.\n\nPlease set the JAVA_HOME variable in your environment to match the\nlocation of your Java installation.\"\nfi\n\n# Increase the maximum file descriptors if we can.\nif [ \"$cygwin\" = \"false\" -a \"$darwin\" = \"false\" ] ; then\n    MAX_FD_LIMIT=`ulimit -H -n`\n    if [ $? -eq 0 ] ; then\n        if [ \"$MAX_FD\" = \"maximum\" -o \"$MAX_FD\" = \"max\" ] ; then\n            MAX_FD=\"$MAX_FD_LIMIT\"\n        fi\n        ulimit -n $MAX_FD\n        if [ $? -ne 0 ] ; then\n            warn \"Could not set maximum file descriptor limit: $MAX_FD\"\n        fi\n    else\n        warn \"Could not query maximum file descriptor limit: $MAX_FD_LIMIT\"\n    fi\nfi\n\n# For Darwin, add options to specify how the application appears in the dock\nif $darwin; then\n    GRADLE_OPTS=\"$GRADLE_OPTS \\\"-Xdock:name=$APP_NAME\\\" \\\"-Xdock:icon=$APP_HOME/media/gradle.icns\\\"\"\nfi\n\n# For Cygwin, switch paths to Windows format before running java\nif $cygwin ; then\n    APP_HOME=`cygpath --path --mixed \"$APP_HOME\"`\n    CLASSPATH=`cygpath --path --mixed \"$CLASSPATH\"`\n    JAVACMD=`cygpath --unix \"$JAVACMD\"`\n\n    # We build the pattern for arguments to be converted via cygpath\n    ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`\n    SEP=\"\"\n    for dir in $ROOTDIRSRAW ; do\n        ROOTDIRS=\"$ROOTDIRS$SEP$dir\"\n        SEP=\"|\"\n    done\n    OURCYGPATTERN=\"(^($ROOTDIRS))\"\n    # Add a user-defined pattern to the cygpath arguments\n    if [ \"$GRADLE_CYGPATTERN\" != \"\" ] ; then\n        OURCYGPATTERN=\"$OURCYGPATTERN|($GRADLE_CYGPATTERN)\"\n    fi\n    # Now convert the arguments - 
kludge to limit ourselves to /bin/sh\n    i=0\n    for arg in \"$@\" ; do\n        CHECK=`echo \"$arg\"|egrep -c \"$OURCYGPATTERN\" -`\n        CHECK2=`echo \"$arg\"|egrep -c \"^-\"`                                 ### Determine if an option\n\n        if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then                    ### Added a condition\n            eval `echo args$i`=`cygpath --path --ignore --mixed \"$arg\"`\n        else\n            eval `echo args$i`=\"\\\"$arg\\\"\"\n        fi\n        i=$((i+1))\n    done\n    case $i in\n        (0) set -- ;;\n        (1) set -- \"$args0\" ;;\n        (2) set -- \"$args0\" \"$args1\" ;;\n        (3) set -- \"$args0\" \"$args1\" \"$args2\" ;;\n        (4) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" ;;\n        (5) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" ;;\n        (6) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" \"$args5\" ;;\n        (7) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" \"$args5\" \"$args6\" ;;\n        (8) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" \"$args5\" \"$args6\" \"$args7\" ;;\n        (9) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" \"$args5\" \"$args6\" \"$args7\" \"$args8\" ;;\n    esac\nfi\n\n# Split up the JVM_OPTS And GRADLE_OPTS values into an array, following the shell quoting and substitution rules\nfunction splitJvmOpts() {\n    JVM_OPTS=(\"$@\")\n}\neval splitJvmOpts $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS\nJVM_OPTS[${#JVM_OPTS[*]}]=\"-Dorg.gradle.appname=$APP_BASE_NAME\"\n\nexec \"$JAVACMD\" \"${JVM_OPTS[@]}\" -classpath \"$CLASSPATH\" org.gradle.wrapper.GradleWrapperMain \"$@\"\n"
  },
  {
    "path": "wa/workloads/vellamo/uiauto/gradlew.bat",
    "content": "@if \"%DEBUG%\" == \"\" @echo off\r\n@rem ##########################################################################\r\n@rem\r\n@rem  Gradle startup script for Windows\r\n@rem\r\n@rem ##########################################################################\r\n\r\n@rem Set local scope for the variables with windows NT shell\r\nif \"%OS%\"==\"Windows_NT\" setlocal\r\n\r\n@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.\r\nset DEFAULT_JVM_OPTS=\r\n\r\nset DIRNAME=%~dp0\r\nif \"%DIRNAME%\" == \"\" set DIRNAME=.\r\nset APP_BASE_NAME=%~n0\r\nset APP_HOME=%DIRNAME%\r\n\r\n@rem Find java.exe\r\nif defined JAVA_HOME goto findJavaFromJavaHome\r\n\r\nset JAVA_EXE=java.exe\r\n%JAVA_EXE% -version >NUL 2>&1\r\nif \"%ERRORLEVEL%\" == \"0\" goto init\r\n\r\necho.\r\necho ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.\r\necho.\r\necho Please set the JAVA_HOME variable in your environment to match the\r\necho location of your Java installation.\r\n\r\ngoto fail\r\n\r\n:findJavaFromJavaHome\r\nset JAVA_HOME=%JAVA_HOME:\"=%\r\nset JAVA_EXE=%JAVA_HOME%/bin/java.exe\r\n\r\nif exist \"%JAVA_EXE%\" goto init\r\n\r\necho.\r\necho ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%\r\necho.\r\necho Please set the JAVA_HOME variable in your environment to match the\r\necho location of your Java installation.\r\n\r\ngoto fail\r\n\r\n:init\r\n@rem Get command-line arguments, handling Windowz variants\r\n\r\nif not \"%OS%\" == \"Windows_NT\" goto win9xME_args\r\nif \"%@eval[2+2]\" == \"4\" goto 4NT_args\r\n\r\n:win9xME_args\r\n@rem Slurp the command line arguments.\r\nset CMD_LINE_ARGS=\r\nset _SKIP=2\r\n\r\n:win9xME_args_slurp\r\nif \"x%~1\" == \"x\" goto execute\r\n\r\nset CMD_LINE_ARGS=%*\r\ngoto execute\r\n\r\n:4NT_args\r\n@rem Get arguments from the 4NT Shell from JP Software\r\nset CMD_LINE_ARGS=%$\r\n\r\n:execute\r\n@rem Setup the command line\r\n\r\nset 
CLASSPATH=%APP_HOME%\\gradle\\wrapper\\gradle-wrapper.jar\r\n\r\n@rem Execute Gradle\r\n\"%JAVA_EXE%\" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% \"-Dorg.gradle.appname=%APP_BASE_NAME%\" -classpath \"%CLASSPATH%\" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%\r\n\r\n:end\r\n@rem End local scope for the variables with windows NT shell\r\nif \"%ERRORLEVEL%\"==\"0\" goto mainEnd\r\n\r\n:fail\r\nrem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of\r\nrem the _cmd.exe /c_ return code!\r\nif  not \"\" == \"%GRADLE_EXIT_CONSOLE%\" exit 1\r\nexit /b 1\r\n\r\n:mainEnd\r\nif \"%OS%\"==\"Windows_NT\" endlocal\r\n\r\n:omega\r\n"
  },
  {
    "path": "wa/workloads/vellamo/uiauto/settings.gradle",
    "content": "include ':app'\n"
  },
  {
    "path": "wa/workloads/youtube/__init__.py",
    "content": "#    Copyright 2014-2018 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\nfrom wa import Parameter, ApkUiautoWorkload\nfrom wa.framework.exception import ConfigError\n\n\nclass Youtube(ApkUiautoWorkload):\n\n    name = 'youtube'\n    description = '''\n    A workload to perform standard productivity tasks within YouTube.\n\n    The workload plays a video from the app, determined by the ``video_source`` parameter.\n    While the video is playing, some common actions are performed such as video seeking, pausing\n    playback and navigating the comments section.\n\n    Test description:\n    The ``video_source`` parameter determines where the video to be played will be found\n    in the app. Possible values are ``search``, ``home``, ``my_videos``, and ``trending``.\n\n    -A. search - Goes to the search view, does a search for the given term, and plays the\n        first video in the results. The parameter ``search_term`` must also be provided\n        in the agenda for this to work. This is the default mode.\n    -B. home - Scrolls down once on the app's home page to avoid ads (if present, would be\n        first video), then selects and plays the video that appears at the top of the list.\n    -C. my_videos - Goes to the 'My Videos' section of the user's account page and plays a\n        video from there. The user must have at least one uploaded video for this to work.\n    -D. 
trending - Goes to the 'Trending Videos' section of the app, and plays the first\n        video in the trending videos list.\n\n    For the selected video source, the following test steps are performed:\n\n    1.  Navigate to the general app settings page to disable autoplay. This improves test\n        stability and predictability by preventing screen transition to load a new video\n        while in the middle of the test.\n    2.  Select the video from the source specified above, and dismiss any potential embedded\n        advert that may pop-up before the actual video.\n    3.  Let the video play for a few seconds, pause it, then resume.\n    4.  Expand the info card that shows video metadata, then collapse it again.\n    5.  Scroll down to the end of related videos and comments under the info card, and then\n        back up to the start. A maximum of 5 swipe actions is performed in either direction.\n\n    Known working APK version: 15.45.32\n    '''\n    package_names = ['com.google.android.youtube']\n\n    parameters = [\n        Parameter('video_source', kind=str, default='search',\n                  allowed_values=['home', 'my_videos', 'search', 'trending'],\n                  description='''\n                  Determines where to play the video from. 
This can either be from the\n                  YouTube home, my videos section, trending videos or found in search.\n                  '''),\n        Parameter('search_term', kind=str,\n                  default='Big Buck Bunny 60fps 4K - Official Blender Foundation Short Film',\n                  description='''\n                  The search term to use when ``video_source`` is set to ``search``.\n                  Ignored otherwise.\n                  '''),\n    ]\n\n    # This workload relies on the internet so check that there is a working\n    # internet connection\n    requires_network = True\n\n    def __init__(self, device, **kwargs):\n        super(Youtube, self).__init__(device, **kwargs)\n        self.run_timeout = 300\n\n    def validate(self):\n        super(Youtube, self).validate()\n        self.gui.uiauto_params['video_source'] = self.video_source\n        self.gui.uiauto_params['search_term'] = self.search_term\n        # Make sure search term is set if video source is 'search'\n        if (self.video_source == 'search') and not self.search_term:\n            raise ConfigError(\"Param 'search_term' must be specified when video source is 'search'\")\n"
  },
  {
    "path": "wa/workloads/youtube/uiauto/app/build.gradle",
    "content": "apply plugin: 'com.android.application'\n\ndef packageName = \"com.arm.wa.uiauto.youtube\"\n\nandroid {\n    compileSdkVersion 28\n    buildToolsVersion \"28.0.3\"\n    defaultConfig {\n        applicationId \"${packageName}\"\n        minSdkVersion 18\n        targetSdkVersion 28\n        testInstrumentationRunner \"android.support.test.runner.AndroidJUnitRunner\"\n    }\n    buildTypes {\n        applicationVariants.all { variant ->\n            variant.outputs.each { output ->\n                output.outputFileName = \"${packageName}.apk\"\n            }\n        }\n    }\n}\n\ndependencies {\n    implementation fileTree(dir: 'libs', include: ['*.jar'])\n    implementation 'com.android.support.test:runner:0.5'\n    implementation 'com.android.support.test:rules:0.5'\n    implementation 'com.android.support.test.uiautomator:uiautomator-v18:2.1.2'\n    implementation(name: 'uiauto', ext:'aar')\n}\n\nrepositories {\n    flatDir {\n        dirs 'libs'\n    }\n}\n"
  },
  {
    "path": "wa/workloads/youtube/uiauto/app/src/main/AndroidManifest.xml",
    "content": "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n<manifest xmlns:android=\"http://schemas.android.com/apk/res/android\"\n    package=\"com.arm.wa.uiauto.youtube\"\n    android:versionCode=\"1\"\n    android:versionName=\"1.0\">\n\n\n    <instrumentation\n        android:name=\"android.support.test.runner.AndroidJUnitRunner\"\n        android:targetPackage=\"${applicationId}\"/>\n\n</manifest>\n\n"
  },
  {
    "path": "wa/workloads/youtube/uiauto/app/src/main/java/com/arm/wa/uiauto/youtube/UiAutomation.java",
    "content": "/*    Copyright 2014-2018 ARM Limited\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\npackage com.arm.wa.uiauto.youtube;\n\nimport android.os.Bundle;\nimport android.os.SystemClock;\nimport android.support.test.internal.runner.ClassPathScanner;\nimport android.support.test.runner.AndroidJUnit4;\nimport android.support.test.uiautomator.UiObject;\nimport android.support.test.uiautomator.UiScrollable;\nimport android.support.test.uiautomator.UiSelector;\n\nimport com.arm.wa.uiauto.BaseUiAutomation;\nimport com.arm.wa.uiauto.ActionLogger;\n\nimport org.junit.Before;\nimport org.junit.Test;\nimport org.junit.runner.RunWith;\n\nimport static com.arm.wa.uiauto.BaseUiAutomation.FindByCriteria.BY_DESC;\nimport static com.arm.wa.uiauto.BaseUiAutomation.FindByCriteria.BY_ID;\nimport static com.arm.wa.uiauto.BaseUiAutomation.FindByCriteria.BY_TEXT;\n\n@RunWith(AndroidJUnit4.class)\npublic class UiAutomation extends BaseUiAutomation {\n\n    public static final String SOURCE_MY_VIDEOS = \"my_videos\";\n    public static final String SOURCE_SEARCH = \"search\";\n    public static final String SOURCE_TRENDING = \"trending\";\n\n    public static final int WAIT_TIMEOUT_1SEC = 1000;\n    public static final int VIDEO_SLEEP_SECONDS = 3;\n    public static final int LIST_SWIPE_COUNT = 5;\n\n\n    protected Bundle parameters;\n    protected String packageID;\n\n    @Before\n    public void initialize() {\n        parameters = getParams();\n      
  packageID = getPackageID(parameters);\n    }\n\n    @Test\n    public void setup() throws Exception {\n        mDevice.setOrientationNatural();\n        runApplicationInitialization();\n    }\n\n    @Test\n    public void runWorkload() throws Exception {\n        String videoSource = parameters.getString(\"video_source\");\n        String searchTerm = parameters.getString(\"search_term\");\n        testPlayVideo(videoSource, searchTerm);\n        dismissAdvert();\n        checkPlayerError();\n        pausePlayVideo();\n        checkVideoInfo();\n        scrollRelated();\n    }\n\n    @Test\n    public void teardown() throws Exception {\n        mDevice.unfreezeRotation();\n    }\n\n    // Get application parameters and clear the initial run dialogues of the application launch.\n    public void runApplicationInitialization() throws Exception {\n        clearFirstRunDialogues();\n        disableAutoplay();\n    }\n\n    // Sets the UiObject that marks the end of the application launch.\n    public UiObject getLaunchEndObject() {\n        UiObject launchEndObject = mDevice.findObject(new UiSelector()\n                                          .resourceId(packageID + \"menu_search\"));\n        return launchEndObject;\n    }\n\n    public void clearFirstRunDialogues() throws Exception {\n        UiObject laterButton =\n            mDevice.findObject(new UiSelector().textContains(\"Later\")\n                                               .className(\"android.widget.TextView\"));\n        if (laterButton.waitForExists(WAIT_TIMEOUT_1SEC)) {\n           laterButton.click();\n       }\n\n        UiObject cancelButton =\n            mDevice.findObject(new UiSelector().textContains(\"Cancel\")\n                                               .className(\"android.widget.Button\"));\n       if (cancelButton.waitForExists(WAIT_TIMEOUT_1SEC)) {\n        cancelButton.click();\n        }\n\n        UiObject skipButton =\n            mDevice.findObject(new 
UiSelector().textContains(\"Skip\")\n                                               .className(\"android.widget.TextView\"));\n        if (skipButton.waitForExists(WAIT_TIMEOUT_1SEC)) {\n            skipButton.click();\n        }\n\n        UiObject gotItButton =\n            mDevice.findObject(new UiSelector().textContains(\"Got it\")\n                                               .className(\"android.widget.Button\"));\n        if (gotItButton.waitForExists(WAIT_TIMEOUT_1SEC)) {\n            gotItButton.click();\n        }\n    }\n\n    public void disableAutoplay() throws Exception {\n        UiObject moreoptions =\n            mDevice.findObject(new UiSelector().descriptionContains(\"More options\"));\n        if (moreoptions.exists()) {\n            moreoptions.click();\n        }\n        else {\n            clickUiObject(BY_DESC, \"Account\");\n        }\n        clickUiObject(BY_TEXT, \"Settings\", true);\n        clickUiObject(BY_TEXT, \"General\", true);\n\n        // Don't fail fatally if autoplay toggle cannot be found\n        UiObject autoplayToggle =\n            mDevice.findObject(new UiSelector().textContains(\"Autoplay\"));\n        if (autoplayToggle.waitForExists(WAIT_TIMEOUT_1SEC)) {\n            autoplayToggle.click();\n        }\n        mDevice.pressBack();\n\n        // Tablet devices use a split with General in the left pane and Autoplay in the right so no\n        // need to click back twice\n        UiObject generalButton =\n            mDevice.findObject(new UiSelector().textContains(\"General\")\n                                               .className(\"android.widget.TextView\"));\n        if (generalButton.exists()) {\n            mDevice.pressBack();\n        }\n    }\n\n    private void playFirstVideo() throws Exception {\n        UiObject resultsList =\n                mDevice.findObject(new UiSelector().resourceId(packageID + \"results\"));\n        UiObject firstVideo =\n                resultsList.getFromParent(new 
UiSelector().clickable(true));\n        firstVideo.clickAndWaitForNewWindow();\n    }\n\n    public void testPlayVideo(String source, String searchTerm) throws Exception {\n        String testTag = \"play\";\n        ActionLogger logger = new ActionLogger(testTag + \"_\" + source, parameters);\n\n        if (SOURCE_SEARCH.equalsIgnoreCase(source)) {\n            clickUiObject(BY_DESC, \"Search\");\n            UiObject textField = getUiObjectByResourceId(packageID + \"search_edit_text\");\n            textField.setText(searchTerm);\n            mDevice.pressEnter();\n            // If a video exists whose title contains the exact search term, then play it\n            // Otherwise click the first video in the search results\n            UiObject matchedVideo = mDevice.findObject(new UiSelector().descriptionContains(searchTerm));\n\n            logger.start();\n            if (matchedVideo.exists()) {\n                matchedVideo.clickAndWaitForNewWindow();\n            } else {\n                playFirstVideo();\n            }\n            logger.stop();\n\n        } else if (SOURCE_MY_VIDEOS.equalsIgnoreCase(source)) {\n            clickUiObject(BY_DESC, \"Account\");\n            clickUiObject(BY_TEXT, \"My Videos\", true);\n\n            logger.start();\n            playFirstVideo();\n            logger.stop();\n\n        } else if (SOURCE_TRENDING.equalsIgnoreCase(source)) {\n            clickUiObject(BY_DESC, \"Explore\", true);\n            clickUiObject(BY_DESC, \"Trending\", true);\n\n            logger.start();\n            playFirstVideo();\n            logger.stop();\n\n        } else { // homepage videos\n            UiScrollable list =\n                new UiScrollable(new UiSelector().resourceId(packageID + \"results\"));\n            if (list.exists()) {\n                list.scrollForward();\n            }\n\n            logger.start();\n            playFirstVideo();\n            logger.stop();\n\n        }\n    }\n\n    public void dismissAdvert() 
throws Exception {\n        UiObject advert =\n            mDevice.findObject(new UiSelector().textContains(\"Visit advertiser\"));\n        if (advert.exists()) {\n            UiObject skip =\n                mDevice.findObject(new UiSelector().textContains(\"Skip ad\"));\n            if (skip.waitForExists(WAIT_TIMEOUT_1SEC*5)) {\n                skip.click();\n                sleep(VIDEO_SLEEP_SECONDS);\n            }\n        }\n    }\n\n    public void checkPlayerError() throws Exception {\n        UiObject playerError =\n            mDevice.findObject(new UiSelector().resourceId(packageID + \"player_error_view\"));\n        UiObject tapToRetry =\n            mDevice.findObject(new UiSelector().textContains(\"Tap to retry\"));\n        if (playerError.waitForExists(WAIT_TIMEOUT_1SEC) || tapToRetry.waitForExists(WAIT_TIMEOUT_1SEC)) {\n            throw new RuntimeException(\"Video player encountered an error and cannot continue.\");\n        }\n    }\n\n    public void pausePlayVideo() throws Exception {\n        UiObject player = getUiObjectByResourceId(packageID + \"player_fragment_container\");\n        sleep(VIDEO_SLEEP_SECONDS);\n        repeatClickUiObject(player, 2, 100);\n        sleep(1); // pause the video momentarily\n        player.click();\n        sleep(VIDEO_SLEEP_SECONDS);\n    }\n\n    public void checkVideoInfo() throws Exception {\n        UiObject expandButton =\n            mDevice.findObject(new UiSelector().resourceId(packageID + \"expand_button\"));\n        if (!expandButton.waitForExists(WAIT_TIMEOUT_1SEC)) {\n            return;\n        }\n        // Expand video info\n        expandButton.click();\n        sleep(1); // short delay to simulate user action\n        expandButton.click();\n    }\n\n    public void scrollRelated() throws Exception {\n        String testTag = \"scroll\";\n\n        // ListView of related videos and (maybe) comments\n        UiScrollable list =\n            new UiScrollable(new 
UiSelector().resourceId(packageID + \"watch_list\"));\n        if (list.isScrollable()) {\n            ActionLogger logger = new ActionLogger(testTag + \"_down\", parameters);\n            logger.start();\n            list.flingToEnd(LIST_SWIPE_COUNT);\n            logger.stop();\n\n            logger = new ActionLogger(testTag + \"_up\", parameters);\n            logger.start();\n            list.flingToBeginning(LIST_SWIPE_COUNT);\n            logger.stop();\n        }\n        // After flinging, give the window enough time to settle down before\n        // the next step, or else UiAutomator fails to find views in time\n        sleep(VIDEO_SLEEP_SECONDS);\n    }\n}\n"
  },
  {
    "path": "wa/workloads/youtube/uiauto/build.gradle",
    "content": "// Top-level build file where you can add configuration options common to all sub-projects/modules.\n\nbuildscript {\n    repositories {\n        jcenter()\n        google()\n    }\n    dependencies {\n        classpath 'com.android.tools.build:gradle:7.2.1'\n\n        // NOTE: Do not place your application dependencies here; they belong\n        // in the individual module build.gradle files\n    }\n}\n\nallprojects {\n    repositories {\n        jcenter()\n        google()\n    }\n}\n\ntask clean(type: Delete) {\n    delete rootProject.buildDir\n}\n"
  },
  {
    "path": "wa/workloads/youtube/uiauto/build.sh",
    "content": "#!/bin/bash\n#    Copyright 2013-2017 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\nset -e\n\n# CD into build dir if possible - allows building from any directory\nscript_path='.'\nif `readlink -f $0 &>/dev/null`; then\n    script_path=`readlink -f $0 2>/dev/null`\nfi\nscript_dir=`dirname $script_path`\ncd $script_dir\n\n# Ensure gradlew exists before starting\nif [[ ! -f gradlew ]]; then\n    echo 'gradlew file not found! Check that you are in the right directory.'\n    exit 9\nfi\n\n# Copy base class library from wa dist\nlibs_dir=app/libs\nbase_class=`python3 -c \"import os, wa; print(os.path.join(os.path.dirname(wa.__file__), 'framework', 'uiauto', 'uiauto.aar'))\"`\nmkdir -p $libs_dir\ncp $base_class $libs_dir\n\n# Build and return appropriate exit code if failed\n# gradle build\n./gradlew clean :app:assembleDebug\nexit_code=$?\nif [[ $exit_code -ne 0 ]]; then\n    echo \"ERROR: 'gradle build' exited with code $exit_code\"\n    exit $exit_code\nfi\n\n# If successful move APK file to workload folder (overwrite previous)\npackage=com.arm.wa.uiauto.youtube\nrm -f ../$package\nif [[ -f app/build/outputs/apk/debug/$package.apk ]]; then\n    cp app/build/outputs/apk/debug/$package.apk ../$package.apk\nelse\n    echo 'ERROR: UiAutomator apk could not be found!'\n    exit 9\nfi\n"
  },
  {
    "path": "wa/workloads/youtube/uiauto/gradle/wrapper/gradle-wrapper.properties",
    "content": "#Wed May 03 15:42:44 BST 2017\ndistributionBase=GRADLE_USER_HOME\ndistributionPath=wrapper/dists\nzipStoreBase=GRADLE_USER_HOME\nzipStorePath=wrapper/dists\ndistributionUrl=https\\://services.gradle.org/distributions/gradle-7.3.3-all.zip\n"
  },
  {
    "path": "wa/workloads/youtube/uiauto/gradlew",
    "content": "#!/usr/bin/env bash\n\n##############################################################################\n##\n##  Gradle start up script for UN*X\n##\n##############################################################################\n\n# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.\nDEFAULT_JVM_OPTS=\"\"\n\nAPP_NAME=\"Gradle\"\nAPP_BASE_NAME=`basename \"$0\"`\n\n# Use the maximum available, or set MAX_FD != -1 to use that value.\nMAX_FD=\"maximum\"\n\nwarn ( ) {\n    echo \"$*\"\n}\n\ndie ( ) {\n    echo\n    echo \"$*\"\n    echo\n    exit 1\n}\n\n# OS specific support (must be 'true' or 'false').\ncygwin=false\nmsys=false\ndarwin=false\ncase \"`uname`\" in\n  CYGWIN* )\n    cygwin=true\n    ;;\n  Darwin* )\n    darwin=true\n    ;;\n  MINGW* )\n    msys=true\n    ;;\nesac\n\n# Attempt to set APP_HOME\n# Resolve links: $0 may be a link\nPRG=\"$0\"\n# Need this for relative symlinks.\nwhile [ -h \"$PRG\" ] ; do\n    ls=`ls -ld \"$PRG\"`\n    link=`expr \"$ls\" : '.*-> \\(.*\\)$'`\n    if expr \"$link\" : '/.*' > /dev/null; then\n        PRG=\"$link\"\n    else\n        PRG=`dirname \"$PRG\"`\"/$link\"\n    fi\ndone\nSAVED=\"`pwd`\"\ncd \"`dirname \\\"$PRG\\\"`/\" >/dev/null\nAPP_HOME=\"`pwd -P`\"\ncd \"$SAVED\" >/dev/null\n\nCLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar\n\n# Determine the Java command to use to start the JVM.\nif [ -n \"$JAVA_HOME\" ] ; then\n    if [ -x \"$JAVA_HOME/jre/sh/java\" ] ; then\n        # IBM's JDK on AIX uses strange locations for the executables\n        JAVACMD=\"$JAVA_HOME/jre/sh/java\"\n    else\n        JAVACMD=\"$JAVA_HOME/bin/java\"\n    fi\n    if [ ! 
-x \"$JAVACMD\" ] ; then\n        die \"ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME\n\nPlease set the JAVA_HOME variable in your environment to match the\nlocation of your Java installation.\"\n    fi\nelse\n    JAVACMD=\"java\"\n    which java >/dev/null 2>&1 || die \"ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.\n\nPlease set the JAVA_HOME variable in your environment to match the\nlocation of your Java installation.\"\nfi\n\n# Increase the maximum file descriptors if we can.\nif [ \"$cygwin\" = \"false\" -a \"$darwin\" = \"false\" ] ; then\n    MAX_FD_LIMIT=`ulimit -H -n`\n    if [ $? -eq 0 ] ; then\n        if [ \"$MAX_FD\" = \"maximum\" -o \"$MAX_FD\" = \"max\" ] ; then\n            MAX_FD=\"$MAX_FD_LIMIT\"\n        fi\n        ulimit -n $MAX_FD\n        if [ $? -ne 0 ] ; then\n            warn \"Could not set maximum file descriptor limit: $MAX_FD\"\n        fi\n    else\n        warn \"Could not query maximum file descriptor limit: $MAX_FD_LIMIT\"\n    fi\nfi\n\n# For Darwin, add options to specify how the application appears in the dock\nif $darwin; then\n    GRADLE_OPTS=\"$GRADLE_OPTS \\\"-Xdock:name=$APP_NAME\\\" \\\"-Xdock:icon=$APP_HOME/media/gradle.icns\\\"\"\nfi\n\n# For Cygwin, switch paths to Windows format before running java\nif $cygwin ; then\n    APP_HOME=`cygpath --path --mixed \"$APP_HOME\"`\n    CLASSPATH=`cygpath --path --mixed \"$CLASSPATH\"`\n    JAVACMD=`cygpath --unix \"$JAVACMD\"`\n\n    # We build the pattern for arguments to be converted via cygpath\n    ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`\n    SEP=\"\"\n    for dir in $ROOTDIRSRAW ; do\n        ROOTDIRS=\"$ROOTDIRS$SEP$dir\"\n        SEP=\"|\"\n    done\n    OURCYGPATTERN=\"(^($ROOTDIRS))\"\n    # Add a user-defined pattern to the cygpath arguments\n    if [ \"$GRADLE_CYGPATTERN\" != \"\" ] ; then\n        OURCYGPATTERN=\"$OURCYGPATTERN|($GRADLE_CYGPATTERN)\"\n    fi\n    # Now convert the arguments - 
kludge to limit ourselves to /bin/sh\n    i=0\n    for arg in \"$@\" ; do\n        CHECK=`echo \"$arg\"|egrep -c \"$OURCYGPATTERN\" -`\n        CHECK2=`echo \"$arg\"|egrep -c \"^-\"`                                 ### Determine if an option\n\n        if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then                    ### Added a condition\n            eval `echo args$i`=`cygpath --path --ignore --mixed \"$arg\"`\n        else\n            eval `echo args$i`=\"\\\"$arg\\\"\"\n        fi\n        i=$((i+1))\n    done\n    case $i in\n        (0) set -- ;;\n        (1) set -- \"$args0\" ;;\n        (2) set -- \"$args0\" \"$args1\" ;;\n        (3) set -- \"$args0\" \"$args1\" \"$args2\" ;;\n        (4) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" ;;\n        (5) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" ;;\n        (6) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" \"$args5\" ;;\n        (7) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" \"$args5\" \"$args6\" ;;\n        (8) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" \"$args5\" \"$args6\" \"$args7\" ;;\n        (9) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" \"$args5\" \"$args6\" \"$args7\" \"$args8\" ;;\n    esac\nfi\n\n# Split up the JVM_OPTS And GRADLE_OPTS values into an array, following the shell quoting and substitution rules\nfunction splitJvmOpts() {\n    JVM_OPTS=(\"$@\")\n}\neval splitJvmOpts $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS\nJVM_OPTS[${#JVM_OPTS[*]}]=\"-Dorg.gradle.appname=$APP_BASE_NAME\"\n\nexec \"$JAVACMD\" \"${JVM_OPTS[@]}\" -classpath \"$CLASSPATH\" org.gradle.wrapper.GradleWrapperMain \"$@\"\n"
  },
  {
    "path": "wa/workloads/youtube/uiauto/gradlew.bat",
    "content": "@if \"%DEBUG%\" == \"\" @echo off\r\n@rem ##########################################################################\r\n@rem\r\n@rem  Gradle startup script for Windows\r\n@rem\r\n@rem ##########################################################################\r\n\r\n@rem Set local scope for the variables with windows NT shell\r\nif \"%OS%\"==\"Windows_NT\" setlocal\r\n\r\n@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.\r\nset DEFAULT_JVM_OPTS=\r\n\r\nset DIRNAME=%~dp0\r\nif \"%DIRNAME%\" == \"\" set DIRNAME=.\r\nset APP_BASE_NAME=%~n0\r\nset APP_HOME=%DIRNAME%\r\n\r\n@rem Find java.exe\r\nif defined JAVA_HOME goto findJavaFromJavaHome\r\n\r\nset JAVA_EXE=java.exe\r\n%JAVA_EXE% -version >NUL 2>&1\r\nif \"%ERRORLEVEL%\" == \"0\" goto init\r\n\r\necho.\r\necho ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.\r\necho.\r\necho Please set the JAVA_HOME variable in your environment to match the\r\necho location of your Java installation.\r\n\r\ngoto fail\r\n\r\n:findJavaFromJavaHome\r\nset JAVA_HOME=%JAVA_HOME:\"=%\r\nset JAVA_EXE=%JAVA_HOME%/bin/java.exe\r\n\r\nif exist \"%JAVA_EXE%\" goto init\r\n\r\necho.\r\necho ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%\r\necho.\r\necho Please set the JAVA_HOME variable in your environment to match the\r\necho location of your Java installation.\r\n\r\ngoto fail\r\n\r\n:init\r\n@rem Get command-line arguments, handling Windowz variants\r\n\r\nif not \"%OS%\" == \"Windows_NT\" goto win9xME_args\r\nif \"%@eval[2+2]\" == \"4\" goto 4NT_args\r\n\r\n:win9xME_args\r\n@rem Slurp the command line arguments.\r\nset CMD_LINE_ARGS=\r\nset _SKIP=2\r\n\r\n:win9xME_args_slurp\r\nif \"x%~1\" == \"x\" goto execute\r\n\r\nset CMD_LINE_ARGS=%*\r\ngoto execute\r\n\r\n:4NT_args\r\n@rem Get arguments from the 4NT Shell from JP Software\r\nset CMD_LINE_ARGS=%$\r\n\r\n:execute\r\n@rem Setup the command line\r\n\r\nset 
CLASSPATH=%APP_HOME%\\gradle\\wrapper\\gradle-wrapper.jar\r\n\r\n@rem Execute Gradle\r\n\"%JAVA_EXE%\" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% \"-Dorg.gradle.appname=%APP_BASE_NAME%\" -classpath \"%CLASSPATH%\" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%\r\n\r\n:end\r\n@rem End local scope for the variables with windows NT shell\r\nif \"%ERRORLEVEL%\"==\"0\" goto mainEnd\r\n\r\n:fail\r\nrem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of\r\nrem the _cmd.exe /c_ return code!\r\nif  not \"\" == \"%GRADLE_EXIT_CONSOLE%\" exit 1\r\nexit /b 1\r\n\r\n:mainEnd\r\nif \"%OS%\"==\"Windows_NT\" endlocal\r\n\r\n:omega\r\n"
  },
  {
    "path": "wa/workloads/youtube/uiauto/settings.gradle",
    "content": "include ':app'\n"
  },
  {
    "path": "wa/workloads/youtube_playback/__init__.py",
    "content": "#    Copyright 2017 ARM Limited\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\nimport time\n\nfrom wa import Parameter, ApkWorkload\n\n\nclass YoutubePlayback(ApkWorkload):\n    \"\"\"\n    Simple Youtube video playback\n\n    This triggers a video streaming playback on Youtube. Unlike the more\n    featureful \"youtube\" workload, this performs no other action than starting\n    the video via an intent and then waiting for a certain amount of playback\n    time. This is therefore only useful when you are confident that the content\n    on the end of the provided URL is stable - that means the video should have\n    no advertisements attached.\n    \"\"\"\n    name = 'youtube_playback'\n\n    package_names = ['com.google.android.youtube']\n    action = 'android.intent.action.VIEW'\n\n    parameters = [\n        Parameter('video_url', default='https://www.youtube.com/watch?v=YE7VzlLtp-4',\n                  description='URL of video to play'),\n        Parameter('duration', kind=int, default=20,\n                  description='Number of seconds of video to play'),\n    ]\n\n    def setup(self, context):\n        super(YoutubePlayback, self).setup(context)\n\n        self.command = 'am start -a {} {}'.format(self.action, self.video_url)\n\n    def run(self, context):\n        self.target.execute(self.command)\n\n        time.sleep(self.duration)\n"
  }
]