[
  {
    "path": ".github/CODEOWNERS",
    "content": "* @HiveRunner/hiverunner-committers\n"
  },
  {
    "path": ".github/ISSUE_TEMPLATE/bug_report.md",
    "content": "---\nname: Bug report\nabout: Create a report to help us improve\n\n---\n<!-- \n Before raising a bug report please consider the following:\n   1. If you want to ask a question don't raise a bug report - rather use the mailing list at https://groups.google.com/forum/#!forum/hive-runner-user\n   2. Please ensure that the bug you are reporting is actually in HiveRunner and not with Hive itself. Because HiveRunner tests Hive queries, if there \n      are issues with your queries or Hive setup, it will just return any errors that Hive itself throws and users sometimes mistakenly report these\n      as HiveRunner issues. The easiest way to check this is to perform your query against Hive directly. If the issue still persists then it's not \n      related to HiveRunner so please don't report it here.  \n-->\n**Describe the bug**\nA clear and concise description of what the bug is.\n\n**To Reproduce**\nSteps to reproduce the behaviour ideally including the configuration files you are using (feel free to rename any sensitive information like server and table names etc.) \nEven better would be the source code of your unit test or a pull request against the HiveRunner unit tests containing a test that demonstrates the issue.\n\n**Expected behavior**\nA clear and concise description of what you expected to happen.\n\n**Logs**\nPlease add the log output from HiveRunner when the error occurs, full stack traces are especially useful.\n\n**Versions (please complete the following information):**\n - HiveRunner Version: \n - Hive Versions: for whatever version of Hive you are using for your tests\n\n**Additional context**\nAdd any other context about the problem here.\n"
  },
  {
    "path": ".github/ISSUE_TEMPLATE/feature_request.md",
    "content": "---\nname: Feature request\nabout: Suggest an idea for this project\n\n---\n\n**Is your feature request related to a problem? Please describe.**\nA clear and concise description of what the problem is. For example - I'm always frustrated when [...]\n\n**Describe the solution you'd like**\nA clear and concise description of what you want to happen.\n\n**Describe alternatives you've considered**\nA clear and concise description of any alternative solutions or features you've considered.\n\n**Additional context**\nAdd any other context or screenshots about the feature request here.\n"
  },
  {
    "path": ".github/workflows/deploy.yml",
    "content": "name: Deploy SNAPSHOT\non:\n  workflow_dispatch:\n    inputs:\n      branch:\n        description: \"The branch to use to deploy a SNAPSHOT from.\"\n        required: true\n        default: \"main\"\njobs:\n  deploy:\n    name: Deploy SNAPSHOT to Sonatype\n    runs-on: ubuntu-20.04\n    steps:\n    - uses: actions/checkout@v2\n      with:\n        fetch-depth: 0\n        ref: ${{ github.event.inputs.branch }}\n    - name: Set up JDK\n      uses: actions/setup-java@v2\n      with:\n        distribution: 'adopt'\n        java-version: '8'\n        # this creates a settings.xml with the following server\n        settings-path: ${{ github.workspace }}\n        server-id: ossrh # Value of the distributionManagement/repository/id field of the pom.xml\n        server-username: SONATYPE_USERNAME # env variable for username in deploy\n        server-password: SONATYPE_PASSWORD # env variable for token in deploy        \n        # only signed artifacts will be released to maven central. this sets up things for the maven-gpg-plugin\n        gpg-private-key: ${{ secrets.GPG_PRIVATE_KEY }} # Value of the GPG private key to import\n        gpg-passphrase: GPG_PASSPHRASE # env variable for GPG private key passphrase\n       \n    - name: Run Maven Targets\n      run: mvn deploy --settings $GITHUB_WORKSPACE/settings.xml --batch-mode --show-version --no-transfer-progress --activate-profiles oss-release\n      env:\n        SONATYPE_PASSWORD: ${{ secrets.SONATYPE_PASSWORD }}\n        SONATYPE_USERNAME: ${{ secrets.SONATYPE_USERNAME }}\n        GPG_PASSPHRASE: ${{ secrets.GPG_PRIVATE_KEY_PASSPHRASE }}\n"
  },
  {
    "path": ".github/workflows/main.yml",
    "content": "name: build\n\non: \n  pull_request:\n  push:\n    branches: \n      - main\n\njobs:\n  test:\n    name: Package and run all tests\n    runs-on: ubuntu-20.04\n    steps:\n    - uses: actions/checkout@v2\n      with:\n        fetch-depth: 0\n    - name: Set up JDK\n      uses: actions/setup-java@v2\n      with:\n        distribution: 'adopt'\n        java-version: '8'\n    - name: Run Maven Targets\n      run: mvn package --batch-mode --show-version --no-transfer-progress\n"
  },
  {
    "path": ".github/workflows/release.yml",
    "content": "name: Release to Maven Central\non:\n  workflow_dispatch:\n    inputs:\n      branch:\n        description: \"The branch to use to release from.\"\n        required: true\n        default: \"main\"\njobs:\n  release:\n    name: Release to Maven Central\n    runs-on: ubuntu-20.04\n\n    steps:\n    - name: Checkout source code\n      uses: actions/checkout@v2\n      with:\n        fetch-depth: 0\n        ref: ${{ github.event.inputs.branch }}\n\n    - name: Set up JDK\n      uses: actions/setup-java@v2\n      with:\n        distribution: 'adopt'\n        java-version: '8'\n        # this creates a settings.xml with the following server\n        settings-path: ${{ github.workspace }}\n        server-id: ossrh # Value of the distributionManagement/repository/id field of the pom.xml\n        server-username: SONATYPE_USERNAME # env variable for username in deploy\n        server-password: SONATYPE_PASSWORD # env variable for token in deploy        \n        # only signed artifacts will be released to maven central. this sets up things for the maven-gpg-plugin\n        gpg-private-key: ${{ secrets.GPG_PRIVATE_KEY }} # Value of the GPG private key to import\n        gpg-passphrase: GPG_PASSPHRASE # env variable for GPG private key passphrase\n\n    - name: Configure Git User\n      run: |\n        git config user.email \"actions@github.com\"\n        git config user.name \"GitHub Actions\"\n\n    - name: Run Maven Targets\n      run: mvn release:prepare release:perform --settings $GITHUB_WORKSPACE/settings.xml --activate-profiles oss-release --batch-mode --show-version --no-transfer-progress \n      env:\n        SONATYPE_PASSWORD: ${{ secrets.SONATYPE_PASSWORD }}\n        SONATYPE_USERNAME: ${{ secrets.SONATYPE_USERNAME }}\n        GPG_PASSPHRASE: ${{secrets.GPG_PRIVATE_KEY_PASSPHRASE}}\n        GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}\n"
  },
  {
    "path": ".gitignore",
    "content": "# Project files #\n#################\n*.iml\n*.ipr\n*.iws\nnbactions.xml\n/.idea/\n\n# Compiled source #\n###################\n*.com\n*.class\n*.dll\n*.exe\n*.o\n*.so\n\n# Maven target #\n################\n/target/**\n\n# H2 db files #\n###############\nmem.h2.db\nmem.lock.db\n\n# HSQLDB files #\n################\ntestdb.log\ntestdb.properties\ntestdb.script\n\n# Packages #\n############\n# it's better to unpack these files and commit the raw source\n# git has its own built in compression methods\n*.7z\n*.dmg\n*.gz\n*.iso\n*.jar\n*.rar\n*.tar\n*.zip\n\n# Logs and databases #\n######################\n*.log\n*.sqlite\nmetastore_db\n\n# OS generated files #\n######################\n.DS_Store\n.DS_Store?\n._*\n.Spotlight-V100\n.Trashes\nIcon?\nehthumbs.db\nThumbs.db\n\n# Temporary files #\n###################\n*~\n\n# Eclipse #\n###########\n.classpath\n.project\n.settings/\n\n# IntelliJ #\n############\n.gradle/\n*.eml\n\n# jEnv #\n########\n/.java-version\n\n# Misc #\n########\n/pubring.kbx\n"
  },
  {
    "path": "CHANGELOG.md",
    "content": "# Changelog\nAll notable changes to this project will be documented in this file.\n\nThe format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/) and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html).\n## [7.0.0] - 2024-11-22\n### Added\n- Added version `6.0.9` of `datanucleus-core`.\n- Added version `6.0.4` of `datanucleus-api-jdo`.\n- Added version `6.0.9` of `datanucleus-rdbms`.\n- Added version `1.3` of `javax.transaction-api`.\n- Added version `6.1.14` of `spring-jdbc`.\n- Added version `10.15.2.0` of `derby`.\n- Added version `10.15.2.0` of `derbytools`.\n- Added version `5.6.2` of `kryo`.\n- Added version `4.9.3` of `antlr4-runtime`.\n- Added version `4.0.1` of `kafka-handler`.\n- Added missing Hive & Datanucleus properties in StandaloneHiveServerContext so now the framework works with the new Hive dependency versions.\n\n### Changed\n- Updated `hadoop-mapreduce-client-common` from `3.1.0` to `3.4.1`.\n- Updated `hadoop-mapreduce-client-core` from `3.1.0` to `3.4.1`.\n- Updated `hadoop-client-runtime` from `3.1.0` to `3.4.1`.\n- Updated `hive-exec` from `3.1.2` to `4.0.1`.\n- Updated `hive-serde` from `3.1.0` to `4.0.1`.\n- Updated `hive-jdbc` from `3.1.0` to `4.0.1`.\n- Updated `hive-contrib` from `3.1.0` to `4.0.1`.\n- Updated `hive-webhcat-java-client` from `3.1.0` to `4.0.1`.\n- Updated `jackson-annotations` from `2.9.5` to `2.18.1`.\n- Updated `reflections` from `0.9.8` to `0.10.2`.\n- Updated `mockito-core` from `3.8.0` to `5.14.2`.\n- Updated `mockito-junit-jupiter` from `3.8.0` to `5.14.2`.\n- Updated `tez-common` from `0.9.1` to `0.10.4`.\n- Updated `tez-mapreduce` from `0.9.1` to `0.10.4`.\n- Updated `junit-jupiter` from `5.7.1` to `5.11.3`.\n- Updated `junit-vintage-engine` from `5.7.1` to `5.11.2`.\n- Updated `maven-surefire-plugin` from `2.22.2` to `3.5.1`.\n- Updated `maven-compiler-plugin` from `3.7.0` to `3.13.0`.\n- Updated `maven-jar-plugin` from `3.2.0` to `3.4.2`.\n- Updated 
`maven-release-plugin` from `3.0.0-M1` to `3.1.1`.\n- Updated `nexus-staging-maven-plugin` from `1.6.8` to `1.7.0`.\n- Updated `maven-source-plugin` from `3.2.0` to `3.3.1`.\n- Updated `maven-javadoc-plugin` from `3.2.0` to `3.10.1`.\n- Updated `maven-gpg-plugin` from `1.6` to `3.2.7`.\n- Updated `HiveConf` property names in `StandaloneHiveServerContext`\n- Set `METASTORE_VALIDATE_CONSTRAINTS`, `METASTORE_VALIDATE_COLUMNS`, `METASTORE_VALIDATE_TABLES` properties to false in StandaloneHiveServerContext.\n\n### Removed\n- Removed `com.google.common.base.Predicates` in `HiveRunnerExtension`/`StandaloneHiveRunner` as it is no longer used in a new version of `org.reflections:reflections` library.\n\n### Fixed\n- Fixed warning \"org.apache.hadoop.hive.metastore.MetastoreDirectSqlUtils - Failed to execute [select \"FUNCS\".\"FUNC_ID\" from \"FUNCS\" LEFT JOIN \"DBS\" ON \"FUNCS\".\"DB_ID\" = \"DBS\".\"DB_ID\" where \"DBS\".\"CTLG_NAME\" = ? ]...\" is not logged anymore.\n- Fixed `IgnoreClosePrintStream` as NPE was thrown after upgrading to Java >= 11\n- Fixed error \"org.apache.hadoop.hive.ql.exec.tez.DagUtils - Failed to add credential supplier java.lang.ClassNotFoundException: org.apache.hadoop.hive.kafka.KafkaDagCredentialSupplier\"\n\n## [6.1.0] - 2021-04-28\n### Changed\n- Maven Group Id changed from `com.klarna` to `io.github.hiverunner`.\n- Set `HIVE_IN_TEST` to true in `StandaloneHiverServerContext` instead of `StandaloneHiveRunner` so checks for non-existent tables are skipped by both the JUnit4 runner and the JUnit5 extension (this removes a lot of log noise from tests using the latter).\n- Made `HiveRunnerScript` constructor public.\n- Made `scriptsUnderTest` variable in `HiveRunnerExtension` protected so it can be used in [MutantSwarm](https://github.com/HotelsDotCom/mutant-swarm).\n- Fixed bug that appears in [Mutant Swarm](https://github.com/HotelsDotCom/mutant-swarm) when updating HiveRunner to version 5.2.1.\n- Renamed `HelloAnnotatedHiveRunner` in 
`com.klarna.hiverunner.examples` to `HelloAnnotatedHiveRunnerTest`.\n- Renamed `HelloHiveRunner` in `com.klarna.hiverunner.examples` to `HelloHiveRunnerTest`.\n- Renamed `InsertTestData` in `com.klarna.hiverunner.examples` to `InsertTestDataTest`.\n- Renamed `SetHiveConfValues` in `com.klarna.hiverunner.examples` to `SetHiveConfValuesTest`.\n- Renamed `HelloAnnotatedHiveRunner` in `com.klarna.hiverunner.examples.junit4` to `HelloAnnotatedHiveRunnerTest`.\n- Renamed `HelloHiveRunner` in `com.klarna.hiverunner.examples.junit4` to `HelloHiveRunnerTest`.\n- Renamed `InsertTestData` in `com.klarna.hiverunner.examples.junit4` to `InsertTestDataTest`.\n- Renamed `SetHiveConfValues` in `com.klarna.hiverunner.examples.junit4` to `SetHiveConfValuesTest`.\n- Updated `surefire-version-plugin` from `2.21.0` to `2.22.0`.\n- Updated `junit.jupiter.version` (JUnit5) from `5.6.0` to `5.7.1`.\n- Updated `junit` (JUnit4) from `4.13.1` to `4.13.2`.\n- Updated `mockito-core` from `2.18.3` to `3.8.0`.\n\n### Added\n- Added `getScriptPaths` method in `HiveRunnerCore`.\n- Added `getScriptPaths` method in `HiveRunnerExtension` to be able to access the other method in `HiveRunnerCore` so that it can be used downstream in [MutantSwarm](https://github.com/HotelsDotCom/mutant-swarm).\n- Added `fromScriptPaths` method in `HiveShellBuilder`.\n- Added version `5.7.1` of `junit-vintage-engine`.\n- Added version `3.8.0` of `mockito-junit-jupiter`.\n\n### Fixed\n- Fixed bug where the files specified in `@HiveSQL` weren't being run when using `HiveRunnerExtension`.\n- Successful tests using \"SET\" no longer marked as \"terminated\" when run in IntelliJ. 
See [#94](https://github.com/klarna/HiveRunner/issues/94).\n\n## [6.0.1] - 2020-09-07\n### Removed\n- Removed shaded jar that was being produced as a side-effect.\n\n## [6.0.0] - 2020-09-03\n### Changed\n- Upgraded Hive version to 3.1.2 (was 2.3.7).\n\n## [5.x]\n### NOTE\n- Releases from the 5.x (Hive 2) line are not tracked in this CHANGELOG, it only tracks 6.0.0 and above. For changes in 5.x please refer to https://github.com/klarna/HiveRunner/blob/hive-2.x/CHANGELOG.md.\n\n## [5.0.0] - 2019-09-30\n### Added\n- JUnit5 [Extension](https://junit.org/junit5/docs/current/user-guide/#extensions) support with `HiveRunnerExtension`. See [#106](https://github.com/klarna/HiveRunner/issues/106).\n\n### Changed\n- Default supported Hive version is now 2.3.4 (was 2.3.3) as version 2.3.3 has a [vulnerability](https://nvd.nist.gov/vuln/detail/CVE-2018-1314).\n- `TemporaryFolder` ([JUnit 4](https://junit.org/junit4/javadoc/4.12/org/junit/rules/TemporaryFolder.html)) has been changed to `Path` ([Java NIO](https://docs.oracle.com/javase/8/docs/api/java/nio/file/Path.html)) throughout the project for the JUnit5 update. \n- NOTE: The `HiveServerContext` class now uses `Path` instead of `TemporaryFolder` in the constructor.\n\n## [4.1.0] - 2019-02-27\n### Changed\n- Internal refactoring to support upcoming \"Mutant Swarm\" project which provides unit test coverage for Hive SQL scripts. See [#65](https://github.com/klarna/HiveRunner/issues/65).\n\n## [4.0.0] - 2018-07-17\n### Added\n- Support shell-specific `source` (`hive`) and ``!run`` (`beeline`) commands. 
These commands allow one to import and execute the contents of external files in statements or scripts.\n\n### Changed\n- Default supported Hive version is now 2.3.3 (was 1.2.1).\n- Default supported Tez version is now 0.9.1 (was 0.7.0).\n- Supported Java version is 8 (was 7).\n- In-memory DB used by HiveRunner is now Derby (was HSQLDB).\n- Log4J configuration file removed from jar artifact.\n- System property to configure command shell emulation mode renamed to `commandShellEmulator` (was `commandShellEmulation`).\n\n## [3.2.1] - 2018-05-31\n### Changed\n- Fixed issue where if case of column name in a file was different to case in table definition they would be treated as different [#73](https://github.com/klarna/HiveRunner/issues/73).\n- The way of setting writable permissions on JUnit temporary folder changed to make it compatible with Windows [#63](https://github.com/klarna/HiveRunner/issues/63).\n\n## [3.2.0] - 2017-02-09\n### Added\n- Added functionality for headers in TSV parser. This way you can dynamically add TSV files declaring a subset of columns using insertInto.\n\n## [3.1.1] - 2017-01-27\n### Added\n- Added debug logging of result set. Enable by setting ```log4j.logger.com.klarna.hiverunner.HiveServerContainer=DEBUG``` in log4j.properties.\n\n## [3.1.0] - 2016-10-17\n### Added\n- Added methods to the shell that allow statements contained in files to be executed and their results gathered. These are particularly useful for HQL scripts that generate no table based data and instead write results to STDOUT. In practice we've seen these scripts used in data processing job orchestration scripts (e.g `bash`) to check for new data, calculate processing boundaries, etc. 
These values are then used to appropriately configure and launch some downstream job.\n- Support abstract base class [#48](https://github.com/klarna/HiveRunner/issues/48).\n\n## [3.0.0] - 2016-02-05\n### Changed\n- Upgraded to Hive 1.2.1 (Note: new major release with backwards incompatibility issues). As of Hive 1.2 there are a number of new reserved keywords, see [DDL manual](https://cwiki.apache.org/confluence/display/Hive/LanguageManual+DDL#LanguageManualDDL-Keywords,Non-reservedKeywordsandReservedKeywords) for more information. If you happen to have one of these as an identifier, you could either backtick quote them (e.g. \\`date\\`, \\`timestamp\\` or \\`update\\`) or set hive.support.sql11.reserved.keywords=false.                                            \n- Users of Hive version 0.14 or older are recommended to use HiveRunner version 2.6.0.\n- Removed the custom HiveConf hive.vs. Use hadoop.tmp.dir instead.\n\n## [2.6.0] - 2015-12-01\n### Added\n- Introduced command shell emulations to replicate different handling of full line comments in `hive` and `beeline` shells. Now strips full line comments for executed scripts to match the behaviour of the `hive -f` file option. \n- Option to use files as input for com.klarna.hiverunner.HiveShell.execute(...).\n\n## [2.5.1] - 2015-11-12\n### Changed\n- Fixed deadlock in `ThrowOnTimeout.java` that occurred when running with long running test case and disabled timeout.\n\n## [2.5.0]\n### Added\n- Added support with `HiveShell.insertInto` for fluently generating test data in a table storage format agnostic manner.\n\n## [2.4.0]\n### Changed\n- Enabled any hiveconf variables to be set as System properties by using the naming convention hiveconf_[HiveConf property name]. e.g: hiveconf_hive.execution.engine.\n- Fixed bug: Results sets bigger than 100 rows only returned the first 100 rows. \n\n## [2.3.0]\n### Changed\n- Merged Tez and MR context into the same context again. 
Now, the same test suite may alter between execution engines by doing e.g.: \n\n     hive> set hive.execution.engine=tez;\n     hive> [some query]\n     hive> set hive.execution.engine=mr;\n     hive> [some query]\n\n## [2.2.0]\n### Added\n- Added support for setting hivevars via HiveShell.\n"
  },
  {
    "path": "CODE-OF-CONDUCT.md",
    "content": "# Contributor Covenant Code of Conduct\n\n## Our Pledge\n\nIn the interest of fostering an open and welcoming environment, we as\ncontributors and maintainers pledge to making participation in our project and\nour community a harassment-free experience for everyone, regardless of age, body\nsize, disability, ethnicity, gender identity and expression, level of experience,\nnationality, personal appearance, race, religion, or sexual identity and\norientation.\n\n## Our Standards\n\nExamples of behaviour that contributes to creating a positive environment\ninclude:\n\n* Using welcoming and inclusive language\n* Being respectful of differing viewpoints and experiences\n* Gracefully accepting constructive criticism\n* Focusing on what is best for the community\n* Showing empathy towards other community members\n\nExamples of unacceptable behaviour by participants include:\n\n* The use of sexualised language or imagery and unwelcome sexual attention or\n  advances\n* Trolling, insulting/derogatory comments, and personal or political attacks\n* Public or private harassment\n* Publishing others' private information, such as a physical or electronic\n  address, without explicit permission\n* Other conduct which could reasonably be considered inappropriate in a\n  professional setting\n\n## Our Responsibilities\n\nProject maintainers are responsible for clarifying the standards of acceptable\nbehaviour and are expected to take appropriate and fair corrective action in\nresponse to any instances of unacceptable behaviour.\n\nProject maintainers have the right and responsibility to remove, edit, or\nreject comments, commits, code, wiki edits, issues, and other contributions\nthat are not aligned to this Code of Conduct, or to ban temporarily or\npermanently any contributor for other behaviours that they deem inappropriate,\nthreatening, offensive, or harmful.\n\n## Scope\n\nThis Code of Conduct applies both within project spaces and in public spaces\nwhen an 
individual is representing the project or its community. Examples of\nrepresenting a project or community include using an official project e-mail\naddress, posting via an official social media account, or acting as an appointed\nrepresentative at an online or offline event. Representation of a project may be\nfurther defined and clarified by project maintainers.\n\n## Enforcement\n\nInstances of abusive, harassing, or otherwise unacceptable behaviour may be\nreported by contacting [a member of the project team](https://github.com/orgs/HiveRunner/teams/hiverunner-committers). All\ncomplaints will be reviewed and investigated and will result in a response that\nis deemed necessary and appropriate to the circumstances. The project team is\nobligated to maintain confidentiality with regard to the reporter of an incident.\nFurther details of specific enforcement policies may be posted separately.\n\nProject maintainers who do not follow or enforce the Code of Conduct in good\nfaith may face temporary or permanent repercussions as determined by other\nmembers of the project's leadership.\n\n## Attribution\n\nThis Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,\navailable at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html\n\n[homepage]: https://www.contributor-covenant.org\n\n"
  },
  {
    "path": "CONTRIBUTING.md",
    "content": "# How To Contribute\n\nWe'd love to accept your patches and contributions to this project. There are just a few guidelines you need to follow which are described in detail below.\n\n## 1. Fork this repo\n\nYou should create a fork of this project in your account and work from there. You can create a fork by clicking the fork button in GitHub.\n\n## 2. One feature, one branch\n\nWork for each new feature/issue should occur in its own branch. To create a new branch from the command line:\n```shell\ngit checkout -b my-new-feature\n```\nwhere \"my-new-feature\" describes what you're working on.\n\n## 3. Add unit tests\nIf your contribution modifies existing or adds new code please add corresponding unit tests for this.\n\n## 4. Ensure that the build passes\n\nRun\n```shell\nmvn package\n```\nand check that there are no errors.\n\n## 5. Add documentation for new or updated functionality\n\nPlease review all of the .md files in this project to see if they are impacted by your change and update them accordingly.\n\n## 6. Add to CHANGELOG.md\n\nAny notable changes should be recorded in the CHANGELOG.md following the [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) conventions.\n\n## 7. Submit a pull request and describe the change\n\nPush your changes to your branch and open a pull request against the parent repo on GitHub. The project administrators will review your pull request and respond with feedback.\n\n# How your contribution gets merged\n\nUpon pull request submission, your code will be reviewed by the maintainers. They will confirm at least the following:\n\n- Tests run successfully (unit, coverage, integration, style).\n- Contribution policy has been followed.\n\nTwo (human) reviewers will need to sign off on your pull request before it can be merged.\n"
  },
  {
    "path": "LICENSE.txt",
    "content": "\n                                 Apache License\n                           Version 2.0, January 2004\n                        http://www.apache.org/licenses/\n\n   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION\n\n   1. Definitions.\n\n      \"License\" shall mean the terms and conditions for use, reproduction,\n      and distribution as defined by Sections 1 through 9 of this document.\n\n      \"Licensor\" shall mean the copyright owner or entity authorized by\n      the copyright owner that is granting the License.\n\n      \"Legal Entity\" shall mean the union of the acting entity and all\n      other entities that control, are controlled by, or are under common\n      control with that entity. For the purposes of this definition,\n      \"control\" means (i) the power, direct or indirect, to cause the\n      direction or management of such entity, whether by contract or\n      otherwise, or (ii) ownership of fifty percent (50%) or more of the\n      outstanding shares, or (iii) beneficial ownership of such entity.\n\n      \"You\" (or \"Your\") shall mean an individual or Legal Entity\n      exercising permissions granted by this License.\n\n      \"Source\" form shall mean the preferred form for making modifications,\n      including but not limited to software source code, documentation\n      source, and configuration files.\n\n      \"Object\" form shall mean any form resulting from mechanical\n      transformation or translation of a Source form, including but\n      not limited to compiled object code, generated documentation,\n      and conversions to other media types.\n\n      \"Work\" shall mean the work of authorship, whether in Source or\n      Object form, made available under the License, as indicated by a\n      copyright notice that is included in or attached to the work\n      (an example is provided in the Appendix below).\n\n      \"Derivative Works\" shall mean any work, whether in Source or Object\n      
form, that is based on (or derived from) the Work and for which the\n      editorial revisions, annotations, elaborations, or other modifications\n      represent, as a whole, an original work of authorship. For the purposes\n      of this License, Derivative Works shall not include works that remain\n      separable from, or merely link (or bind by name) to the interfaces of,\n      the Work and Derivative Works thereof.\n\n      \"Contribution\" shall mean any work of authorship, including\n      the original version of the Work and any modifications or additions\n      to that Work or Derivative Works thereof, that is intentionally\n      submitted to Licensor for inclusion in the Work by the copyright owner\n      or by an individual or Legal Entity authorized to submit on behalf of\n      the copyright owner. For the purposes of this definition, \"submitted\"\n      means any form of electronic, verbal, or written communication sent\n      to the Licensor or its representatives, including but not limited to\n      communication on electronic mailing lists, source code control systems,\n      and issue tracking systems that are managed by, or on behalf of, the\n      Licensor for the purpose of discussing and improving the Work, but\n      excluding communication that is conspicuously marked or otherwise\n      designated in writing by the copyright owner as \"Not a Contribution.\"\n\n      \"Contributor\" shall mean Licensor and any individual or Legal Entity\n      on behalf of whom a Contribution has been received by Licensor and\n      subsequently incorporated within the Work.\n\n   2. Grant of Copyright License. 
Subject to the terms and conditions of\n      this License, each Contributor hereby grants to You a perpetual,\n      worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n      copyright license to reproduce, prepare Derivative Works of,\n      publicly display, publicly perform, sublicense, and distribute the\n      Work and such Derivative Works in Source or Object form.\n\n   3. Grant of Patent License. Subject to the terms and conditions of\n      this License, each Contributor hereby grants to You a perpetual,\n      worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n      (except as stated in this section) patent license to make, have made,\n      use, offer to sell, sell, import, and otherwise transfer the Work,\n      where such license applies only to those patent claims licensable\n      by such Contributor that are necessarily infringed by their\n      Contribution(s) alone or by combination of their Contribution(s)\n      with the Work to which such Contribution(s) was submitted. If You\n      institute patent litigation against any entity (including a\n      cross-claim or counterclaim in a lawsuit) alleging that the Work\n      or a Contribution incorporated within the Work constitutes direct\n      or contributory patent infringement, then any patent licenses\n      granted to You under this License for that Work shall terminate\n      as of the date such litigation is filed.\n\n   4. Redistribution. 
You may reproduce and distribute copies of the\n      Work or Derivative Works thereof in any medium, with or without\n      modifications, and in Source or Object form, provided that You\n      meet the following conditions:\n\n      (a) You must give any other recipients of the Work or\n          Derivative Works a copy of this License; and\n\n      (b) You must cause any modified files to carry prominent notices\n          stating that You changed the files; and\n\n      (c) You must retain, in the Source form of any Derivative Works\n          that You distribute, all copyright, patent, trademark, and\n          attribution notices from the Source form of the Work,\n          excluding those notices that do not pertain to any part of\n          the Derivative Works; and\n\n      (d) If the Work includes a \"NOTICE\" text file as part of its\n          distribution, then any Derivative Works that You distribute must\n          include a readable copy of the attribution notices contained\n          within such NOTICE file, excluding those notices that do not\n          pertain to any part of the Derivative Works, in at least one\n          of the following places: within a NOTICE text file distributed\n          as part of the Derivative Works; within the Source form or\n          documentation, if provided along with the Derivative Works; or,\n          within a display generated by the Derivative Works, if and\n          wherever such third-party notices normally appear. The contents\n          of the NOTICE file are for informational purposes only and\n          do not modify the License. 
You may add Your own attribution\n          notices within Derivative Works that You distribute, alongside\n          or as an addendum to the NOTICE text from the Work, provided\n          that such additional attribution notices cannot be construed\n          as modifying the License.\n\n      You may add Your own copyright statement to Your modifications and\n      may provide additional or different license terms and conditions\n      for use, reproduction, or distribution of Your modifications, or\n      for any such Derivative Works as a whole, provided Your use,\n      reproduction, and distribution of the Work otherwise complies with\n      the conditions stated in this License.\n\n   5. Submission of Contributions. Unless You explicitly state otherwise,\n      any Contribution intentionally submitted for inclusion in the Work\n      by You to the Licensor shall be under the terms and conditions of\n      this License, without any additional terms or conditions.\n      Notwithstanding the above, nothing herein shall supersede or modify\n      the terms of any separate license agreement you may have executed\n      with Licensor regarding such Contributions.\n\n   6. Trademarks. This License does not grant permission to use the trade\n      names, trademarks, service marks, or product names of the Licensor,\n      except as required for reasonable and customary use in describing the\n      origin of the Work and reproducing the content of the NOTICE file.\n\n   7. Disclaimer of Warranty. Unless required by applicable law or\n      agreed to in writing, Licensor provides the Work (and each\n      Contributor provides its Contributions) on an \"AS IS\" BASIS,\n      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\n      implied, including, without limitation, any warranties or conditions\n      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A\n      PARTICULAR PURPOSE. 
You are solely responsible for determining the\n      appropriateness of using or redistributing the Work and assume any\n      risks associated with Your exercise of permissions under this License.\n\n   8. Limitation of Liability. In no event and under no legal theory,\n      whether in tort (including negligence), contract, or otherwise,\n      unless required by applicable law (such as deliberate and grossly\n      negligent acts) or agreed to in writing, shall any Contributor be\n      liable to You for damages, including any direct, indirect, special,\n      incidental, or consequential damages of any character arising as a\n      result of this License or out of the use or inability to use the\n      Work (including but not limited to damages for loss of goodwill,\n      work stoppage, computer failure or malfunction, or any and all\n      other commercial damages or losses), even if such Contributor\n      has been advised of the possibility of such damages.\n\n   9. Accepting Warranty or Additional Liability. While redistributing\n      the Work or Derivative Works thereof, You may choose to offer,\n      and charge a fee for, acceptance of support, warranty, indemnity,\n      or other liability obligations and/or rights consistent with this\n      License. However, in accepting such obligations, You may act only\n      on Your own behalf and on Your sole responsibility, not on behalf\n      of any other Contributor, and only if You agree to indemnify,\n      defend, and hold each Contributor harmless for any liability\n      incurred by, or claims asserted against, such Contributor by reason\n      of your accepting any such warranty or additional liability.\n\n   END OF TERMS AND CONDITIONS\n\n   APPENDIX: How to apply the Apache License to your work.\n\n      To apply the Apache License to your work, attach the following\n      boilerplate notice, with the fields enclosed by brackets \"[]\"\n      replaced with your own identifying information. 
(Don't include\n      the brackets!)  The text should be enclosed in the appropriate\n      comment syntax for the file format. We also recommend that a\n      file or class name and description of purpose be included on the\n      same \"printed page\" as the copyright notice for easier\n      identification within third-party archives.\n\n   Copyright [yyyy] [name of copyright owner]\n\n   Licensed under the Apache License, Version 2.0 (the \"License\");\n   you may not use this file except in compliance with the License.\n   You may obtain a copy of the License at\n\n       http://www.apache.org/licenses/LICENSE-2.0\n\n   Unless required by applicable law or agreed to in writing, software\n   distributed under the License is distributed on an \"AS IS\" BASIS,\n   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n   See the License for the specific language governing permissions and\n   limitations under the License.\n"
  },
  {
    "path": "README.md",
    "content": "\n[![Maven Central](https://maven-badges.herokuapp.com/maven-central/io.github.hiverunner/hiverunner/badge.svg?subject=io.github.hiverunner:hiverunner)](https://maven-badges.herokuapp.com/maven-central/io.github.hiverunner/hiverunner) \n[![Build](https://github.com/HiveRunner/hiverunner/workflows/build/badge.svg)](https://github.com/HiveRunner/HiveRunner/actions?query=workflow:\"build\")\n[![License](https://img.shields.io/badge/License-Apache%202.0-blue.svg)](https://opensource.org/licenses/Apache-2.0)\n\n![ScreenShot](/images/HiveRunnerSplash.png)\n\n# HiveRunner\n\nWelcome to HiveRunner - Zero installation open source unit testing of [Hive](https://hive.apache.org/) applications.\n\n[Watch the HiveRunner teaser on youtube!](http://youtu.be/B7yEAHwgi2w)\n\nWelcome to the open source project HiveRunner. HiveRunner is a unit test framework based on JUnit (4 & 5) and enables \nTDD development of Hive SQL without the need for any installed dependencies. All you need is to add HiveRunner to your \n`pom.xml` as any other library and you're good to go.\n\nHiveRunner is under constant development. It is used extensively by many companies. Please feel free to suggest \nimprovements both as pull requests and as written requests.\n\n\n## Overview\n\nHiveRunner enables you to write Hive SQL as releasable tested artifacts. It will require you to parametrize and \nmodularize Hive SQL in order to make it testable. The bits and pieces of code should then be wired together with some \norchestration/workflow/build tool of your choice, to be runnable in your environment (e.g. Oozie, Pentaho, Talend, \nMaven, etc.) 
\n\nSo, even though your current Hive SQL probably won't run off the shelf within HiveRunner, we believe the enforced \ntestability and enabling of a TDD workflow will do as much good to the scripting world of SQL as it has for the Java \ncommunity.\n\n## Versions\n\nDifferent versions of HiveRunner target different versions of Hive as follows:\n\n| HiveRunner Version | Hive Version | Status                     | Source Code Branch                                     |\n|--------------------|--------------|----------------------------|--------------------------------------------------------|\n| 7.x                | 4.x          | New, active development    | https://github.com/HiveRunner/HiveRunner/tree/hive-4.x |\n| 6.x                | 3.x          | Stable, active development | https://github.com/HiveRunner/HiveRunner (i.e. `main`) |\n| 5.x                | 2.x          | Stable, bug fixes only     | https://github.com/HiveRunner/HiveRunner/tree/hive-2.x |\n\n\n# Cook Book\n\n## 1. Include HiveRunner\n\nHiveRunner is published to [Maven Central](https://search.maven.org/search?q=hiverunner). 
To start to use it, add a dependency to HiveRunner to your pom file:\n\n    <dependency>\n        <groupId>io.github.hiverunner</groupId>\n        <artifactId>hiverunner</artifactId>\n        <version>[HIVERUNNER VERSION]</version>\n        <scope>test</scope>\n    </dependency>\n\nAlternatively, if you want to build from source, clone this repo and build with:\n\n     mvn install\n\nThen add the dependency as mentioned above.\n\nAlso explicitly add the surefire plugin and configure forkMode=always to avoid OutOfMemory when building big test suites.\n\n    <plugin>\n        <groupId>org.apache.maven.plugins</groupId>\n        <artifactId>maven-surefire-plugin</artifactId>\n        <version>2.21.0</version>\n        <configuration>\n            <forkMode>always</forkMode>\n        </configuration>\n    </plugin>\n\nAs an alternative if this does not solve the OOM issues, try increase the -Xmx and -XX:MaxPermSize settings. For example:\n\n    <plugin>\n        <groupId>org.apache.maven.plugins</groupId>\n        <artifactId>maven-surefire-plugin</artifactId>\n        <version>2.21.0</version>\n        <configuration>\n            <forkCount>1</forkCount>\n            <reuseForks>false</reuseForks>\n            <argLine>-Xmx2048m -XX:MaxPermSize=512m</argLine>\n        </configuration>\n    </plugin>\n\n(please note that the forkMode option is deprecated and you should use forkCount and reuseForks instead)\n\nWith forkCount and reuseForks there is a possibility to reduce the test execution time drastically, depending on your hardware. 
A plugin configuration which are using one fork per CPU core and reuse threads would look like:\n\n    <plugin>\n        <groupId>org.apache.maven.plugins</groupId>\n        <artifactId>maven-surefire-plugin</artifactId>\n        <version>2.21.0</version>\n        <configuration>\n            <forkCount>1C</forkCount>\n            <reuseForks>true</reuseForks>\n            <argLine>-Xmx2048m -XX:MaxPermSize=512m</argLine>\n        </configuration>\n    </plugin>\n\nBy default, HiveRunner uses mapreduce (mr) as the execution engine for Hive. If you wish to run using Tez, set the \nSystem property `hiveconf_hive.execution.engine` to 'tez'.\n\n(Any Hive conf property may be overridden by prefixing it with 'hiveconf_')\n        \n        <plugin>\n            <groupId>org.apache.maven.plugins</groupId>\n            <artifactId>maven-surefire-plugin</artifactId>\n            <version>2.21.0</version>\n            <configuration>\n                <systemProperties>\n                    <hiveconf_hive.execution.engine>tez</hiveconf_hive.execution.engine>\n                    <hiveconf_hive.exec.counters.pull.interval>1000</hiveconf_hive.exec.counters.pull.interval>\n                </systemProperties>\n            </configuration>\n        </plugin>\n\n### Timeout\nIt's possible to configure HiveRunner to make tests time out after some time and retry those tests a couple of times, but only when using `StandaloneHiveRunner` as this is not available in the `HiveRunnerExtension` (from HiveRunner 5.x and up). 
This is to cover for the bug\nhttps://issues.apache.org/jira/browse/TEZ-2475 that at times causes test cases to not terminate due to a lost DAG reference.\nThe timeout feature can be configured via the 'enableTimeout', 'timeoutSeconds' and 'timeoutRetries' properties.\nA configuration which enables timeouts after 30 seconds and allows 2 retries would look like:\n\n    <plugin>\n        <groupId>org.apache.maven.plugins</groupId>\n        <artifactId>maven-surefire-plugin</artifactId>\n        <version>2.21.0</version>\n        <configuration>\n            <systemProperties>\n                <enableTimeout>true</enableTimeout>\n                <timeoutSeconds>30</timeoutSeconds>\n                <timeoutRetries>2</timeoutRetries>\n            </systemProperties>\n        </configuration>\n    </plugin>\n\n\n### Logging\n\nHiveRunner uses [SLF4J](https://www.slf4j.org/) so you should configure logging in your tests using any compatible logging framework.\n\n## 2. Look at the examples\n\nLook at the [com.klarna.hiverunner.examples.HelloHiveRunnerTest](/src/test/java/com/klarna/hiverunner/examples/HelloHiveRunnerTest.java) reference test case to get a feeling for how a typical test case looks like in JUnit5. To find JUnit4 versions of the examples, look at [com.klarna.hiverunner.examples.junit4.HelloHiveRunnerTest](/src/test/java/com/klarna/hiverunner/examples/junit4/HelloHiveRunnerTest.java).\n\nIf you're put off by the verbosity of the annotations, there's always the possibility to use HiveShell in a more interactive mode.  
The [com.klarna.hiverunner.SerdeTest](/src/test/java/com/klarna/hiverunner/SerdeTest.java) adds a resource (test data) interactively with HiveShell instead of using annotations.\n\nAnnotations and interactive mode can be mixed and matched, however you'll always need to include the [com.klarna.hiverunner.annotations.HiveSQL](/src/main/java/com/klarna/hiverunner/annotations/HiveSQL.java) annotation e.g:\n\n         @HiveSQL(files = {\"serdeTest/create_table.sql\", \"serdeTest/hql_custom_serde.sql\"}, autoStart = false)\n         public HiveShell hiveShell;\n\nNote that the *autostart = false* is needed for the interactive mode. It can be left out when running with only annotations.\n\n### Sequence files\nIf you work with __sequence files__ (Or anything other than regular text files) make sure to take a look at [ResourceOutputStreamTest](/src/test/java/com/klarna/hiverunner/ResourceOutputStreamTest.java) \nfor an example of how to use the new method [HiveShell](src/main/java/com/klarna/hiverunner/HiveShell.java)\\#getResourceOutputStream to manage test input data. \n\n### Programmatically create test input data\n\nTest data can be programmatically inserted into any Hive table using `HiveShell.insertInto(...)`. 
This seamlessly handles different storage formats and partitioning types allowing you to focus on the data required by your test scenarios:\n\n    hiveShell.execute(\"create database test_db\");\n    hiveShell.execute(\"create table test_db.test_table (\"\n        + \"c1 string,\"\n        + \"c2 string,\"\n        + \"c3 string\"\n        + \")\"\n        + \"partitioned by (p1 string)\"\n        + \"stored as orc\");\n\n    hiveShell.insertInto(\"test_db\", \"test_table\")\n        .withColumns(\"c1\", \"p1\").addRow(\"v1\", \"p1\")       // add { \"v1\", null, null, \"p1\" }\n        .withAllColumns().addRow(\"v1\", \"v2\", \"v3\", \"p1\")  // add { \"v1\", \"v2\", \"v3\", \"p1\" }\n        .copyRow().set(\"c1\", \"v4\")                        // add { \"v4\", \"v2\", \"v3\", \"p1\" }\n        .addRowsFromTsv(file)                             // parses TSV data out of a file resource\n        .addRowsFrom(file, fileParser)                    // parses custom data out of a file resource\n        .commit();\n\nSee [com.klarna.hiverunner.examples.InsertTestDataTest](/src/test/java/com/klarna/hiverunner/examples/InsertTestDataTest.java) for working examples.\n\n## 3. Understand the order of execution\n\nHiveRunner will in default mode set up and start the HiveShell before the test method is invoked. If autostart is set to false, the [HiveShell](/src/main/java/com/klarna/hiverunner/HiveShell.java) must be started manually from within the test method. Either way, HiveRunner will do the following steps when start is invoked:\n\n1. Merge any [@HiveProperties](/src/main/java/com/klarna/hiverunner/annotations/HiveProperties.java) from the test case with the Hive conf\n2. Start the HiveServer with the merged conf\n3. Copy all [@HiveResource](/src/main/java/com/klarna/hiverunner/annotations/HiveResource.java) data into the temp file area for the test\n4. 
Execute all fields annotated with [@HiveSetupScript](/src/main/java/com/klarna/hiverunner/annotations/HiveSetupScript.java)\n5. Execute the script files given in the [@HiveSQL](/src/main/java/com/klarna/hiverunner/annotations/HiveSQL.java) annotation\n\nThe [HiveShell](/src/main/java/com/klarna/hiverunner/HiveShell.java) field annotated with [@HiveSQL](/src/main/java/com/klarna/hiverunner/annotations/HiveSQL.java) will always be injected before the test method is invoked.\n\n\n# Hive version compatibility\n\n- This version of HiveRunner is built for Hive 3.1.2.\n- For Hive 2.x support please use HiveRunner 5.x.\n- Command shell emulations are provided to closely match the behaviour of both the Hive CLI and Beeline interactive shells. The desired emulation can be specified in your `pom.xml` file like so: \n\n        <plugin>\n            <groupId>org.apache.maven.plugins</groupId>\n            <artifactId>maven-surefire-plugin</artifactId>\n            <version>2.21.0</version>\n            <configuration>\n                <systemProperties>\n                    <!-- Defaults to HIVE_CLI, other options include BEELINE and HIVE_CLI_PRE_V200 -->\n                    <commandShellEmulator>BEELINE</commandShellEmulator>\n                </systemProperties>\n            </configuration>\n        </plugin>\n\n  Or provided on the command line using a system property:\n\n      mvn -DcommandShellEmulator=BEELINE test\n\n# Future work and Limitations\n\n* HiveRunner does not allow the `add jar` statement. It is considered bad practice to keep environment specific code together with the business logic that targets HiveRunner. Keep environment specific stuff in separate files and use your build/orchestration/workflow tool to run the right files in the right order in the right environment. 
When running HiveRunner, all SerDes available on the classpath of the IDE/maven will be available.\n\n* HiveRunner runs Hive and Hive runs on top of Hadoop, and Hadoop has limited support for Windows machines. Installing [Cygwin](http://www.cygwin.com/ \"Cygwin\") might help out.\n\n* Currently the HiveServer spins up and tears down for every test method. As a performance option it should be possible to clean the HiveServer and metastore between each test method invocation. The choice should probably be exposed to the test writer. By switching between different strategies, side effects/leakage can be ruled out during test case debugging. See [#69](https://github.com/HiveRunner/HiveRunner/issues/69).\n\n# Known Issues\n\n### UnknownHostException\nI've had issues with UnknownHostException on OS X after upgrading my system or running Docker. \nUsually a restart of my machine solved it, but last time I got some corporate \nstuff installed the restarts stopped working and I kept getting UnknownHostExceptions. \nFollowing this simple guide solved my problem:\nhttp://crunchify.com/getting-java-net-unknownhostexception-nodename-nor-servname-provided-or-not-known-error-on-mac-os-x-update-your-privateetchosts-file/\n\n### Tez queries do not terminate\nTez will at times forget the process id of a random DAG. This will cause the query to never terminate. 
To get around this there is \na timeout and retry functionality implemented in HiveRunner:\n \n         <plugin>\n             <groupId>org.apache.maven.plugins</groupId>\n             <artifactId>maven-surefire-plugin</artifactId>\n             <version>2.21.0</version>\n             <configuration>\n                 <systemProperties>\n                     <enableTimeout>true</enableTimeout>\n                     <timeoutSeconds>30</timeoutSeconds>\n                     <timeoutRetries>2</timeoutRetries>\n                     </systemProperties>\n             </configuration>\n         </plugin>\n         \nMake sure to set the timeoutSeconds to that of your slowest test in the test suite and then add some padding.\n\n# Contact\n\n# Mailing List\nIf you would like to ask any questions about or discuss HiveRunner please join our mailing list at\n\n  [https://groups.google.com/forum/#!forum/hive-runner-user](https://groups.google.com/forum/#!forum/hive-runner-user)\n\n# History\nThis project was initially developed and maintained by [Klarna](https://klarna.github.io/) and then by [Expedia Group](https://expediagroup.github.io/) before moving to its own top-level organisation on GitHub.\n\n# Legal\nThis project is available under the [Apache 2.0 License](http://www.apache.org/licenses/LICENSE-2.0.html).\n\nCopyright 2021-2024 The HiveRunner Contributors.\n\nCopyright 2013-2021 Klarna AB.\n"
  },
  {
    "path": "RELEASING.md",
    "content": "# Releasing HiveRunner to Maven Central\n\nHiveRunner has been set up to build continuously and also to deploy SNAPSHOTS to Sonatype and releases to Maven Central via GitHub Actions.\n\n## Deploying a SNAPSHOT to Sonatype\n\n* Select the https://github.com/HiveRunner/HiveRunner/actions/workflows/deploy.yml worfklow\n* Click \"Run workflow\"\n* Select the branch to use to deploy a SNAPSHOT from:\n  * Use `main` as the branch for a Hive 3.x release\n  * Use `hive-4.x` as the branch for a Hive 4.x release\n  * Use `hive-2.x` as the branch for a Hive 2.x release\n* Run the workflow\n* SNAPSHOT artifacts will be available at https://s01.oss.sonatype.org/content/repositories/snapshots/io/github/hiverunner/hiverunner/\n\n## Deploying a release to Maven Central\n\n* Ensure the `pom.xml` has the SNAPSHOT version set to the value you would like to make a release from\n* Update `CHANGELOG.md` with the date corresponding to when you're performing the release\n* Select the https://github.com/HiveRunner/HiveRunner/actions/workflows/release.yml worfklow\n* Click \"Run workflow\"\n* Select the branch to use to deploy a release from:\n  * Use `main` as the branch for a Hive 3.x release\n  * Use `hive-4.x` as the branch for a Hive 4.x release \n  * Use `hive-2.x` as the branch for a Hive 2.x release\n* Run the workflow\n* Release artifacts will be available at https://repo1.maven.org/maven2/io/github/hiverunner/hiverunner/\n* It can take a few hours before the artifacts show up in searches performed at https://search.maven.org/search?q=hiverunner\n"
  },
  {
    "path": "pom.xml",
    "content": "<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n  <modelVersion>4.0.0</modelVersion>\n\n  <groupId>io.github.hiverunner</groupId>\n  <artifactId>hiverunner</artifactId>\n  <version>6.1.1-SNAPSHOT</version>\n  <name>HiveRunner</name>\n  <description>HiveRunner is a unit test framework based on JUnit (4 or 5) that enables TDD development of Hive SQL without the need of any installed dependencies.</description>\n  <url>https://github.com/HiveRunner/HiveRunner</url>\n  <!-- below isn't the actual inception year but is the year the copyright changed -->\n  <inceptionYear>2021</inceptionYear>\n\n  <licenses>\n    <license>\n      <name>The Apache Software License, Version 2.0</name>\n      <url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>\n      <distribution>repo</distribution>\n    </license>\n  </licenses>\n  \n  <developers>\n    <developer>\n      <name>HiveRunner Committers</name>\n      <organization>HiveRunner</organization>\n      <organizationUrl>https://github.com/HiveRunner</organizationUrl>\n    </developer>\n  </developers>  \n\n  <scm>\n    <connection>scm:git:https://github.com/HiveRunner/HiveRunner.git</connection>\n    <developerConnection>scm:git:https://github.com/HiveRunner/HiveRunner.git</developerConnection>\n    <url>git@github.com:HiveRunner/HiveRunner.git</url>\n    <tag>HEAD</tag>\n  </scm>\n\n  <properties>\n    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>\n\n    <hadoop.version>3.1.0</hadoop.version>\n    <hive.execution.engine>mr</hive.execution.engine>\n    <hive.version>3.1.2</hive.version>\n    <junit.jupiter.version>5.7.1</junit.jupiter.version>\n    <license.maven.plugin.version>3.0</license.maven.plugin.version>\n    <tez.version>0.9.1</tez.version>\n    <mockito.version>3.8.0</mockito.version>\n  </properties>\n  \n  
<dependencies>\n    <dependency>\n      <groupId>org.apache.hive</groupId>\n      <artifactId>hive-serde</artifactId>\n      <version>${hive.version}</version>\n    </dependency>\n\n    <dependency>\n      <groupId>com.fasterxml.jackson.core</groupId>\n      <artifactId>jackson-annotations</artifactId>\n      <version>2.9.5</version>\n    </dependency>\n\n    <dependency>\n      <groupId>org.apache.hive</groupId>\n      <artifactId>hive-jdbc</artifactId>\n      <version>${hive.version}</version>\n    </dependency>\n\n    <dependency>\n      <groupId>org.apache.hive.hcatalog</groupId>\n      <artifactId>hive-webhcat-java-client</artifactId>\n      <version>${hive.version}</version>\n    </dependency>\n\n    <dependency>\n      <groupId>org.apache.hive</groupId>\n      <artifactId>hive-service</artifactId>\n      <version>${hive.version}</version>\n    </dependency>\n\n    <dependency>\n      <artifactId>tez-dag</artifactId>\n      <groupId>org.apache.tez</groupId>\n      <version>${tez.version}</version>\n    </dependency>\n\n    <dependency>\n      <artifactId>tez-common</artifactId>\n      <groupId>org.apache.tez</groupId>\n      <version>${tez.version}</version>\n    </dependency>\n\n    <dependency>\n      <artifactId>tez-mapreduce</artifactId>\n      <groupId>org.apache.tez</groupId>\n      <version>${tez.version}</version>\n    </dependency>\n\n    <dependency>\n      <groupId>org.apache.hive</groupId>\n      <artifactId>hive-contrib</artifactId>\n      <version>${hive.version}</version>\n      <scope>test</scope>\n    </dependency>\n\n    <dependency>\n      <groupId>org.reflections</groupId>\n      <artifactId>reflections</artifactId>\n      <version>0.9.8</version>\n    </dependency>\n\n    <dependency>\n      <groupId>org.apache.hadoop</groupId>\n      <artifactId>hadoop-mapreduce-client-common</artifactId>\n      <version>${hadoop.version}</version>\n    </dependency>\n\n    <dependency>\n      <groupId>org.apache.hadoop</groupId>\n      
<artifactId>hadoop-mapreduce-client-core</artifactId>\n      <version>${hadoop.version}</version>\n    </dependency>\n\n    <dependency>\n      <groupId>com.github.stefanbirkner</groupId>\n      <artifactId>system-rules</artifactId>\n      <version>1.19.0</version>\n      <scope>test</scope>\n    </dependency>\n\n    <!-- Always put this before JUnit or the class loader might load the\n      wrong Matcher -->\n    <dependency>\n      <groupId>org.hamcrest</groupId>\n      <artifactId>hamcrest-all</artifactId>\n      <version>1.3</version>\n      <scope>test</scope>\n    </dependency>\n\n    <dependency>\n      <groupId>org.mockito</groupId>\n      <artifactId>mockito-core</artifactId>\n      <version>${mockito.version}</version>\n      <scope>test</scope>\n    </dependency>\n\n    <dependency>\n      <groupId>org.mockito</groupId>\n      <artifactId>mockito-junit-jupiter</artifactId>\n      <version>${mockito.version}</version>\n      <scope>test</scope>\n    </dependency>\n\n    <dependency>\n      <groupId>junit</groupId>\n      <artifactId>junit</artifactId>\n      <version>4.13.2</version>\n      <scope>provided</scope>\n    </dependency>\n\n    <dependency>\n      <groupId>org.junit.jupiter</groupId>\n      <artifactId>junit-jupiter</artifactId>\n      <version>${junit.jupiter.version}</version>\n    </dependency>\n\n    <dependency>\n      <groupId>org.junit.vintage</groupId>\n      <artifactId>junit-vintage-engine</artifactId>\n      <version>${junit.jupiter.version}</version>\n    </dependency>\n\n  </dependencies>\n\n  <build>\n    <plugins>\n      <!-- forkMode:always resolves OOM error when running unit tests -->\n      <plugin>\n        <groupId>org.apache.maven.plugins</groupId>\n        <artifactId>maven-surefire-plugin</artifactId>\n        <version>2.22.2</version>\n        <configuration>\n          <!-- Hiverunner need this for some queries (property -XX:MaxPermSize\n            was removed in java 8) -->\n          <argLine>-Xmx2024m</argLine>\n  
        <!-- below needed due to https://github.com/HiveRunner/HiveRunner/commit/1f9a9b353c3b072f7898a6b4fa277474674d4b54 -->\n          <reuseForks>false</reuseForks>\n          <systemProperties>\n            <!--\n              Any hive conf property may be overridden here by suffixing\n              it with 'hiveconf_'\n            -->\n            <hiveconf_hive.execution.engine>${hive.execution.engine}</hiveconf_hive.execution.engine>\n            <hiveconf_hive.exec.counters.pull.interval>1000</hiveconf_hive.exec.counters.pull.interval>\n            <enableTimeout>false</enableTimeout>\n            <timeoutSeconds>30</timeoutSeconds>\n            <timeoutRetries>2</timeoutRetries>\n          </systemProperties>\n        </configuration>\n      </plugin>\n      <plugin>\n        <groupId>org.apache.maven.plugins</groupId>\n        <artifactId>maven-compiler-plugin</artifactId>\n        <version>3.7.0</version>\n        <configuration>\n          <source>1.8</source>\n          <target>1.8</target>\n        </configuration>\n      </plugin>\n      <plugin>\n        <groupId>org.apache.maven.plugins</groupId>\n        <artifactId>maven-jar-plugin</artifactId>\n        <version>3.2.0</version>\n        <configuration>\n          <archive>\n            <manifestEntries>\n              <Build-Version>${project.version}</Build-Version>\n              <Build-DateTime>${maven.build.timestamp}</Build-DateTime>\n              <Maven-GroupId>${project.groupId}</Maven-GroupId>\n              <Maven-ArtifactId>${project.artifactId}</Maven-ArtifactId>\n            </manifestEntries>\n          </archive>\n        </configuration>\n      </plugin>\n\n      <plugin>\n        <groupId>com.mycila</groupId>\n        <artifactId>license-maven-plugin</artifactId>\n        <version>${license.maven.plugin.version}</version>\n        <dependencies>\n          <dependency>\n            <groupId>com.mycila</groupId>\n            <artifactId>license-maven-plugin-git</artifactId>\n      
      <version>${license.maven.plugin.version}</version>\n          </dependency>\n        </dependencies>\n        <configuration>\n          <header>src/main/license/APACHE-2.txt</header>\n          <properties>\n            <owner>The HiveRunner Contributors</owner>\n          </properties>\n          <includes>\n            <include>src/main/java/**</include>\n            <include>src/test/java/**</include>\n          </includes>\n        </configuration>\n        <executions>\n          <execution>\n            <phase>validate</phase>\n            <goals>\n              <goal>format</goal>\n            </goals>\n          </execution>\n        </executions>\n      </plugin>\n      \n     <plugin>\n        <groupId>org.apache.maven.plugins</groupId>\n        <artifactId>maven-release-plugin</artifactId>\n        <version>3.0.0-M1</version>\n        <configuration>\n          <tagNameFormat>v@{project.version}</tagNameFormat>\n          <autoVersionSubmodules>true</autoVersionSubmodules>\n          <useReleaseProfile>false</useReleaseProfile>\n          <releaseProfiles>oss-release</releaseProfiles>\n          <goals>deploy</goals>\n        </configuration>\n      </plugin>      \n\n      <plugin>\n        <groupId>org.sonatype.plugins</groupId>\n        <artifactId>nexus-staging-maven-plugin</artifactId>\n        <version>1.6.8</version>\n        <extensions>true</extensions>\n        <configuration>\n          <serverId>ossrh</serverId>\n          <nexusUrl>https://s01.oss.sonatype.org/</nexusUrl>\n          <autoReleaseAfterClose>true</autoReleaseAfterClose>\n        </configuration>\n      </plugin>\n\n    </plugins>\n  </build>\n\n  <profiles>\n    <profile>\n      <id>oss-release</id>\n      <distributionManagement>\n        <snapshotRepository>\n          <id>ossrh</id>\n          <url>https://s01.oss.sonatype.org/content/repositories/snapshots</url>\n        </snapshotRepository>\n        <repository>\n          <id>ossrh</id>\n          
<url>https://s01.oss.sonatype.org/service/local/staging/deploy/maven2/</url>\n        </repository>\n      </distributionManagement>\n      <build>\n        <plugins>\n          <plugin>\n            <groupId>org.apache.maven.plugins</groupId>\n            <artifactId>maven-source-plugin</artifactId>\n            <version>3.2.0</version>\n            <executions>\n              <execution>\n                <id>attach-sources</id>\n                <goals>\n                  <goal>jar-no-fork</goal>\n                </goals>\n              </execution>\n            </executions>\n          </plugin>\n          <plugin>\n            <groupId>org.apache.maven.plugins</groupId>\n            <artifactId>maven-javadoc-plugin</artifactId>\n            <version>3.2.0</version>\n            <configuration>\n               <encoding>${project.build.sourceEncoding}</encoding>\n               <source>8</source>\n               <detectJavaApiLink>false</detectJavaApiLink>\n               <doclint>none</doclint>\n            </configuration>            \n            <executions>\n              <execution>\n                <id>attach-javadocs</id>\n                <goals>\n                  <goal>jar</goal>\n                </goals>\n              </execution>\n            </executions>\n          </plugin>\n          <plugin>\n            <groupId>org.apache.maven.plugins</groupId>\n            <artifactId>maven-gpg-plugin</artifactId>\n            <version>1.6</version>\n            <executions>\n              <execution>\n                <id>sign-artifacts</id>\n                <phase>verify</phase>\n                <goals>\n                  <goal>sign</goal>\n                </goals>\n                <configuration>\n                    <gpgArguments>\n                        <arg>--pinentry-mode</arg>\n                        <arg>loopback</arg>\n                    </gpgArguments>                \n                </configuration>\n              </execution>\n            
</executions>\n          </plugin>\n        </plugins>\n      </build>\n    </profile>\n    <profile>\n      <id>tez</id>\n      <properties>\n        <hive.execution.engine>tez</hive.execution.engine>\n      </properties>\n    </profile>\n  </profiles>\n</project>\n"
  },
  {
    "path": "src/main/java/com/klarna/hiverunner/HiveRunnerCore.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner;\n\nimport static org.reflections.ReflectionUtils.withAnnotation;\n\nimport java.io.File;\nimport java.io.IOException;\nimport java.lang.reflect.Field;\nimport java.net.URISyntaxException;\nimport java.nio.charset.Charset;\nimport java.nio.charset.StandardCharsets;\nimport java.nio.file.Files;\nimport java.nio.file.Path;\nimport java.nio.file.Paths;\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Set;\n\nimport com.google.common.base.Preconditions;\nimport com.google.common.io.Resources;\nimport com.klarna.hiverunner.annotations.HiveProperties;\nimport com.klarna.hiverunner.annotations.HiveResource;\nimport com.klarna.hiverunner.annotations.HiveSQL;\nimport com.klarna.hiverunner.annotations.HiveSetupScript;\nimport com.klarna.hiverunner.builder.HiveShellBuilder;\nimport com.klarna.hiverunner.builder.Script;\nimport com.klarna.hiverunner.config.HiveRunnerConfig;\nimport com.klarna.reflection.ReflectionUtils;\n\nclass HiveRunnerCore {\n\n    /**\n     * Traverses the test case annotations. Will inject a HiveShell in the test case that envelopes the HiveServer.\n     */\n    HiveShellContainer createHiveServerContainer(List<? 
extends Script> scripts, Object testCase,\n                                                 Path baseDir, HiveRunnerConfig config)\n            throws IOException {\n\n        HiveServerContext context = new StandaloneHiveServerContext(baseDir, config);\n\n        return buildShell(scripts, testCase, config, context);\n    }\n\n    private HiveShellContainer buildShell(List<? extends Script> scripts, Object testCase, HiveRunnerConfig config,\n                                          HiveServerContext context) throws IOException {\n        HiveServerContainer hiveTestHarness = new HiveServerContainer(context);\n\n        HiveShellBuilder hiveShellBuilder = new HiveShellBuilder();\n        hiveShellBuilder.setCommandShellEmulation(config.getCommandShellEmulator());\n\n        HiveShellField shellSetter = loadScriptUnderTest(testCase, hiveShellBuilder);\n        if (!scripts.isEmpty()) {\n            hiveShellBuilder.overrideScriptsUnderTest(scripts);\n        }\n\n        hiveShellBuilder.setHiveServerContainer(hiveTestHarness);\n\n        loadAnnotatedResources(testCase, hiveShellBuilder);\n\n        loadAnnotatedProperties(testCase, hiveShellBuilder);\n\n        loadAnnotatedSetupScripts(testCase, hiveShellBuilder);\n\n        // Build shell\n        HiveShellContainer shell = hiveShellBuilder.buildShell();\n\n        // Set shell\n        shellSetter.setShell(shell);\n\n        if (shellSetter.isAutoStart()) {\n            shell.start();\n        }\n        return shell;\n    }\n\n    private HiveShellField loadScriptUnderTest(Object testCaseInstance, HiveShellBuilder hiveShellBuilder) {\n        try {\n            Set<Field> fields = ReflectionUtils.getAllFields(testCaseInstance.getClass(), withAnnotation(HiveSQL.class));\n\n            Preconditions.checkState(fields.size() == 1, \"Exact one field should to be annotated with @HiveSQL\");\n\n            Field field = fields.iterator().next();\n            HiveSQL annotation = 
field.getAnnotation(HiveSQL.class);\n            List<Path> scriptPaths = getScriptPaths(annotation);\n            Charset charset = annotation.encoding().equals(\"\") ?\n                    Charset.defaultCharset() : Charset.forName(annotation.encoding());\n\n            boolean isAutoStart = annotation.autoStart();\n\n            hiveShellBuilder.setScriptsUnderTest(scriptPaths, charset);\n\n            return new HiveShellField() {\n                @Override\n                public void setShell(HiveShell shell) {\n                    ReflectionUtils.setField(testCaseInstance, field.getName(), shell);\n                }\n\n                @Override\n                public boolean isAutoStart() {\n                    return isAutoStart;\n                }\n            };\n        } catch (Throwable t) {\n            throw new IllegalArgumentException(\"Failed to init field annotated with @HiveSQL: \" + t.getMessage(), t);\n        }\n    }\n\n    protected List<Path> getScriptPaths(HiveSQL annotation) throws URISyntaxException {\n        List<Path> scriptPaths = new ArrayList<>();\n        for (String scriptFilePath : annotation.files()) {\n            Path file = Paths.get(Resources.getResource(scriptFilePath).toURI());\n            assertFileExists(file);\n            scriptPaths.add(file);\n        }\n        return scriptPaths;\n    }\n\n    private void assertFileExists(Path file) {\n        Preconditions.checkState(Files.exists(file), \"File \" + file + \" does not exist\");\n    }\n\n    private void loadAnnotatedSetupScripts(Object testCase, HiveShellBuilder workFlowBuilder) {\n        Set<Field> setupScriptFields = ReflectionUtils.getAllFields(testCase.getClass(),\n                withAnnotation(HiveSetupScript.class));\n\n        for (Field setupScriptField : setupScriptFields) {\n            if (ReflectionUtils.isOfType(setupScriptField, String.class)) {\n                String script = ReflectionUtils.getFieldValue(testCase, 
setupScriptField.getName(), String.class);\n                workFlowBuilder.addSetupScript(script);\n            } else if (ReflectionUtils.isOfType(setupScriptField, File.class) ||\n                    ReflectionUtils.isOfType(setupScriptField, Path.class)) {\n                Path path = getMandatoryPathFromField(testCase, setupScriptField);\n                workFlowBuilder.addSetupScript(readAll(path));\n            } else {\n                throw new IllegalArgumentException(\n                        \"Field annotated with @HiveSetupScript currently only supports type String, File and Path\");\n            }\n        }\n    }\n\n    private static String readAll(Path path) {\n        try {\n            return new String(Files.readAllBytes(path), StandardCharsets.UTF_8);\n        } catch (IOException e) {\n            throw new IllegalStateException(\"Unable to read \" + path + \": \" + e.getMessage(), e);\n        }\n    }\n\n    private void loadAnnotatedResources(Object testCase, HiveShellBuilder workFlowBuilder) throws IOException {\n        Set<Field> fields = ReflectionUtils.getAllFields(testCase.getClass(), withAnnotation(HiveResource.class));\n\n        for (Field resourceField : fields) {\n\n            HiveResource annotation = resourceField.getAnnotation(HiveResource.class);\n            String targetFile = annotation.targetFile();\n\n            if (ReflectionUtils.isOfType(resourceField, String.class)) {\n                String data = ReflectionUtils.getFieldValue(testCase, resourceField.getName(), String.class);\n                workFlowBuilder.addResource(targetFile, data);\n            } else if (ReflectionUtils.isOfType(resourceField, File.class) ||\n                    ReflectionUtils.isOfType(resourceField, Path.class)) {\n                Path dataFile = getMandatoryPathFromField(testCase, resourceField);\n                workFlowBuilder.addResource(targetFile, dataFile);\n            } else {\n                throw new 
IllegalArgumentException(\n                        \"Fields annotated with @HiveResource currently only supports field type String, File or Path\");\n            }\n        }\n    }\n\n    private Path getMandatoryPathFromField(Object testCase, Field resourceField) {\n        Path path;\n        if (ReflectionUtils.isOfType(resourceField, File.class)) {\n            File dataFile = ReflectionUtils.getFieldValue(testCase, resourceField.getName(), File.class);\n            path = Paths.get(dataFile.toURI());\n        } else if (ReflectionUtils.isOfType(resourceField, Path.class)) {\n            path = ReflectionUtils.getFieldValue(testCase, resourceField.getName(), Path.class);\n        } else {\n            throw new IllegalArgumentException(\n                    \"Only Path or File type is allowed on annotated field \" + resourceField);\n        }\n\n        Preconditions.checkArgument(Files.exists(path), \"File %s does not exist\", path);\n        return path;\n    }\n\n    private void loadAnnotatedProperties(Object testCase, HiveShellBuilder workFlowBuilder) {\n        for (Field hivePropertyField : ReflectionUtils.getAllFields(testCase.getClass(),\n                withAnnotation(HiveProperties.class))) {\n            Preconditions.checkState(ReflectionUtils.isOfType(hivePropertyField, Map.class),\n                    \"Field annotated with @HiveProperties should be of type Map<String, String>\");\n            workFlowBuilder.putAllProperties(\n                    ReflectionUtils.getFieldValue(testCase, hivePropertyField.getName(), Map.class));\n        }\n    }\n\n    /**\n     * Used as a handle for the HiveShell field in the test case so that we may set it once the\n     * HiveShell has been instantiated.\n     */\n    interface HiveShellField {\n\n        void setShell(HiveShell shell);\n\n        boolean isAutoStart();\n    }\n}\n"
  },
  {
    "path": "src/main/java/com/klarna/hiverunner/HiveRunnerExtension.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner;\n\nimport static org.reflections.ReflectionUtils.withAnnotation;\nimport static org.reflections.ReflectionUtils.withType;\n\nimport java.io.IOException;\nimport java.io.UncheckedIOException;\nimport java.lang.reflect.Field;\nimport java.net.URISyntaxException;\nimport java.nio.file.Files;\nimport java.nio.file.Path;\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.Set;\n\nimport org.apache.commons.io.FileUtils;\nimport org.junit.jupiter.api.extension.AfterEachCallback;\nimport org.junit.jupiter.api.extension.ExtensionContext;\nimport org.junit.jupiter.api.extension.TestInstancePostProcessor;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\nimport com.google.common.base.Preconditions;\nimport com.google.common.base.Predicates;\nimport com.klarna.hiverunner.annotations.HiveRunnerSetup;\nimport com.klarna.hiverunner.annotations.HiveSQL;\nimport com.klarna.hiverunner.builder.Script;\nimport com.klarna.hiverunner.config.HiveRunnerConfig;\nimport com.klarna.reflection.ReflectionUtils;\n\npublic class HiveRunnerExtension implements AfterEachCallback, TestInstancePostProcessor {\n\n    private static final Logger LOGGER = LoggerFactory.getLogger(HiveRunnerExtension.class);\n\n    private final HiveRunnerCore core;\n    private final 
HiveRunnerConfig config = new HiveRunnerConfig();\n    private Path basedir;\n    private HiveShellContainer container;\n    protected List<Script> scriptsUnderTest = new ArrayList<Script>();\n\n    public HiveRunnerExtension() {\n        core = new HiveRunnerCore();\n    }\n\n    protected List<Path> getScriptPaths(HiveSQL annotation) throws URISyntaxException {\n        return core.getScriptPaths(annotation);\n    }\n\n    @Override\n    public void postProcessTestInstance(Object target, ExtensionContext extensionContext) {\n        setupConfig(target);\n        try {\n            basedir = Files.createTempDirectory(\"hiverunner_test\");\n            container = createHiveServerContainer(scriptsUnderTest, target, basedir);\n        } catch (IOException e) {\n            throw new UncheckedIOException(e);\n        }\n        scriptsUnderTest = container.getScriptsUnderTest();\n    }\n\n    private void setupConfig(Object target) {\n        Set<Field> fields = ReflectionUtils.getAllFields(target.getClass(),\n                Predicates.and(\n                        withAnnotation(HiveRunnerSetup.class),\n                        withType(HiveRunnerConfig.class)));\n\n        Preconditions.checkState(fields.size() <= 1,\n                \"Only one field of type HiveRunnerConfig should be annotated with @HiveRunnerSetup\");\n\n        if (!fields.isEmpty()) {\n            config.override(ReflectionUtils\n                    .getFieldValue(target, fields.iterator().next().getName(), HiveRunnerConfig.class));\n        }\n    }\n\n    private void tearDown(Object target) {\n        if (container != null) {\n            LOGGER.info(\"Tearing down {}\", target.getClass());\n            container.tearDown();\n        }\n        deleteTempFolder(basedir);\n    }\n\n    private void deleteTempFolder(Path directory) {\n        try {\n            FileUtils.deleteDirectory(directory.toFile());\n        } catch (IOException e) {\n            LOGGER.debug(\"Temporary folder was not 
deleted successfully: \" + directory);\n        }\n    }\n\n    private HiveShellContainer createHiveServerContainer(List<? extends Script> scripts, Object testCase, Path basedir)\n            throws IOException {\n        return core.createHiveServerContainer(scripts, testCase, basedir, config);\n    }\n\n    @Override\n    public void afterEach(ExtensionContext extensionContext) {\n        tearDown(extensionContext.getRequiredTestInstance());\n    }\n}\n"
  },
  {
    "path": "src/main/java/com/klarna/hiverunner/HiveRunnerRule.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner;\n\nimport java.util.ArrayList;\nimport java.util.List;\n\nimport java.nio.file.Path;\n\nimport org.junit.rules.TestRule;\nimport org.junit.runner.Description;\nimport org.junit.runners.model.Statement;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\nimport com.klarna.hiverunner.builder.Script;\n\n/**\n * A rule that executes the scripts under test\n */\npublic class HiveRunnerRule implements TestRule {\n\n    private static final Logger LOGGER = LoggerFactory.getLogger(HiveRunnerRule.class);\n    private final StandaloneHiveRunner runner;\n    private final Object target;\n    private final Path testBaseDir;\n    private List<? extends Script> scriptsUnderTest = new ArrayList<>();\n\n    HiveRunnerRule(StandaloneHiveRunner runner, Object target, Path testBaseDir) {\n        this.runner = runner;\n        this.target = target;\n        this.testBaseDir = testBaseDir;\n    }\n\n    public List<? extends Script> getScriptsUnderTest() {\n        return scriptsUnderTest;\n    }\n\n    public void setScriptsUnderTest(List<? 
extends Script> scriptsUnderTest) {\n        LOGGER.debug(\"Setting up hive runner scripts under test\");\n        this.scriptsUnderTest = scriptsUnderTest;\n    }\n\n    @Override\n    public Statement apply(Statement base, Description description) {\n        LOGGER.debug(\"Running hive runner rule apply\");\n        return new HiveRunnerRuleStatement(runner, target, base, testBaseDir);\n    }\n\n    class HiveRunnerRuleStatement extends Statement {\n\n        private Object target;\n        private Statement base;\n        private Path testBaseDir;\n        private StandaloneHiveRunner runner;\n\n        private HiveRunnerRuleStatement(\n                StandaloneHiveRunner runner,\n                Object target,\n                Statement base,\n                Path testBaseDir) {\n            this.runner = runner;\n            this.target = target;\n            this.base = base;\n            this.testBaseDir = testBaseDir;\n        }\n\n        @Override\n        public void evaluate() throws Throwable {\n            LOGGER.debug(\"Hive runner rule evaluate method\");\n            HiveShellContainer container = runner.evaluateStatement(scriptsUnderTest, target, testBaseDir, base);\n\n            /**\n             * Script list will initially be null. 'evaluateStatement' sets up the script list.\n             * Need to set the value here to allow for mutation inside the mutantSwarmRule.\n             */\n            scriptsUnderTest = container.getScriptsUnderTest();\n        }\n    }\n}\n"
  },
  {
    "path": "src/main/java/com/klarna/hiverunner/HiveServerContainer.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner;\n\nimport com.google.common.base.Function;\nimport com.google.common.base.Joiner;\nimport com.google.common.base.Preconditions;\nimport com.google.common.collect.Iterables;\nimport com.klarna.hiverunner.builder.Statement;\nimport com.klarna.hiverunner.io.IgnoreClosePrintStream;\nimport org.apache.hadoop.hive.conf.HiveConf;\nimport org.apache.hadoop.hive.conf.HiveVariableSource;\nimport org.apache.hadoop.hive.conf.VariableSubstitution;\nimport org.apache.hadoop.hive.ql.exec.tez.TezJobExecHelper;\nimport org.apache.hadoop.hive.ql.session.SessionState;\nimport org.apache.hive.service.Service;\nimport org.apache.hive.service.cli.CLIService;\nimport org.apache.hive.service.cli.HiveSQLException;\nimport org.apache.hive.service.cli.OperationHandle;\nimport org.apache.hive.service.cli.RowSet;\nimport org.apache.hive.service.cli.SessionHandle;\nimport org.apache.hive.service.server.HiveServer2;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\nimport java.io.PrintStream;\nimport java.nio.file.Path;\nimport javax.annotation.Nullable;\nimport java.util.ArrayList;\nimport java.util.HashMap;\nimport java.util.List;\nimport java.util.Map;\n\n/**\n * HiveServer wrapper\n */\npublic class HiveServerContainer {\n\n    private static final Logger LOGGER = 
LoggerFactory.getLogger(HiveServerContainer.class);\n\n    private CLIService client;\n    private final HiveServerContext context;\n    private SessionHandle sessionHandle;\n    private HiveServer2 hiveServer2;\n    private SessionState currentSessionState;\n\n    public HiveServerContainer(HiveServerContext context) {\n        this.context = context;\n    }\n\n    public CLIService getClient() {\n        return client;\n    }\n\n    /**\n     * Will start the HiveServer.\n     *\n     * @param testConfig Specific test case properties. Will be merged with the HiveConf of the context\n     * @param hiveVars   HiveVars to pass on to the HiveServer for this session\n     */\n    public void init(Map<String, String> testConfig, Map<String, String> hiveVars) {\n\n        context.init();\n\n        HiveConf hiveConf = context.getHiveConf();\n\n        // merge test case properties with hive conf before HiveServer is started.\n        for (Map.Entry<String, String> property : testConfig.entrySet()) {\n            hiveConf.set(property.getKey(), property.getValue());\n        }\n\n        try {\n            hiveServer2 = new HiveServer2();\n            hiveServer2.init(hiveConf);\n\n            // Locate the ClIService in the HiveServer2\n            for (Service service : hiveServer2.getServices()) {\n                if (service instanceof CLIService) {\n                    client = (CLIService) service;\n                }\n            }\n\n            Preconditions.checkNotNull(client, \"ClIService was not initialized by HiveServer2\");\n\n            sessionHandle = client.openSession(\"noUser\", \"noPassword\", null);\n\n            SessionState sessionState = client.getSessionManager().getSession(sessionHandle).getSessionState();\n            currentSessionState = sessionState;\n            currentSessionState.setHiveVariables(hiveVars);\n        } catch (Exception e) {\n            throw new IllegalStateException(\"Failed to create HiveServer :\" + e.getMessage(), 
e);\n        }\n\n        // Ping hive server before we do anything more with it! If validation\n        // is switched on, this will fail if metastorage is not set up properly\n        pingHiveServer();\n    }\n\n    public Path getBaseDir() {\n        return context.getBaseDir();\n    }\n\n    public List<Object[]> executeStatement(Statement hiveql) {\n        return executeStatement(hiveql.getSql());\n    }\n\n    public List<Object[]> executeStatement(String hiveql) {\n        // This PrintStream hack can be removed if/when IntelliJ fixes https://youtrack.jetbrains.com/issue/IDEA-120628\n        // See https://github.com/klarna/HiveRunner/issues/94 for more info.\n        PrintStream initialPrintStream = System.out;\n        try {\n            System.setOut(new IgnoreClosePrintStream(System.out));\n            OperationHandle handle = client.executeStatement(sessionHandle, hiveql, new HashMap<>());\n            List<Object[]> resultSet = new ArrayList<>();\n            if (handle.hasResultSet()) {\n                /*\n                 * fetchResults will by default return 100 rows per fetch (hive 14). 
For big result sets we need to continuously fetch the result set until all\n                 * rows are fetched.\n                 */\n                RowSet rowSet;\n                while ((rowSet = client.fetchResults(handle)) != null && rowSet.numRows() > 0) {\n                    for (Object[] row : rowSet) {\n                        resultSet.add(row.clone());\n                    }\n                }\n            }\n\n            LOGGER.debug(\"ResultSet:\\n\"\n                    + Joiner.on(\"\\n\").join(Iterables.transform(resultSet, new Function<Object[], String>() {\n                @Nullable\n                @Override\n                public String apply(@Nullable Object[] objects) {\n                    return Joiner.on(\", \").useForNull(\"null\").join(objects);\n                }\n            })));\n\n            return resultSet;\n        } catch (HiveSQLException e) {\n            throw new IllegalArgumentException(\"Failed to executeQuery Hive query \" + hiveql + \": \" + e.getMessage(),\n                    e);\n        } finally {\n            System.setOut(initialPrintStream);\n        }\n    }\n\n    /**\n     * Release all resources.\n     * <p>\n     * This call will never throw an exception as it makes no sense doing that in the tear down phase.\n     * </p>\n     */\n    public void tearDown() {\n\n        try {\n            TezJobExecHelper.killRunningJobs();\n        } catch (Throwable e) {\n            LOGGER.warn(\"Failed to kill tez session: \" + e.getMessage() + \". Turn on log level debug for stacktrace\");\n            LOGGER.debug(e.getMessage(), e);\n        }\n\n        try {\n            // Reset to default schema\n            executeStatement(\"USE default\");\n        } catch (Throwable e) {\n            LOGGER.warn(\"Failed to reset to default schema: \" + e.getMessage()\n                    + \". 
Turn on log level debug for stacktrace\");\n            LOGGER.debug(e.getMessage(), e);\n        }\n\n        try {\n            client.closeSession(sessionHandle);\n        } catch (Throwable e) {\n            LOGGER.warn(\n                    \"Failed to close client session: \" + e.getMessage() + \". Turn on log level debug for stacktrace\");\n            LOGGER.debug(e.getMessage(), e);\n        }\n\n        try {\n            hiveServer2.stop();\n        } catch (Throwable e) {\n            LOGGER.warn(\"Failed to stop HiveServer2: \" + e.getMessage() + \". Turn on log level debug for stacktrace\");\n            LOGGER.debug(e.getMessage(), e);\n        }\n\n        hiveServer2 = null;\n        client = null;\n        sessionHandle = null;\n\n        LOGGER.info(\"Tore down HiveServer instance\");\n    }\n\n    public String expandVariableSubstitutes(String expression) {\n        return getVariableSubstitution().substitute(getHiveConf(), expression);\n    }\n\n    private void pingHiveServer() {\n        executeStatement(\"SHOW TABLES\");\n    }\n\n    public HiveConf getHiveConf() {\n        return hiveServer2.getHiveConf();\n    }\n\n    public VariableSubstitution getVariableSubstitution() {\n        // Make sure to set the session state for this thread before returning the VariableSubstitution. If not set,\n        // hivevar:s will not be evaluated.\n        SessionState.setCurrentSessionState(currentSessionState);\n\n        SessionState ss = currentSessionState;\n        return new VariableSubstitution(new HiveVariableSource() {\n            @Override\n            public Map<String, String> getHiveVariable() {\n                return ss.getHiveVariables();\n            }\n        });\n    }\n}\n"
  },
  {
    "path": "src/main/java/com/klarna/hiverunner/HiveServerContext.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner;\n\nimport java.nio.file.Path;\nimport java.util.Map;\n\nimport org.apache.hadoop.hive.conf.HiveConf;\n\n/**\n * Configuration for the HiveServer.\n *\n * Implementations of this interface should set the context of the HiveServer that is spawned by HiveRunner. {@link\n * com.klarna.hiverunner.StandaloneHiveRunner} uses the {@link StandaloneHiveServerContext} to create a context with\n * zero external dependencies.\n *\n * By implementing other contexts you may e.g. point hiveserver to a different metastore, pre installed external hadoop\n * instance etc.\n */\npublic interface HiveServerContext {\n\n    /**\n     * Create all test resources and set all hive configurations.\n     *\n     * Note that before this method is called, not all injected dependencies might have been initialized.\n     * After this method is called, all configurations and resources should have been set.\n     *\n     * Called by {@link HiveServerContainer#init(Map, Map)}\n     */\n    void init();\n\n    /**\n     * Get the hiveconf. This will not be available until init() has been called.\n     */\n    HiveConf getHiveConf();\n\n    /**\n     * Get file folder that acts as the base dir for the test data. 
This is the sand box for the\n     * file system that the HiveRunner uses as replacement for HDFS.\n     * <p>\n     * Each test method will have a new base dir spawned by the HiveRunner engine.\n     * </p>\n     */\n    Path getBaseDir();\n}\n"
  },
  {
    "path": "src/main/java/com/klarna/hiverunner/HiveShell.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner;\n\nimport com.klarna.hiverunner.data.InsertIntoTable;\nimport org.apache.hadoop.hive.conf.HiveConf;\n\nimport java.io.File;\nimport java.io.OutputStream;\nimport java.nio.charset.Charset;\nimport java.nio.file.Path;\nimport java.util.List;\n\n/**\n * Test handle to the hive server.\n *\n * Please refer to test class {@code com.klarna.hiverunner.examples.HelloHiveRunnerTest} for usage examples.\n */\npublic interface HiveShell {\n\n    /**\n     * Executes a single query.\n     * <p>\n     * May only be called post #start()\n     * </p>\n     */\n    List<String> executeQuery(String hiveSql);\n\n    /**\n     * Executes a single query.\n     * <p>\n     * May only be called post #start()\n     * </p>\n     */\n    List<String> executeQuery(String hiveSql, String rowValuesDelimitedBy, String replaceNullWith);\n\n    /**\n     * Executes a single query from a script file, returning any results.\n     * <p>\n     * May only be called post #start()\n     * </p>\n     */\n    List<String> executeQuery(File script);\n\n    /**\n     * Executes a single query from a script file, returning any results.\n     * <p>\n     * May only be called post #start()\n     * </p>\n     */\n    List<String> executeQuery(Path script);\n\n    /**\n     * Executes a single query from a 
script file, returning any results.\n     * <p>\n     * May only be called post #start()\n     * </p>\n     */\n    List<String> executeQuery(Charset charset, File script);\n\n    /**\n     * Executes a single query from a script file, returning any results.\n     * <p>\n     * May only be called post #start()\n     * </p>\n     */\n    List<String> executeQuery(Charset charset, Path script);\n\n    /**\n     * Executes a single query from a script file, returning any results.\n     * <p>\n     * May only be called post #start()\n     * </p>\n     */\n    List<String> executeQuery(File script, String rowValuesDelimitedBy, String replaceNullWith);\n\n    /**\n     * Executes a single query from a script file, returning any results.\n     * <p>\n     * May only be called post #start()\n     * </p>\n     */\n    List<String> executeQuery(Path script, String rowValuesDelimitedBy, String replaceNullWith);\n\n    /**\n     * Executes a single query from a script file, returning any results.\n     * <p>\n     * May only be called post #start()\n     * </p>\n     */\n    List<String> executeQuery(Charset charset, File script, String rowValuesDelimitedBy, String replaceNullWith);\n\n    /**\n     * Executes a single query from a script file, returning any results.\n     * <p>\n     * May only be called post #start()\n     * </p>\n     */\n    List<String> executeQuery(Charset charset, Path script, String rowValuesDelimitedBy, String replaceNullWith);\n\n    /**\n     * Execute a single hive query\n     * <p>\n     * May only be called post #start()\n     * </p>\n     */\n    List<Object[]> executeStatement(String hiveSql);\n\n    /**\n     * Executes a hive script. The script may contain multiple statements delimited by ';'\n     * <p>\n     * May only be called post #start()\n     * </p>\n     */\n    void execute(String script);\n\n    /**\n     * Executes a hive script. 
The script may contain multiple statements delimited by ';'.\n     * Default charset will be used to read the given files.\n     * <p>\n     * May only be called post #start()\n     * </p>\n     */\n    void execute(File file);\n\n    /**\n     * Executes a hive script. The script may contain multiple statements delimited by ';'.\n     * Default charset will be used to read the given files.\n     * <p>\n     * May only be called post #start()\n     * </p>\n     */\n    void execute(Path path);\n\n    /**\n     * Executes a hive script. The script may contain multiple statements delimited by ';'\n     * <p>\n     * May only be called post #start()\n     * </p>\n     */\n    void execute(Charset charset, File file);\n\n    /**\n     * Executes a hive script. The script may contain multiple statements delimited by ';'\n     * <p>\n     * May only be called post #start()\n     * </p>\n     */\n    void execute(Charset charset, Path path);\n\n    /**\n     * Start the shell. May only be called once. 
The test engine will by default call this method,\n     * Set {@link com.klarna.hiverunner.annotations.HiveSQL#autoStart()} to false to explicitly control\n     * when to start from the test case.\n     * <p>\n     * This might be useful for test methods that needs additional setup not catered for with the provided annotations.\n     * </p>\n     */\n    void start();\n\n    /**\n     * Set a HiveConf property.\n     * <p>\n     * May only be called pre #start()\n     * </p>\n     * @deprecated Use {@link HiveShell#setHiveConfValue(String, String)} instead\n     */\n    @Deprecated\n    void setProperty(String key, String value);\n\n    /**\n     * Set HiveConf property.\n     * <p>\n     * May only be called pre #start()\n     * </p>\n     */\n    void setHiveConfValue(String key, String value);\n\n    /**\n     * Set Hive variable.\n     * <p>\n     * May only be called pre #start()\n     * </p>\n     */\n    void setHiveVarValue(String var, String value);\n\n    /**\n     * Get the current HiveConf from hive\n     */\n    HiveConf getHiveConf();\n\n    void setCwd(Path cwd);\n\n    Path getCwd();\n\n    /**\n     * Copy test data into hdfs\n     * May only be called pre #start()\n     * <p>\n     * {@link com.klarna.hiverunner.MethodLevelResourceTest#resourceLoadingAsFileTest()}\n     * and {@link com.klarna.hiverunner.MethodLevelResourceTest#resourceLoadingAsStringTest()}\n     * </p>\n     */\n    void addResource(String targetFile, File sourceFile);\n\n    /**\n     * Copy test data into hdfs\n     * May only be called pre #start()\n     * <p>\n     * {@link com.klarna.hiverunner.MethodLevelResourceTest#resourceLoadingAsFileTest()}\n     * and {@link com.klarna.hiverunner.MethodLevelResourceTest#resourceLoadingAsStringTest()}\n     * </p>\n     */\n    void addResource(String targetFile, Path sourceFile);\n\n\n    /**\n     * Copy test data into hdfs\n     * May only be called pre #start()\n     * <p>\n     * {@link 
com.klarna.hiverunner.MethodLevelResourceTest#resourceLoadingAsFileTest()}\n     * and {@link com.klarna.hiverunner.MethodLevelResourceTest#resourceLoadingAsStringTest()}\n     * </p>\n     */\n    void addResource(String targetFile, String data);\n\n    /**\n     * Add a hive script that will be executed when the hive shell is started\n     * Scripts will be executed in the order they are added.\n     *\n     * Note that execution order is not guaranteed with\n     * fields annotated with {@link com.klarna.hiverunner.annotations.HiveSetupScript}\n     */\n    void addSetupScript(String script);\n\n    /**\n     * Add hive scripts that will be executed when the hive shell is started. Scripts will be executed in given order.\n     *\n     * Note that execution order is not guaranteed with\n     * fields annotated with {@link com.klarna.hiverunner.annotations.HiveSetupScript}\n     */\n    void addSetupScripts(Charset charset, File... scripts);\n\n    /**\n     * Add hive scripts that will be executed when the hive shell is started. Scripts will be executed in given order.\n     *\n     * Note that execution order is not guaranteed with\n     * fields annotated with {@link com.klarna.hiverunner.annotations.HiveSetupScript}\n     */\n    void addSetupScripts(Charset charset, Path... scripts);\n\n\n    /**\n     * Add hive scripts that will be executed when the hive shell is started. Scripts will be executed in given order.\n     *\n     * Default charset will be used to read the given files\n     *\n     * Note that execution order is not guaranteed with\n     * fields annotated with {@link com.klarna.hiverunner.annotations.HiveSetupScript}\n     */\n    void addSetupScripts(File... scripts);\n\n    /**\n     * Add hive scripts that will be executed when the hive shell is started. 
Scripts will be executed in given order.\n     *\n     * Default charset will be used to read the given files\n     *\n     * Note that execution order is not guaranteed with\n     * fields annotated with {@link com.klarna.hiverunner.annotations.HiveSetupScript}\n     */\n    void addSetupScripts(Path... scripts);\n\n\n    /**\n     * Get the test case sand box base dir\n     */\n    Path getBaseDir();\n\n    /**\n     * Resolve all substituted variables with the hive conf.\n     * @throws IllegalArgumentException if not all substitutes could be resolved\n     * @throws IllegalStateException    if the HiveShell was not started yet.\n     */\n    String expandVariableSubstitutes(String expression);\n\n    /**\n     * Open up a stream to write test data into HDFS.\n     *\n     * May only be called pre #start().\n     * No writes to the stream will be allowed post #start().\n     *\n     * @param targetFile The path to the target file relative to the hive work space.\n     *\n     * See test class {@code com.klarna.hiverunner.ResourceOutputStreamTest#sequenceFile()} for an example of how this works.\n     * with sequence files.\n     */\n    OutputStream getResourceOutputStream(String targetFile);\n\n    /**\n     * Returns an {@link InsertIntoTable} that allows programmatically inserting data into a table in a fluent manner.\n     * <p>\n     * May only be called post #start()\n     * </p>\n     *\n     * @param databaseName The database name\n     * @param tableName The table name\n     */\n    InsertIntoTable insertInto(String databaseName, String tableName);\n}\n"
  },
  {
    "path": "src/main/java/com/klarna/hiverunner/HiveShellContainer.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner;\n\nimport java.util.List;\n\nimport com.klarna.hiverunner.builder.Script;\n\n/**\n * Wrapper for the HiveShell that allows the fwk to sugar the HiveShell with functionality that will not be exposed to\n * the test case creator.\n */\npublic interface HiveShellContainer extends HiveShell {\n\n    /**\n     * Should be called after execution of each test method and should tear down the test fixture leaving\n     * no residue for coming test cases.\n     */\n    void tearDown();\n\n    /**\n     * Returns a List of the scripts being tested. \n     */\n    List<Script> getScriptsUnderTest();\n}\n"
  },
  {
    "path": "src/main/java/com/klarna/hiverunner/StandaloneHiveRunner.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner;\n\nimport com.google.common.base.Preconditions;\nimport com.google.common.base.Predicates;\nimport com.klarna.hiverunner.annotations.*;\nimport com.klarna.hiverunner.builder.Script;\nimport com.klarna.hiverunner.config.HiveRunnerConfig;\nimport com.klarna.reflection.ReflectionUtils;\n\nimport org.apache.commons.io.FileUtils;\nimport org.apache.hadoop.fs.FileUtil;\nimport org.apache.hadoop.fs.permission.FsPermission;\nimport org.junit.Ignore;\nimport org.junit.internal.AssumptionViolatedException;\nimport org.junit.internal.runners.model.EachTestNotifier;\nimport org.junit.rules.TestRule;\nimport org.junit.runner.Description;\nimport org.junit.runner.notification.RunNotifier;\nimport org.junit.runners.BlockJUnit4ClassRunner;\nimport org.junit.runners.model.FrameworkMethod;\nimport org.junit.runners.model.InitializationError;\nimport org.junit.runners.model.Statement;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport org.slf4j.MDC;\n\nimport java.io.File;\nimport java.io.IOException;\nimport java.io.UncheckedIOException;\nimport java.lang.reflect.Field;\nimport java.nio.file.Files;\nimport java.nio.file.Path;\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.Set;\n\nimport static 
org.apache.hadoop.hive.metastore.conf.MetastoreConf.ConfVars.HIVE_IN_TEST;\nimport static org.reflections.ReflectionUtils.withAnnotation;\nimport static org.reflections.ReflectionUtils.withType;\n\n/**\n * JUnit 4 runner that runs hive sql on a HiveServer residing in this JVM. No external dependencies needed.\n */\npublic class StandaloneHiveRunner extends BlockJUnit4ClassRunner {\n\n    private static final Logger LOGGER = LoggerFactory.getLogger(StandaloneHiveRunner.class);\n\n    private HiveShellContainer container;\n\n    /**\n     * We need to init config because we're going to pass\n     * it around before it is actually fully loaded from the testcase.\n     */\n    private final HiveRunnerConfig config = new HiveRunnerConfig();\n\n    public StandaloneHiveRunner(Class<?> clazz) throws InitializationError {\n        super(clazz);\n    }\n\n    protected HiveRunnerConfig getHiveRunnerConfig() {\n        return config;\n    }\n\n    @Override\n    protected List<TestRule> getTestRules(Object target) {\n        Path testBaseDir = null;\n        try {\n            testBaseDir = Files.createTempDirectory(\"hiverunner_tests\");\n        } catch (IOException e) {\n            throw new UncheckedIOException(e);\n        }\n\n        HiveRunnerRule hiveRunnerRule = new HiveRunnerRule(this, target, testBaseDir);\n\n        /*\n         * Note that rules will be executed in reverse order to how they're added.\n         */\n\n        List<TestRule> rules = new ArrayList<>();\n        rules.addAll(super.getTestRules(target));\n        rules.add(hiveRunnerRule);\n        rules.add(ThrowOnTimeout.create(config, getName()));\n\n        /*\n         Make sure hive runner config rule is the first rule on the list to be executed so that any subsequent\n         statements has access to the final config.\n          */\n        rules.add(getHiveRunnerConfigRule(target));\n        return rules;\n    }\n\n    @Override\n    protected void runChild(FrameworkMethod method, 
RunNotifier notifier) {\n        Description description = describeChild(method);\n        if (method.getAnnotation(Ignore.class) != null) {\n            notifier.fireTestIgnored(description);\n        } else {\n            setLogContext(method);\n            EachTestNotifier eachNotifier = new EachTestNotifier(notifier, description);\n            eachNotifier.fireTestStarted();\n            try {\n                runTestMethod(method, eachNotifier, config.getTimeoutRetries());\n            } finally {\n                eachNotifier.fireTestFinished();\n                clearLogContext();\n            }\n        }\n    }\n\n    /**\n     * Runs a {@link Statement} that represents a leaf (aka atomic) test.\n     */\n    protected final void runTestMethod(FrameworkMethod method,\n                                       EachTestNotifier notifier, int retriesLeft) {\n\n        Statement statement = methodBlock(method);\n\n        try {\n            statement.evaluate();\n        } catch (AssumptionViolatedException e) {\n            notifier.addFailedAssumption(e);\n        } catch (TimeoutException e) {\n            /*\n             TimeoutException thrown by ThrowOnTimeout statement. Handling is kept in this class since this is where the\n             retry needs to be triggered in order to get the right tear down and test setup between retries.\n              */\n            if (--retriesLeft >= 0) {\n                LOGGER.warn(\n                        \"Test case timed out. Will attempt retry {} more times. 
Turn on log level DEBUG for stacktrace\",\n                        retriesLeft);\n                LOGGER.debug(e.getMessage(), e);\n                tearDown();\n                runTestMethod(method, notifier, retriesLeft);\n            } else {\n                notifier.addFailure(e);\n            }\n        } catch (Throwable e) {\n            notifier.addFailure(e);\n        }\n    }\n\n    /**\n     * Drives the unit test.\n     */\n    public HiveShellContainer evaluateStatement(List<? extends Script> scripts, Object target,\n                                                Path temporaryFolder, Statement base) throws Throwable {\n        container = null;\n        File temporaryFile = temporaryFolder.toFile();\n        if (!temporaryFile.exists()) {\n            temporaryFile.mkdirs();\n        }\n        FileUtil.setPermission(temporaryFile, FsPermission.getDirDefault());\n        try {\n            LOGGER.info(\"Setting up {} in {}\", getName(), temporaryFolder.getRoot());\n            container = createHiveServerContainer(scripts, target, temporaryFolder);\n            base.evaluate();\n            return container;\n        } finally {\n            tearDown();\n        }\n    }\n\n    private void tearDown() {\n        tearDownContainer();\n        if (container != null) {\n            deleteTempFolder(container.getBaseDir());\n        }\n    }\n\n    private void tearDownContainer() {\n        if (container != null) {\n            LOGGER.info(\"Tearing down {}\", getName());\n            try {\n                container.tearDown();\n            } catch (Throwable e) {\n                LOGGER.warn(\"Tear down failed: \" + e.getMessage(), e);\n            }\n        }\n    }\n\n    private void deleteTempFolder(Path directory) {\n        try {\n            FileUtils.deleteDirectory(directory.toFile());\n        } catch (IOException e) {\n            LOGGER.debug(\"Temporary folder was not deleted successfully: \" + directory);\n        }\n    }\n\n    /**\n  
   * Traverses the test case annotations. Will inject a HiveShell in the test case that envelopes the HiveServer.\n     */\n    private HiveShellContainer createHiveServerContainer(List<? extends Script> scripts, Object testCase,\n                                                         Path baseDir)\n            throws IOException {\n        HiveRunnerCore core = new HiveRunnerCore();\n        return core.createHiveServerContainer(scripts, testCase, baseDir, config);\n    }\n\n    private TestRule getHiveRunnerConfigRule(Object target) {\n        return new TestRule() {\n            @Override\n            public Statement apply(Statement base, Description description) {\n                Set<Field> fields = ReflectionUtils.getAllFields(target.getClass(),\n                        Predicates.and(\n                                withAnnotation(HiveRunnerSetup.class),\n                                withType(HiveRunnerConfig.class)));\n\n                Preconditions.checkState(fields.size() <= 1,\n                        \"Exact one field of type HiveRunnerConfig should to be annotated with @HiveRunnerSetup\");\n\n                /*\n                 Override the config with test case config. 
Taking care not to replace the config instance since it\n                  has been passed around and referenced by some of the other test rules.\n                  */\n                if (!fields.isEmpty()) {\n                    config.override(ReflectionUtils\n                            .getFieldValue(target, fields.iterator().next().getName(), HiveRunnerConfig.class));\n                }\n                return base;\n            }\n        };\n    }\n\n    private void clearLogContext() {\n        MDC.clear();\n    }\n\n    private void setLogContext(FrameworkMethod method) {\n        MDC.put(\"testClassShort\", getTestClass().getJavaClass().getSimpleName());\n        MDC.put(\"testClass\", getTestClass().getJavaClass().getName());\n        MDC.put(\"testMethod\", method.getName());\n    }\n}\n"
  },
  {
    "path": "src/main/java/com/klarna/hiverunner/StandaloneHiveServerContext.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner;\n\nimport static org.apache.hadoop.hive.conf.HiveConf.ConfVars.HADOOPBIN;\nimport static org.apache.hadoop.hive.conf.HiveConf.ConfVars.HIVECONVERTJOIN;\nimport static org.apache.hadoop.hive.conf.HiveConf.ConfVars.HIVEHISTORYFILELOC;\nimport static org.apache.hadoop.hive.conf.HiveConf.ConfVars.HIVEMETADATAONLYQUERIES;\nimport static org.apache.hadoop.hive.conf.HiveConf.ConfVars.HIVEOPTINDEXFILTER;\nimport static org.apache.hadoop.hive.conf.HiveConf.ConfVars.HIVESKEWJOIN;\nimport static org.apache.hadoop.hive.conf.HiveConf.ConfVars.HIVESTATSAUTOGATHER;\nimport static org.apache.hadoop.hive.conf.HiveConf.ConfVars.HIVE_CBO_ENABLED;\nimport static org.apache.hadoop.hive.conf.HiveConf.ConfVars.HIVE_INFER_BUCKET_SORT;\nimport static org.apache.hadoop.hive.conf.HiveConf.ConfVars.HIVE_SERVER2_LOGGING_OPERATION_ENABLED;\nimport static org.apache.hadoop.hive.conf.HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY;\nimport static org.apache.hadoop.hive.conf.HiveConf.ConfVars.LOCALSCRATCHDIR;\nimport static org.apache.hadoop.hive.conf.HiveConf.ConfVars.METASTORECONNECTURLKEY;\nimport static org.apache.hadoop.hive.conf.HiveConf.ConfVars.METASTOREWAREHOUSE;\nimport static org.apache.hadoop.hive.conf.HiveConf.ConfVars.METASTORE_VALIDATE_COLUMNS;\nimport static 
org.apache.hadoop.hive.conf.HiveConf.ConfVars.METASTORE_VALIDATE_CONSTRAINTS;\nimport static org.apache.hadoop.hive.conf.HiveConf.ConfVars.METASTORE_VALIDATE_TABLES;\nimport static org.apache.hadoop.hive.conf.HiveConf.ConfVars.SCRATCHDIR;\nimport static org.apache.hadoop.hive.metastore.conf.MetastoreConf.ConfVars.HIVE_IN_TEST;\n\nimport java.io.File;\nimport java.io.IOException;\nimport java.io.UncheckedIOException;\nimport java.nio.file.Files;\nimport java.nio.file.Path;\nimport java.util.Map;\nimport java.util.UUID;\n\nimport org.apache.hadoop.fs.FileUtil;\nimport org.apache.hadoop.fs.permission.FsPermission;\nimport org.apache.hadoop.hive.conf.HiveConf;\nimport org.apache.tez.dag.api.TezConfiguration;\nimport org.apache.tez.runtime.library.api.TezRuntimeConfiguration;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\nimport com.klarna.hiverunner.config.HiveRunnerConfig;\n\n/**\n * Responsible for common configuration for running the HiveServer within this JVM with zero external dependencies.\n * <p>\n * This class contains a bunch of methods meant to be overridden in order to create slightly different contexts.\n * </p>\n * <p>\n * This context configures HiveServer for both mr and tez. 
There's nothing contradicting with those configurations so\n * they may coexist in order to allow test cases to alter execution engines within the same test by e.g: 'set\n * hive.execution.engine=tez;'.\n * </p>\n */\npublic class StandaloneHiveServerContext implements HiveServerContext {\n\n    private static final Logger LOGGER = LoggerFactory.getLogger(StandaloneHiveServerContext.class);\n\n    private String metaStorageUrl;\n\n    protected HiveConf hiveConf = new HiveConf();\n\n    private final Path basedir;\n    private final HiveRunnerConfig hiveRunnerConfig;\n\n    public StandaloneHiveServerContext(Path basedir, HiveRunnerConfig hiveRunnerConfig) {\n        this.basedir = basedir;\n        this.hiveRunnerConfig = hiveRunnerConfig;\n    }\n\n    @Override\n    public final void init() {\n\n        configureMiscHiveSettings(hiveConf);\n\n        configureMetaStore(hiveConf);\n\n        configureMrExecutionEngine(hiveConf);\n\n        configureTezExecutionEngine(hiveConf);\n\n        configureJavaSecurityRealm(hiveConf);\n\n        configureSupportConcurrency(hiveConf);\n\n        try {\n            configureFileSystem(basedir, hiveConf);\n        } catch (IOException e) {\n            throw new UncheckedIOException(e);\n        }\n\n        configureAssertionStatus(hiveConf);\n\n        overrideHiveConf(hiveConf);\n    }\n\n    protected void configureMiscHiveSettings(HiveConf hiveConf) {\n        hiveConf.setBoolVar(HIVESTATSAUTOGATHER, false);\n\n        // Turn of dependency to calcite library\n        hiveConf.setBoolVar(HIVE_CBO_ENABLED, false);\n\n        // Disable to get rid of clean up exception when stopping the Session.\n        hiveConf.setBoolVar(HIVE_SERVER2_LOGGING_OPERATION_ENABLED, false);\n\n        hiveConf.setVar(HADOOPBIN, \"NO_BIN!\");\n    }\n\n    protected void overrideHiveConf(HiveConf hiveConf) {\n        for (Map.Entry<String, String> hiveConfEntry : hiveRunnerConfig.getHiveConfSystemOverride().entrySet()) {\n            
hiveConf.set(hiveConfEntry.getKey(), hiveConfEntry.getValue());\n        }\n    }\n\n    protected void configureMrExecutionEngine(HiveConf conf) {\n        /*\n         * Switch off all optimizers otherwise we didn't manage to contain the map reduction within this JVM.\n         */\n        conf.setBoolVar(HIVE_INFER_BUCKET_SORT, false);\n        conf.setBoolVar(HIVEMETADATAONLYQUERIES, false);\n        conf.setBoolVar(HIVEOPTINDEXFILTER, false);\n        conf.setBoolVar(HIVECONVERTJOIN, false);\n        conf.setBoolVar(HIVESKEWJOIN, false);\n\n        // Defaults to a 1000 millis sleep in. We can speed up the tests a bit by setting this to 1 millis instead.\n        // org.apache.hadoop.hive.ql.exec.mr.HadoopJobExecHelper.\n        hiveConf.setLongVar(HiveConf.ConfVars.HIVECOUNTERSPULLINTERVAL, 1L);\n\n        hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_RPC_QUERY_PLAN, true);\n    }\n\n    protected void configureTezExecutionEngine(HiveConf conf) {\n        /*\n         * Tez local mode settings\n         */\n        conf.setBoolean(TezConfiguration.TEZ_LOCAL_MODE, true);\n        conf.set(\"fs.defaultFS\", \"file:///\");\n        conf.setBoolean(TezRuntimeConfiguration.TEZ_RUNTIME_OPTIMIZE_LOCAL_FETCH, true);\n\n        /*\n         * Set to be able to run tests offline\n         */\n        conf.set(TezConfiguration.TEZ_AM_DISABLE_CLIENT_VERSION_CHECK, \"true\");\n\n        /*\n         * General attempts to strip of unnecessary functionality to speed up test execution and increase stability\n         */\n        conf.set(TezConfiguration.TEZ_AM_USE_CONCURRENT_DISPATCHER, \"false\");\n        conf.set(TezConfiguration.TEZ_AM_CONTAINER_REUSE_ENABLED, \"false\");\n        conf.set(TezConfiguration.DAG_RECOVERY_ENABLED, \"false\");\n        conf.set(TezConfiguration.TEZ_TASK_GET_TASK_SLEEP_INTERVAL_MS_MAX, \"1\");\n        conf.set(TezConfiguration.TEZ_AM_WEBSERVICE_ENABLE, \"false\");\n        conf.set(TezConfiguration.DAG_RECOVERY_ENABLED, \"false\");\n        
conf.set(TezConfiguration.TEZ_AM_NODE_BLACKLISTING_ENABLED, \"false\");\n    }\n\n    protected void configureJavaSecurityRealm(HiveConf hiveConf) {\n        // These three properties gets rid of: 'Unable to load realm info from SCDynamicStore'\n        // which seems to have a timeout of about 5 secs.\n        System.setProperty(\"java.security.krb5.realm\", \"\");\n        System.setProperty(\"java.security.krb5.kdc\", \"\");\n        System.setProperty(\"java.security.krb5.conf\", \"/dev/null\");\n    }\n\n    protected void configureAssertionStatus(HiveConf conf) {\n        ClassLoader\n                .getSystemClassLoader()\n                .setPackageAssertionStatus(\"org.apache.hadoop.hive.serde2.objectinspector\", false);\n    }\n\n    protected void configureSupportConcurrency(HiveConf conf) {\n        hiveConf.setBoolVar(HIVE_SUPPORT_CONCURRENCY, false);\n    }\n\n    protected void configureMetaStore(HiveConf conf) {\n        configureDerbyLog();\n\n        String jdbcDriver = org.apache.derby.jdbc.EmbeddedDriver.class.getName();\n        try {\n            Class.forName(jdbcDriver);\n        } catch (ClassNotFoundException e) {\n            throw new RuntimeException(e);\n        }\n\n        // Set the Hive Metastore DB driver\n        metaStorageUrl = \"jdbc:derby:memory:\" + UUID.randomUUID().toString();\n        setMetastoreProperty(\"datanucleus.schema.autoCreateAll\", \"true\");\n        setMetastoreProperty(\"datanucleus.schema.autoCreateTables\", \"true\");\n        setMetastoreProperty(\"hive.metastore.schema.verification\", \"false\");\n        setMetastoreProperty(\"metastore.filter.hook\", \"org.apache.hadoop.hive.metastore.DefaultMetaStoreFilterHookImpl\");\n\n        setMetastoreProperty(\"datanucleus.connectiondrivername\", jdbcDriver);\n        setMetastoreProperty(\"javax.jdo.option.ConnectionDriverName\", jdbcDriver);\n\n        // No pooling needed. 
This will save us a lot of threads\n        setMetastoreProperty(\"datanucleus.connectionPoolingType\", \"None\");\n\n        /**\n         * If hive.in.test=false (default), Hive 3 will assume that the metastore rdbms has already been initialized\n         * with some basic tables and will try to run initial test queries against them.\n         * This results in multiple warning stacktraces if the rdbms has not actually been initialized.\n         */\n        setMetastoreProperty(HIVE_IN_TEST.getVarname(), \"true\");\n\n        setMetastoreProperty(METASTORE_VALIDATE_CONSTRAINTS.varname, \"true\");\n        setMetastoreProperty(METASTORE_VALIDATE_COLUMNS.varname, \"true\");\n        setMetastoreProperty(METASTORE_VALIDATE_TABLES.varname, \"true\");\n    }\n\n    private void configureDerbyLog() {\n        // overriding default derby log path to not go to root of project\n        File derbyLogFile;\n        try {\n            derbyLogFile = File.createTempFile(\"derby\", \".log\");\n            LOGGER.debug(\"Derby set to log to \" + derbyLogFile.getAbsolutePath());\n        } catch (IOException e) {\n            throw new UncheckedIOException(\"Error creating temporary derby log file\", e);\n        }\n        System.setProperty(\"derby.stream.error.file\", derbyLogFile.getAbsolutePath());\n    }\n\n    protected void configureFileSystem(Path basedir, HiveConf conf) throws IOException {\n        setMetastoreProperty(METASTORECONNECTURLKEY.varname, metaStorageUrl + \";create=true\");\n\n        createAndSetFolderProperty(METASTOREWAREHOUSE, \"warehouse\", conf, basedir);\n        createAndSetFolderProperty(SCRATCHDIR, \"scratchdir\", conf, basedir);\n        createAndSetFolderProperty(LOCALSCRATCHDIR, \"localscratchdir\", conf, basedir);\n        createAndSetFolderProperty(HIVEHISTORYFILELOC, \"tmp\", conf, basedir);\n\n        createAndSetFolderProperty(\"hadoop.tmp.dir\", \"hadooptmp\", conf, basedir);\n        createAndSetFolderProperty(\"test.log.dir\", 
\"logs\", conf, basedir);\n\n        /*\n         * Tez specific configurations below\n         */\n        /*\n         * Tez will upload a hive-exec.jar to this location. It looks like it will do this only once per test suite so it\n         * makes sense to keep this in a central location rather than in the tmp dir of each test.\n         */\n        File installation_dir = newFolder(basedir, \"tez_installation_dir\").toFile();\n\n        conf.setVar(HiveConf.ConfVars.HIVE_JAR_DIRECTORY, installation_dir.getAbsolutePath());\n        conf.setVar(HiveConf.ConfVars.HIVE_USER_INSTALL_DIR, installation_dir.getAbsolutePath());\n    }\n\n    Path newFolder(Path basedir, String folder) throws IOException {\n        Path newFolder = Files.createTempDirectory(basedir, folder);\n        FileUtil.setPermission(newFolder.toFile(), FsPermission.getDirDefault());\n        return newFolder;\n    }\n\n    @Override\n    public HiveConf getHiveConf() {\n        return hiveConf;\n    }\n\n    @Override\n    public Path getBaseDir() {\n        return basedir;\n    }\n\n    protected final void createAndSetFolderProperty(HiveConf.ConfVars var, String folder, HiveConf conf, Path basedir)\n            throws IOException {\n        setMetastoreProperty(var.varname, newFolder(basedir, folder).toAbsolutePath().toString());\n    }\n\n    protected final void createAndSetFolderProperty(String key, String folder, HiveConf conf, Path basedir)\n            throws IOException {\n        setMetastoreProperty(key, newFolder(basedir, folder).toAbsolutePath().toString());\n    }\n\n    protected final void setMetastoreProperty(String key, String value) {\n        hiveConf.set(key, value);\n        System.setProperty(key, value);\n    }\n}\n"
  },
  {
    "path": "src/main/java/com/klarna/hiverunner/ThrowOnTimeout.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner;\n\nimport com.klarna.hiverunner.config.HiveRunnerConfig;\nimport org.apache.commons.lang.time.StopWatch;\nimport org.junit.rules.TestRule;\nimport org.junit.runner.Description;\nimport org.junit.runners.model.Statement;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class ThrowOnTimeout extends Statement {\n    private static final Logger LOGGER = LoggerFactory.getLogger(ThrowOnTimeout.class);\n\n    private final Statement originalStatement;\n\n    private final HiveRunnerConfig config;\n    private final Object target;\n\n    private Throwable statementException;\n    private boolean finished = false;\n\n    public ThrowOnTimeout(Statement originalStatement, HiveRunnerConfig config, Object target) {\n        this.originalStatement = originalStatement;\n        this.config = config;\n        this.target = target;\n    }\n\n    @Override\n    public void evaluate() throws Throwable {\n        /*\n         * Reset the statementException before the test is run to prevent false errors during repeated execution.\n         */\n        statementException = null;\n        final StopWatch stopWatch = new StopWatch();\n\n        if (config.isTimeoutEnabled()) {\n            LOGGER.info(\"Starting timeout monitoring ({}s) of test case {}.\", 
config.getTimeoutSeconds(), target);\n        }\n\n        Thread statementThread = new Thread(new Runnable() {\n            @Override\n            public void run() {\n                try {\n                    stopWatch.start();\n                    originalStatement.evaluate();\n                    finished = true;\n                } catch (InterruptedException e) {\n                    // Ignore the InterruptedException\n                    LOGGER.debug(e.getMessage(), e);\n                } catch (Throwable e) {\n                    synchronized (target) {\n                        statementException = e;\n                    }\n                }\n            }\n        });\n\n        statementThread.start();\n        statementThread.join(config.getTimeoutSeconds() * 1000);\n\n        synchronized (target) {\n            if (statementException != null) {\n                throw statementException;\n            } else if (!finished) {\n                if (config.isTimeoutEnabled()) {\n                    statementThread.interrupt();\n                    throw new TimeoutException(\n                            String.format(\"test timed out after %d seconds\", config.getTimeoutSeconds()));\n                } else {\n                    LOGGER.warn(\"Test ran for {} seconds. Timeout disabled. See class {} for configuration options.\",\n                            stopWatch.getTime() / 1000, HiveRunnerConfig.class.getName());\n                }\n            }\n        }\n\n        statementThread.join();\n\n        if (statementException != null) {\n            throw statementException;\n        }\n    }\n\n    public static TestRule create(final HiveRunnerConfig config, final Object target) {\n        return new TestRule() {\n            @Override\n            public Statement apply(Statement base, Description description) {\n                return new ThrowOnTimeout(base, config, target);\n            }\n        };\n    }\n}"
  },
  {
    "path": "src/main/java/com/klarna/hiverunner/TimeoutException.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner;\n\npublic class TimeoutException extends RuntimeException {\n\n    private static final long serialVersionUID = 1L;\n\n    public TimeoutException() {\n        super();\n    }\n\n    public TimeoutException(String message) {\n        super(message);\n    }\n\n    public TimeoutException(String message, Throwable cause) {\n        super(message, cause);\n    }\n\n    public TimeoutException(Throwable cause) {\n        super(cause);\n    }\n\n    protected TimeoutException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) {\n        super(message, cause, enableSuppression, writableStackTrace);\n    }\n}\n"
  },
  {
    "path": "src/main/java/com/klarna/hiverunner/annotations/HiveProperties.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner.annotations;\n\nimport java.lang.annotation.Retention;\nimport java.lang.annotation.RetentionPolicy;\n\n/**\n * Marks a field to contain properties that will be appended to the HiveConf.\n * <p>\n * The field should be of type <pre>Map&lt;String, String&gt;</pre>.\n * </p><p>\n * Please refer to test class {@code com.klarna.hiverunner.examples.HelloHiveRunnerTest} for usage examples.\n * </p>\n */\n@Retention(RetentionPolicy.RUNTIME)\npublic @interface HiveProperties {\n}\n"
  },
  {
    "path": "src/main/java/com/klarna/hiverunner/annotations/HiveResource.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner.annotations;\n\nimport java.lang.annotation.Retention;\nimport java.lang.annotation.RetentionPolicy;\n\n/**\n * Marks a field to contain test data input. The field might either be of type String, File or Path.\n * The data will be copied into the specified target file by the HiveRunner engine.\n * <p>\n * Please refer to test class {@code com.klarna.hiverunner.examples.HelloHiveRunnerTest} for usage examples.\n * </p>\n */\n@Retention(RetentionPolicy.RUNTIME)\npublic @interface HiveResource {\n\n    /**\n     * Specifies where the data should be made available in HDFS.\n     * <p>\n     * Please refer to test class {@code com.klarna.hiverunner.examples.HelloHiveRunnerTest} for usage examples.\n     * </p>\n     */\n    String targetFile();\n}\n"
  },
  {
    "path": "src/main/java/com/klarna/hiverunner/annotations/HiveRunnerSetup.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner.annotations;\n\nimport java.lang.annotation.Retention;\nimport java.lang.annotation.RetentionPolicy;\n\n/**\n * Annotates a field that configures the hive runner runtime.\n * So far fields of type {@link com.klarna.hiverunner.config.HiveRunnerConfig} are supported.\n */\n@Retention(RetentionPolicy.RUNTIME)\npublic @interface HiveRunnerSetup {\n}\n"
  },
  {
    "path": "src/main/java/com/klarna/hiverunner/annotations/HiveSQL.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner.annotations;\n\nimport java.lang.annotation.Retention;\nimport java.lang.annotation.RetentionPolicy;\n\n/**\n * Marks a field (of type HiveShell) in a unit test. This field with its annotation is mandatory.\n * The HiveRunner will set the HiveShell instance before each test method is called.\n * <p>\n * Please refer to test class {@code com.klarna.hiverunner.examples.HelloHiveRunnerTest} for usage examples.\n * </p>\n */\n@Retention(RetentionPolicy.RUNTIME)\npublic @interface HiveSQL {\n\n    /**\n     * The hive sql files subject to test. Files will be executed in order\n     */\n    String[] files();\n\n    /**\n     * If the shell should be started automatically before the JUnit test method is called.\n     * <p>\n     * If set to false this leaves the tester to do additional setup in @BeforeEach (for JUnit 5) or @Before (for JUnit 4) or within actual test method. However,\n     * HiveShell.start() has to be called explicit when setup is done.\n     * </p>\n     */\n    boolean autoStart() default true;\n\n    /**\n     * The encoding of the given files. Will default to java.nio.charset.Charset#defaultCharset\n     */\n    String encoding() default \"\";\n\n}\n"
  },
  {
    "path": "src/main/java/com/klarna/hiverunner/annotations/HiveSetupScript.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner.annotations;\n\nimport java.lang.annotation.Retention;\nimport java.lang.annotation.RetentionPolicy;\n\n/**\n * Marks a field to refer to a setup script. The field should be of type String, File or Path.\n * If its a String the value of the field should be the actual script, not a path.\n * <p>\n * Please refer to test class {@code com.klarna.hiverunner.examples.HelloHiveRunnerTest} for usage examples.\n * </p>\n */\n@Retention(RetentionPolicy.RUNTIME)\npublic @interface HiveSetupScript {\n\n}\n"
  },
  {
    "path": "src/main/java/com/klarna/hiverunner/builder/HiveResource.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner.builder;\n\nimport org.apache.commons.lang.builder.ToStringBuilder;\n\nimport java.io.ByteArrayOutputStream;\nimport java.io.IOException;\nimport java.nio.charset.StandardCharsets;\nimport java.nio.file.Files;\nimport java.nio.file.Path;\n\n/**\n * Representation of a resource configuration\n */\nclass HiveResource {\n    private final String targetFile;\n    private final ByteArrayOutputStream byteArrayOutputStream;\n\n    HiveResource(String targetFile) throws IOException {\n        this(targetFile, new ByteArrayOutputStream());\n    }\n\n    HiveResource(String targetFile, Path dataFile) throws IOException {\n        this(targetFile, createOutputStream(Files.readAllBytes(dataFile)));\n    }\n\n    HiveResource(String targetFile, String data) throws IOException {\n        this(targetFile, createOutputStream(data.getBytes(StandardCharsets.UTF_8)));\n    }\n\n    private HiveResource(String targetFile, ByteArrayOutputStream byteArrayOutputStream) {\n        this.targetFile = targetFile;\n        this.byteArrayOutputStream = byteArrayOutputStream;\n    }\n\n    private static ByteArrayOutputStream createOutputStream(byte[] data) throws IOException {\n        ByteArrayOutputStream baos = new ByteArrayOutputStream();\n        baos.write(data);\n        
baos.close();\n        return baos;\n    }\n\n    String getTargetFile() {\n        return targetFile;\n    }\n\n    @Override\n    public String toString() {\n        return ToStringBuilder.reflectionToString(this);\n    }\n\n    public ByteArrayOutputStream getOutputStream() {\n        return byteArrayOutputStream;\n    }\n\n}\n"
  },
  {
    "path": "src/main/java/com/klarna/hiverunner/builder/HiveRunnerScript.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner.builder;\n\nimport java.nio.file.Path;\n\npublic class HiveRunnerScript implements Script {\n\n    private Path path;\n    private String sqlText;\n    private int index;\n\n    public HiveRunnerScript(int index, Path path, String sqlText) {\n        this.index = index;\n        this.path = path;\n        this.sqlText = sqlText;\n    }\n\n    @Override\n    public int getIndex() {\n        return index;\n    }\n\n    /* (non-Javadoc)\n     * @see com.klarna.hiverunner.builder.Script#getPath()\n     */\n    @Override\n    public Path getPath() {\n        return path;\n    }\n\n    /* (non-Javadoc)\n     * @see com.klarna.hiverunner.builder.Script#getSqlText()\n     */\n    @Override\n    public String getSql() {\n        return sqlText;\n    }\n\n    @Override\n    public int hashCode() {\n        final int prime = 31;\n        int result = 1;\n        result = prime * result + index;\n        result = prime * result + ((path == null) ? 0 : path.hashCode());\n        result = prime * result + ((sqlText == null) ? 
0 : sqlText.hashCode());\n        return result;\n    }\n\n    @Override\n    public boolean equals(Object obj) {\n        if (this == obj)\n            return true;\n        if (obj == null)\n            return false;\n        if (getClass() != obj.getClass())\n            return false;\n        HiveRunnerScript other = (HiveRunnerScript) obj;\n        if (index != other.index)\n            return false;\n        if (path == null) {\n            if (other.path != null)\n                return false;\n        } else if (!path.equals(other.path))\n            return false;\n        if (sqlText == null) {\n            if (other.sqlText != null)\n                return false;\n        } else if (!sqlText.equals(other.sqlText))\n            return false;\n        return true;\n    }\n\n    @Override\n    public String toString() {\n        return \"HiveRunnerScript [path=\" + path + \", sqlText=\" + sqlText + \", index=\" + index + \"]\";\n    }\n\n}\n"
  },
  {
    "path": "src/main/java/com/klarna/hiverunner/builder/HiveShellBase.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner.builder;\n\nimport java.io.ByteArrayOutputStream;\nimport java.io.File;\nimport java.io.IOException;\nimport java.io.OutputStream;\nimport java.nio.charset.Charset;\nimport java.nio.file.Files;\nimport java.nio.file.Path;\nimport java.nio.file.Paths;\nimport java.nio.file.StandardOpenOption;\nimport java.util.ArrayList;\nimport java.util.HashMap;\nimport java.util.List;\nimport java.util.Map;\n\nimport org.apache.hadoop.hive.conf.HiveConf;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\nimport com.google.common.base.Joiner;\nimport com.google.common.base.Preconditions;\nimport com.klarna.hiverunner.HiveServerContainer;\nimport com.klarna.hiverunner.HiveShell;\nimport com.klarna.hiverunner.data.InsertIntoTable;\nimport com.klarna.hiverunner.sql.StatementLexer;\nimport com.klarna.hiverunner.sql.cli.CommandShellEmulator;\nimport com.klarna.hiverunner.sql.split.StatementSplitter;\n\n/**\n * HiveShell implementation delegating to HiveServerContainer\n */\nclass HiveShellBase implements HiveShell {\n\n    private static final Logger LOGGER = LoggerFactory.getLogger(HiveShellBase.class);\n    private static final String DEFAULT_NULL_REPRESENTATION = \"NULL\";\n    private static final String DEFAULT_ROW_VALUE_DELIMTER = \"\\t\";\n\n    protected boolean 
started = false;\n\n    protected final HiveServerContainer hiveServerContainer;\n\n    protected final Map<String, String> hiveConf;\n    protected final Map<String, String> hiveVars;\n    protected final List<String> setupScripts;\n    protected final List<HiveResource> resources;\n    protected final List<Script> scriptsUnderTest;\n    protected final CommandShellEmulator commandShellEmulator;\n    protected StatementLexer lexer;\n    protected Path cwd;\n\n    HiveShellBase(HiveServerContainer hiveServerContainer, Map<String, String> hiveConf, List<String> setupScripts,\n                  List<HiveResource> resources, List<Script> scriptsUnderTest, CommandShellEmulator commandShellEmulator) {\n        this.hiveServerContainer = hiveServerContainer;\n        this.hiveConf = hiveConf;\n        this.commandShellEmulator = commandShellEmulator;\n        this.setupScripts = new ArrayList<>(setupScripts);\n        this.resources = new ArrayList<>(resources);\n        this.scriptsUnderTest = new ArrayList<>(scriptsUnderTest);\n        hiveVars = new HashMap<>();\n        cwd = Paths.get(System.getProperty(\"user.dir\"));\n    }\n\n    @Override\n    public List<String> executeQuery(String hiveSql) {\n        return executeQuery(hiveSql, DEFAULT_ROW_VALUE_DELIMTER, DEFAULT_NULL_REPRESENTATION);\n    }\n\n    @Override\n    public List<String> executeQuery(String hiveSql, String rowValuesDelimitedBy, String replaceNullWith) {\n        assertStarted();\n\n        List<Object[]> resultSet = executeStatement(hiveSql);\n        List<String> result = new ArrayList<>();\n        for (Object[] objects : resultSet) {\n            result.add(Joiner.on(rowValuesDelimitedBy).useForNull(replaceNullWith).join(objects));\n        }\n        return result;\n    }\n\n    @Override\n    public List<Object[]> executeStatement(String hiveSql) {\n        assertStarted();\n        return executeStatementWithCommandShellEmulation(hiveSql);\n    }\n\n    private void 
executeScriptWithCommandShellEmulation(String script) {\n        List<String> statements = lexer.applyToScript(script);\n        executeStatementsWithCommandShellEmulation(statements);\n    }\n\n    private List<Object[]> executeStatementWithCommandShellEmulation(String statement) {\n        List<String> statements = lexer.applyToStatement(statement);\n        return executeStatementsWithCommandShellEmulation(statements);\n    }\n\n    private List<Object[]> executeStatementsWithCommandShellEmulation(List<String> hiveSqlStatements) {\n        List<Object[]> results = new ArrayList<>();\n        for (String hiveSqlStatement : hiveSqlStatements) {\n            results.addAll(hiveServerContainer.executeStatement(hiveSqlStatement));\n        }\n        return results;\n    }\n\n    @Override\n    public void execute(String hiveSql) {\n        assertStarted();\n        executeScriptWithCommandShellEmulation(hiveSql);\n    }\n\n    @Override\n    public void execute(File file) {\n        assertStarted();\n        execute(Charset.defaultCharset(), file);\n    }\n\n    @Override\n    public void execute(Path path) {\n        assertStarted();\n        execute(Charset.defaultCharset(), path);\n    }\n\n    @Override\n    public void execute(Charset charset, File file) {\n        assertStarted();\n        execute(charset, Paths.get(file.toURI()));\n    }\n\n    @Override\n    public void execute(Charset charset, Path path) {\n        assertStarted();\n        assertFileExists(path);\n        List<String> hiveSqlStatements = lexer.applyToPath(path);\n        executeStatementsWithCommandShellEmulation(hiveSqlStatements);\n    }\n\n    @Override\n    public void start() {\n        assertNotStarted();\n        started = true;\n\n        lexer = new StatementLexer(cwd, Charset.defaultCharset(), commandShellEmulator);\n\n        hiveServerContainer.init(hiveConf, hiveVars);\n\n        executeSetupScripts();\n\n        prepareResources();\n\n        executeScriptsUnderTest();\n    
}\n\n    @Override\n    public void addSetupScript(String script) {\n        assertNotStarted();\n        setupScripts.add(script);\n    }\n\n    @Override\n    public void addSetupScripts(Charset charset, Path... scripts) {\n        assertNotStarted();\n        for (Path script : scripts) {\n            assertFileExists(script);\n            try {\n                String setupScript = new String(Files.readAllBytes(script), charset);\n                setupScripts.add(setupScript);\n            } catch (IOException e) {\n                throw new IllegalArgumentException(\n                        \"Unable to read setup script file '\" + script + \"': \" + e.getMessage(), e);\n            }\n        }\n    }\n\n    @Override\n    public void addSetupScripts(Charset charset, File... scripts) {\n        Path[] paths = new Path[scripts.length];\n        for (int i = 0; i < paths.length; i++) {\n            paths[i] = Paths.get(scripts[i].toURI());\n        }\n        addSetupScripts(charset, paths);\n    }\n\n    @Override\n    public void addSetupScripts(File... scripts) {\n        addSetupScripts(Charset.defaultCharset(), scripts);\n    }\n\n    @Override\n    public void addSetupScripts(Path... 
scripts) {\n        addSetupScripts(Charset.defaultCharset(), scripts);\n    }\n\n    @Override\n    public Path getBaseDir() {\n        return hiveServerContainer.getBaseDir();\n    }\n\n    @Override\n    public String expandVariableSubstitutes(String expression) {\n        assertStarted();\n        HiveConf hiveConf = getHiveConf();\n        Preconditions.checkNotNull(hiveConf);\n        return hiveServerContainer.getVariableSubstitution().substitute(hiveConf, expression);\n    }\n\n    @Override\n    public void setProperty(String key, String value) {\n        setHiveConfValue(key, value);\n    }\n\n    @Override\n    public void setHiveConfValue(String key, String value) {\n        assertNotStarted();\n        hiveConf.put(key, value);\n    }\n\n    @Override\n    public HiveConf getHiveConf() {\n        assertStarted();\n        return hiveServerContainer.getHiveConf();\n    }\n\n    @Override\n    public OutputStream getResourceOutputStream(String targetFile) {\n        try {\n            assertNotStarted();\n            HiveResource resource = new HiveResource(targetFile);\n            resources.add(resource);\n            OutputStream hiveShellStateAwareOutputStream = createPreStartOutputStream(resource.getOutputStream());\n            return hiveShellStateAwareOutputStream;\n        } catch (IOException e) {\n            throw new IllegalStateException(e.getMessage(), e);\n        }\n    }\n\n    @Override\n    public void setHiveVarValue(String var, String value) {\n        assertNotStarted();\n        hiveVars.put(var, value);\n    }\n\n    @Override\n    public void addResource(String targetFile, String data) {\n        try {\n            assertNotStarted();\n            resources.add(new HiveResource(targetFile, data));\n        } catch (IOException e) {\n            throw new IllegalStateException(e.getMessage(), e);\n        }\n    }\n\n    @Override\n    public void addResource(String targetFile, Path sourceFile) {\n        try {\n            
assertNotStarted();\n            assertFileExists(sourceFile);\n            resources.add(new HiveResource(targetFile, sourceFile));\n        } catch (IOException e) {\n            throw new IllegalStateException(e.getMessage(), e);\n        }\n    }\n\n    @Override\n    public void addResource(String targetFile, File sourceFile) {\n        addResource(targetFile, Paths.get(sourceFile.toURI()));\n    }\n\n    @Override\n    public InsertIntoTable insertInto(String databaseName, String tableName) {\n        assertStarted();\n        return InsertIntoTable.newInstance(databaseName, tableName, getHiveConf());\n    }\n\n    private void executeSetupScripts() {\n        for (String setupScript : setupScripts) {\n            LOGGER.debug(\"Executing script: \" + setupScript);\n            executeScriptWithCommandShellEmulation(setupScript);\n        }\n    }\n\n    private void prepareResources() {\n        for (HiveResource resource : resources) {\n            String expandedPath = hiveServerContainer.expandVariableSubstitutes(resource.getTargetFile());\n\n            assertResourcePreconditions(resource, expandedPath);\n\n            Path targetFile = Paths.get(expandedPath);\n\n            // Create target file in the tmp dir and write test data to it.\n            try {\n                Files.createDirectories(targetFile.getParent());\n                OutputStream targetFileOutputStream = Files.newOutputStream(targetFile, StandardOpenOption.CREATE_NEW);\n                targetFileOutputStream.write(resource.getOutputStream().toByteArray());\n                resource.getOutputStream().close();\n                targetFileOutputStream.close();\n            } catch (IOException e) {\n                throw new IllegalStateException(\"Failed to create resource target file: \" + targetFile + \" (\"\n                        + resource.getTargetFile() + \"): \" + e.getMessage(), e);\n            }\n\n            LOGGER.debug(\"Created hive resource \" + targetFile);\n\n      
  }\n    }\n\n    private void executeScriptsUnderTest() {\n        for (Script script : scriptsUnderTest) {\n            try {\n                executeScriptWithCommandShellEmulation(script.getSql());\n            } catch (Exception e) {\n                throw new IllegalStateException(\"Failed to executeScript '\" + script + \"': \" + e.getMessage(), e);\n            }\n        }\n    }\n\n    protected final void assertResourcePreconditions(HiveResource resource, String expandedPath) {\n        String unexpandedPropertyPattern = \".*\\\\$\\\\{.*\\\\}.*\";\n        boolean isUnexpanded = !expandedPath.matches(unexpandedPropertyPattern);\n\n        Preconditions.checkArgument(isUnexpanded,\n                \"File path %s contains \" + \"unresolved references. Original arg was: %s\", expandedPath,\n                resource.getTargetFile());\n\n        boolean isTargetFileWithinTestDir = expandedPath\n                .startsWith(hiveServerContainer.getBaseDir().toString());\n\n        Preconditions.checkArgument(isTargetFileWithinTestDir,\n                \"All resource target files should be created in a subdirectory to the test case basedir %s : %s\",\n                hiveServerContainer.getBaseDir().getRoot(), resource.getTargetFile());\n    }\n\n    protected final void assertFileExists(Path file) {\n        Preconditions.checkNotNull(file, \"File argument is null\");\n        Preconditions.checkArgument(Files.exists(file), \"File %s does not exist\", file);\n        Preconditions.checkArgument(Files.isRegularFile(file), \"%s is not a file\", file);\n    }\n\n    protected final void assertNotStarted() {\n        Preconditions.checkState(!started, \"HiveShell was already started\");\n    }\n\n    protected final void assertStarted() {\n        Preconditions.checkState(started, \"HiveShell was not started\");\n    }\n\n    private OutputStream createPreStartOutputStream(ByteArrayOutputStream resourceOutputStream) {\n        return new OutputStream() {\n           
 @Override\n            public void write(int b) throws IOException {\n                // It should not be possible to write to the stream after the\n                // shell has been started.\n                assertNotStarted();\n                resourceOutputStream.write(b);\n            }\n        };\n    }\n\n    @Override\n    public List<String> executeQuery(File script) {\n        return executeQuery(Charset.defaultCharset(), script);\n    }\n\n    @Override\n    public List<String> executeQuery(Path script) {\n        return executeQuery(Charset.defaultCharset(), script);\n    }\n\n    @Override\n    public List<String> executeQuery(Charset charset, File script) {\n        return executeQuery(charset, script, DEFAULT_ROW_VALUE_DELIMTER, DEFAULT_NULL_REPRESENTATION);\n    }\n\n    @Override\n    public List<String> executeQuery(Charset charset, Path script) {\n        return executeQuery(charset, script, DEFAULT_ROW_VALUE_DELIMTER, DEFAULT_NULL_REPRESENTATION);\n    }\n\n    @Override\n    public List<String> executeQuery(File script, String rowValuesDelimitedBy, String replaceNullWith) {\n        return executeQuery(Charset.defaultCharset(), script, rowValuesDelimitedBy, replaceNullWith);\n    }\n\n    @Override\n    public List<String> executeQuery(Path script, String rowValuesDelimitedBy, String replaceNullWith) {\n        return executeQuery(Charset.defaultCharset(), script, rowValuesDelimitedBy, replaceNullWith);\n    }\n\n    @Override\n    public List<String> executeQuery(Charset charset, File script, String rowValuesDelimitedBy,\n                                     String replaceNullWith) {\n        return executeQuery(charset, Paths.get(script.toURI()), rowValuesDelimitedBy, replaceNullWith);\n    }\n\n    public List<Script> getScriptsUnderTest() {\n        return scriptsUnderTest;\n    }\n\n    @Override\n    public List<String> executeQuery(Charset charset, Path script, String rowValuesDelimitedBy,\n                                     String 
replaceNullWith) {\n        assertStarted();\n        assertFileExists(script);\n        try {\n            String statements = new String(Files.readAllBytes(script), charset);\n            List<Statement> splitStatements = new StatementSplitter(commandShellEmulator).split(statements);\n            if (splitStatements.size() != 1) {\n                throw new IllegalArgumentException(\"Script '\" + script + \"' must contain a single valid statement.\");\n            }\n            Statement statement = splitStatements.get(0);\n            return executeQuery(statement.getSql(), rowValuesDelimitedBy, replaceNullWith);\n        } catch (IOException e) {\n            throw new IllegalArgumentException(\"Unable to read setup script file '\" + script + \"': \" + e.getMessage(),\n                    e);\n        }\n    }\n\n    @Override\n    public void setCwd(Path cwd) {\n        assertNotStarted();\n        this.cwd = cwd;\n    }\n\n    @Override\n    public Path getCwd() {\n        return cwd;\n    }\n}\n"
  },
  {
    "path": "src/main/java/com/klarna/hiverunner/builder/HiveShellBuilder.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner.builder;\n\nimport com.google.common.base.Preconditions;\nimport com.klarna.hiverunner.HiveServerContainer;\nimport com.klarna.hiverunner.HiveShellContainer;\nimport com.klarna.hiverunner.sql.cli.CommandShellEmulator;\nimport com.klarna.hiverunner.sql.cli.hive.HiveCliEmulator;\n\nimport java.io.IOException;\nimport java.nio.charset.Charset;\nimport java.nio.file.Files;\nimport java.nio.file.Path;\nimport java.util.ArrayList;\nimport java.util.HashMap;\nimport java.util.List;\nimport java.util.Map;\n\n/**\n * Builds a HiveShell.\n */\npublic class HiveShellBuilder {\n    private List<Script> scriptsUnderTest = new ArrayList<>();\n    private final Map<String, String> props = new HashMap<>();\n    private HiveServerContainer hiveServerContainer;\n    private final List<HiveResource> resources = new ArrayList<>();\n    private final List<String> setupScripts = new ArrayList<>();\n    private CommandShellEmulator commandShellEmulator = HiveCliEmulator.INSTANCE;\n\n    public void setHiveServerContainer(HiveServerContainer hiveServerContainer) {\n        this.hiveServerContainer = hiveServerContainer;\n    }\n\n    public void putAllProperties(Map<String, String> props) {\n        this.props.putAll(props);\n    }\n\n    public void addSetupScript(String script) {\n     
   this.setupScripts.add(script);\n    }\n\n    public void addResource(String targetFile, Path dataFile) throws IOException {\n        resources.add(new HiveResource(targetFile, dataFile));\n    }\n\n    public void addResource(String targetFile, String data) throws IOException {\n        resources.add(new HiveResource(targetFile, data));\n    }\n\n    public void setScriptsUnderTest(List<Path> scriptPaths, Charset charset) {\n        scriptsUnderTest.addAll(fromScriptPaths(scriptPaths, charset));\n    }\n\n    public List<Script> fromScriptPaths(List<Path> scriptPaths, Charset charset) {\n        List<Script> scripts = new ArrayList();\n        int index = 0;\n        for (Path path : scriptPaths) {\n            Preconditions.checkState(Files.exists(path), \"File %s does not exist\", path);\n            try {\n                String sqlText = new String(Files.readAllBytes(path), charset);\n                scripts.add(new HiveRunnerScript(index++, path, sqlText));\n            } catch (IOException e) {\n                throw new IllegalArgumentException(\"Failed to load script file '\" + path + \"'\");\n            }\n        }\n        return scripts;\n    }\n\n    public void setCommandShellEmulation(CommandShellEmulator commandShellEmulator) {\n        this.commandShellEmulator = commandShellEmulator;\n    }\n\n    public HiveShellContainer buildShell() {\n        return new HiveShellTearable(hiveServerContainer, props, setupScripts, resources, scriptsUnderTest, commandShellEmulator);\n    }\n\n    public void overrideScriptsUnderTest(List<? extends Script> scripts) {\n        scriptsUnderTest = new ArrayList<>(scripts);\n    }\n}\n\n"
  },
  {
    "path": "src/main/java/com/klarna/hiverunner/builder/HiveShellTearable.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner.builder;\n\nimport com.klarna.hiverunner.HiveServerContainer;\nimport com.klarna.hiverunner.HiveShellContainer;\nimport com.klarna.hiverunner.sql.cli.CommandShellEmulator;\n\nimport java.util.List;\nimport java.util.Map;\n\n/**\n * HiveShellContainer implementation that will do a full tear down of the hive server after test method is executed.\n */\nclass HiveShellTearable extends HiveShellBase implements HiveShellContainer {\n\n    HiveShellTearable(HiveServerContainer hiveServerContainer, Map<String, String> hiveConf,\n                      List<String> setupScripts, List<HiveResource> resources,\n                      List<Script> scriptsUnderTest, CommandShellEmulator commandShellEmulator) {\n        super(hiveServerContainer, hiveConf, setupScripts, resources, scriptsUnderTest, commandShellEmulator);\n    }\n\n    @Override\n    public void tearDown() {\n        hiveServerContainer.tearDown();\n    }\n}\n"
  },
  {
    "path": "src/main/java/com/klarna/hiverunner/builder/Script.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner.builder;\n\nimport java.nio.file.Path;\n\npublic interface Script {\n\n    /**\n     * index of script within all scripts in source\n     */\n    int getIndex();\n\n    Path getPath();\n\n    String getSql();\n\n}\n"
  },
  {
    "path": "src/main/java/com/klarna/hiverunner/builder/Statement.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner.builder;\n\npublic interface Statement {\n\n    /**\n     * Index of statement within all statements of script\n     */\n    int getIndex();\n\n    /**\n     * Original sql of the statement\n     */\n    String getSql();\n\n}\n"
  },
  {
    "path": "src/main/java/com/klarna/hiverunner/config/HiveRunnerConfig.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner.config;\n\n\nimport java.util.HashMap;\nimport java.util.Map;\nimport java.util.Properties;\n\nimport org.apache.hadoop.hive.conf.HiveConf;\n\nimport com.klarna.hiverunner.sql.cli.CommandShellEmulator;\nimport com.klarna.hiverunner.sql.cli.CommandShellEmulatorFactory;\nimport com.klarna.hiverunner.sql.cli.hive.HiveCliEmulator;\n\n\n/**\n * HiveRunner runtime configuration.\n *\n * Configure with System properties via mvn like\n * <pre>\n * &lt;plugin&gt;\n *      &lt;groupId&gt;org.apache.maven.plugins&lt;/groupId&gt;\n *      &lt;artifactId&gt;maven-surefire-plugin&lt;/artifactId&gt;\n *      &lt;version&gt;2.17&lt;/version&gt;\n *      &lt;configuration>\n *          ...\n *          &lt;systemProperties&gt;\n *              &lt;hiveconf_any.hive.conf&gt;1000&lt;/hiveconf_any.hive.conf&gt;\n *              &lt;enableTimeout&gt;false&lt;/enableTimeout&gt;\n *              &lt;timeoutSeconds&gt;30&lt;/timeoutSeconds&gt;\n *              &lt;timeoutRetries&gt;2&lt;/timeoutRetries&gt;\n *              &lt;commandShellEmulation&gt;BEELINE&lt;/commandShellEmulation&gt;\n *          &lt;/systemProperties&gt;\n *      &lt;/configuration&gt;\n * &lt;/plugin&gt;\n * </pre>\n *\n * Properties may be overridden per test class by annotating a <b>static</b> HiveRunnerConfig 
field like:\n * <pre>\n *      &#064;HiveRunnerSetup\n *      public final static HiveRunnerConfig config = new HiveRunnerConfig(){{\n *          setTimeoutEnabled(true);\n *          setTimeoutSeconds(15);\n *          setTimeoutRetries(2);\n *          setCommandShellEmulation(CommandShellEmulation.BEELINE);\n *      }};\n * </pre>\n *\n * See the test class<{@code com.klarna.hiverunner.DisabledTimeoutTest} for more information.\n */\npublic class HiveRunnerConfig {\n\n    /**\n     * Enable timeout. Some versions of tez has proven to not always terminate. By enabling timeout,\n     * HiveRunner will kill the current query and attempt to retry the test case a configurable number of times.\n     *\n     * Defaults to disabled\n     */\n    public static final String ENABLE_TIMEOUT_PROPERTY_NAME = \"enableTimeout\";\n    public static final boolean ENABLE_TIMEOUT_DEFAULT = false;\n\n    /**\n     * Seconds to wait for a query to terminate before triggering the timeout.\n     *\n     * Defaults to 30 seconds\n     */\n    public static final String TIMEOUT_SECONDS_PROPERTY_NAME = \"timeoutSeconds\";\n    public static final int TIMEOUT_SECONDS_DEFAULT = 30;\n\n    /**\n     * Number of retries for a test case that keep timing out.\n     *\n     * Defaults to 2 retries\n     */\n    public static final String TIMEOUT_RETRIES_PROPERTY_NAME = \"timeoutRetries\";\n    public static final int TIMEOUT_RETRIES_DEFAULT = 2;\n\n    /**\n     * Suffix used to flag a system property to be a hiveconf setting.\n     */\n    public static final String HIVECONF_SYSTEM_OVERRIDE_PREFIX = \"hiveconf_\";\n\n    /**\n     * The shell's {@link CommandShellEmulator}.\n     *\n     * Defaults to {@code HIVE_CLI}\n     */\n    public static final String COMMAND_SHELL_EMULATOR_PROPERTY_NAME = \"commandShellEmulator\";\n    public static final String COMMAND_SHELL_EMULATOR_DEFAULT = HiveCliEmulator.INSTANCE.getName();\n\n    private Map<String, Object> config = new HashMap<>();\n\n    
private Map<String, String> hiveConfSystemOverride = new HashMap<>();\n\n    /**\n     * Construct a HiveRunnerConfig that will override hiveConf with\n     * System properties of the format 'hiveconf_[hiveconf property name]'.\n     */\n    public HiveRunnerConfig() {\n        this(System.getProperties());\n    }\n\n    /**\n     * Construct a HiveRunnerConfig that will override hiveConf with\n     * the given properties of the format 'hiveconf_[hiveconf property name]'.\n     */\n    public HiveRunnerConfig(Properties systemProperties) {\n        config.put(ENABLE_TIMEOUT_PROPERTY_NAME, load(ENABLE_TIMEOUT_PROPERTY_NAME, ENABLE_TIMEOUT_DEFAULT, systemProperties));\n        config.put(TIMEOUT_RETRIES_PROPERTY_NAME, load(TIMEOUT_RETRIES_PROPERTY_NAME, TIMEOUT_RETRIES_DEFAULT, systemProperties));\n        config.put(TIMEOUT_SECONDS_PROPERTY_NAME, load(TIMEOUT_SECONDS_PROPERTY_NAME, TIMEOUT_SECONDS_DEFAULT, systemProperties));\n        config.put(COMMAND_SHELL_EMULATOR_PROPERTY_NAME, load(COMMAND_SHELL_EMULATOR_PROPERTY_NAME, COMMAND_SHELL_EMULATOR_DEFAULT, systemProperties));\n\n        hiveConfSystemOverride = loadHiveConfSystemOverrides(systemProperties);\n    }\n\n    public boolean isTimeoutEnabled() {\n        return getBoolean(ENABLE_TIMEOUT_PROPERTY_NAME);\n    }\n\n    public int getTimeoutRetries() {\n        return getInteger(TIMEOUT_RETRIES_PROPERTY_NAME);\n    }\n\n    public int getTimeoutSeconds() {\n        return getInteger(TIMEOUT_SECONDS_PROPERTY_NAME);\n    }\n\n    /**\n     * Get the configured hive.execution.engine. If not set it will default to the default value of HiveConf\n     */\n    public String getHiveExecutionEngine() {\n        String executionEngine = hiveConfSystemOverride.get(HiveConf.ConfVars.HIVE_EXECUTION_ENGINE.varname);\n        return executionEngine == null ? 
HiveConf.ConfVars.HIVE_EXECUTION_ENGINE.getDefaultValue() : executionEngine;\n    }\n\n    public Map<String, String> getHiveConfSystemOverride() {\n        return hiveConfSystemOverride;\n    }\n\n    /**\n     * Determines the statement parsing behaviour of the interactive shell. Provided to emulate slight differences\n     * between different clients.\n     */\n    public CommandShellEmulator getCommandShellEmulator() {\n        return CommandShellEmulatorFactory.valueOf(getString(COMMAND_SHELL_EMULATOR_PROPERTY_NAME).toUpperCase());\n    }\n\n    public void setTimeoutEnabled(boolean isEnabled) {\n        config.put(ENABLE_TIMEOUT_PROPERTY_NAME, isEnabled);\n    }\n\n    public void setTimeoutRetries(int retries) {\n        config.put(TIMEOUT_RETRIES_PROPERTY_NAME, retries);\n    }\n\n    public void setTimeoutSeconds(int timeout) {\n        config.put(TIMEOUT_SECONDS_PROPERTY_NAME, timeout);\n    }\n\n    public void setHiveExecutionEngine(String executionEngine) {\n        hiveConfSystemOverride.put(HiveConf.ConfVars.HIVE_EXECUTION_ENGINE.varname, executionEngine);\n    }\n\n    public void setCommandShellEmulator(CommandShellEmulator commandShellEmulator) {\n        config.put(COMMAND_SHELL_EMULATOR_PROPERTY_NAME, commandShellEmulator.getName());\n    }\n\n    /**\n     * Copy values from the inserted config to this config. Note that if properties has not been explicitly set,\n     * the defaults will apply.\n     */\n    public void override(HiveRunnerConfig hiveRunnerConfig) {\n        config.putAll(hiveRunnerConfig.config);\n        hiveConfSystemOverride.putAll(hiveRunnerConfig.hiveConfSystemOverride);\n    }\n\n    private static boolean load(String property, boolean defaultValue, Properties sysProperties) {\n        String value = sysProperties.getProperty(property);\n        return value == null ? 
defaultValue : Boolean.parseBoolean(value);\n    }\n\n    private static String load(String property, String defaultValue, Properties sysProperties) {\n        String value = sysProperties.getProperty(property);\n        return value == null ? defaultValue : value;\n    }\n\n    private static int load(String property, int defaultValue, Properties sysProperties) {\n        String value = sysProperties.getProperty(property);\n        return value == null ? defaultValue : Integer.parseInt(value);\n    }\n\n\n    private boolean getBoolean(String key) {\n        return (boolean) config.get(key);\n    }\n\n\n    private int getInteger(String key) {\n        return (int) config.get(key);\n    }\n\n    private String getString(String key) {\n        return (String) config.get(key);\n    }\n\n    private static Map<String, String> loadHiveConfSystemOverrides(Properties systemProperties) {\n        Map<String, String> hiveConfSystemOverride = new HashMap<>();\n\n        for (String sysKey : systemProperties.stringPropertyNames()) {\n            if (sysKey.startsWith(HIVECONF_SYSTEM_OVERRIDE_PREFIX)) {\n                String hiveConfKey = sysKey.substring(HIVECONF_SYSTEM_OVERRIDE_PREFIX.length());\n                hiveConfSystemOverride.put(hiveConfKey, systemProperties.getProperty(sysKey));\n            }\n        }\n\n        return hiveConfSystemOverride;\n    }\n\n}\n"
  },
  {
    "path": "src/main/java/com/klarna/hiverunner/data/Converters.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner.data;\n\nimport static org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.binaryTypeInfo;\nimport static org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.booleanTypeInfo;\nimport static org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.byteTypeInfo;\nimport static org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.dateTypeInfo;\nimport static org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.doubleTypeInfo;\nimport static org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.floatTypeInfo;\nimport static org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.intTypeInfo;\nimport static org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.longTypeInfo;\nimport static org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.shortTypeInfo;\nimport static org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.stringTypeInfo;\nimport static org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.timestampTypeInfo;\n\nimport java.math.BigDecimal;\nimport java.sql.Date;\nimport java.sql.Timestamp;\nimport java.util.Map;\n\nimport org.apache.commons.beanutils.ConversionException;\nimport org.apache.commons.beanutils.ConvertUtilsBean;\nimport org.apache.commons.beanutils.Converter;\nimport 
org.apache.commons.beanutils.converters.BooleanConverter;\nimport org.apache.commons.beanutils.converters.ByteArrayConverter;\nimport org.apache.commons.beanutils.converters.ByteConverter;\nimport org.apache.commons.beanutils.converters.DoubleConverter;\nimport org.apache.commons.beanutils.converters.FloatConverter;\nimport org.apache.commons.beanutils.converters.IntegerConverter;\nimport org.apache.commons.beanutils.converters.LongConverter;\nimport org.apache.commons.beanutils.converters.ShortConverter;\nimport org.apache.commons.beanutils.converters.StringConverter;\nimport org.apache.hadoop.hive.common.type.HiveChar;\nimport org.apache.hadoop.hive.common.type.HiveDecimal;\nimport org.apache.hadoop.hive.common.type.HiveVarchar;\nimport org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;\nimport org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;\nimport org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;\nimport org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;\n\nimport com.google.common.collect.ImmutableMap;\n\n/**\n * A utility class for converting from {@link String Strings} into the target Hive table's column type.\n */\npublic final class Converters {\n\n    static final Map<PrimitiveTypeInfo, Class<?>> TYPES = ImmutableMap\n            .<PrimitiveTypeInfo, Class<?>>builder()\n            .put(stringTypeInfo, String.class)\n            .put(booleanTypeInfo, Boolean.class)\n            .put(byteTypeInfo, Byte.class)\n            .put(shortTypeInfo, Short.class)\n            .put(intTypeInfo, Integer.class)\n            .put(longTypeInfo, Long.class)\n            .put(floatTypeInfo, Float.class)\n            .put(doubleTypeInfo, Double.class)\n            .put(dateTypeInfo, Date.class)\n            .put(timestampTypeInfo, Timestamp.class)\n            .put(binaryTypeInfo, Byte[].class)\n            .build();\n\n    private static final ConvertUtilsBean CONVERTER;\n\n    static {\n        CONVERTER = new ConvertUtilsBean();\n        
CONVERTER.register(new StringConverter(), String.class);\n        CONVERTER.register(new BooleanConverter(), Boolean.class);\n        CONVERTER.register(new ByteConverter(), Byte.class);\n        CONVERTER.register(new ShortConverter(), Short.class);\n        CONVERTER.register(new IntegerConverter(), Integer.class);\n        CONVERTER.register(new LongConverter(), Long.class);\n        CONVERTER.register(new FloatConverter(), Float.class);\n        CONVERTER.register(new DoubleConverter(), Double.class);\n        CONVERTER.register(new HiveDateConverter(), Date.class);\n        CONVERTER.register(new HiveTimestampConverter(), Timestamp.class);\n        CONVERTER.register(new ByteArrayConverter(), Byte[].class);\n        CONVERTER.register(new HiveDecimalConverter(), HiveDecimal.class);\n        CONVERTER.register(new HiveVarcharConverter(), HiveVarchar.class);\n        CONVERTER.register(new HiveCharConverter(), HiveChar.class);\n    }\n\n    private Converters() {\n    }\n\n    static Class<?> type(PrimitiveTypeInfo typeInfo) {\n        Class<?> type = TYPES.get(typeInfo);\n        if (type == null) {\n            if (typeInfo instanceof DecimalTypeInfo) {\n                type = HiveDecimal.class;\n            } else if (typeInfo instanceof VarcharTypeInfo) {\n                type = HiveVarchar.class;\n            } else if (typeInfo instanceof CharTypeInfo) {\n                type = HiveChar.class;\n            } else {\n                type = String.class;\n            }\n        }\n        return type;\n    }\n\n    /**\n     * Attempts to convert the input value into the target type. If the input value is {@code null} then {@code null} is\n     * returned. If the input value is a String then an attempt is made to convert it into the target type. If the input\n     * value is not a {@link String} then it is assumed the user has explicitly chosen the required type and no attempt is\n     * made to perform a conversion. 
This may result in Hive throwing an error if the incorrect type was chosen.\n     *\n     * @param value The input value.\n     * @param typeInfo The target Table's column type.\n     */\n    public static Object convert(Object value, PrimitiveTypeInfo typeInfo) {\n        if (value == null) {\n            return null;\n        }\n        if (value instanceof String) {\n            return CONVERTER.convert((String) value, type(typeInfo));\n        }\n        return value;\n    }\n\n    private static class HiveDecimalConverter implements Converter {\n        @Override\n        public Object convert(@SuppressWarnings(\"rawtypes\") Class type, Object value) {\n            try {\n                return HiveDecimal.create(new BigDecimal(value.toString()));\n            } catch (NumberFormatException e) {\n                throw new ConversionException(e);\n            }\n        }\n    }\n\n    private static class HiveDateConverter implements Converter {\n        @Override\n        public Object convert(@SuppressWarnings(\"rawtypes\") Class type, Object value) {\n            try {\n                return org.apache.hadoop.hive.common.type.Date.valueOf(value.toString());\n            } catch (IllegalArgumentException e) {\n                throw new ConversionException(e);\n            }\n        }\n    }\n\n    private static class HiveTimestampConverter implements Converter {\n        @Override\n        public Object convert(@SuppressWarnings(\"rawtypes\") Class type, Object value) {\n            try {\n                return org.apache.hadoop.hive.common.type.Timestamp.valueOf(value.toString());\n            } catch (IllegalArgumentException e) {\n                throw new ConversionException(e);\n            }\n        }\n    }\n\n    private static class HiveVarcharConverter implements Converter {\n        @Override\n        public Object convert(@SuppressWarnings(\"rawtypes\") Class type, Object value) {\n            return new HiveVarchar(value.toString(), -1);\n  
      }\n    }\n\n    private static class HiveCharConverter implements Converter {\n        @Override\n        public Object convert(@SuppressWarnings(\"rawtypes\") Class type, Object value) {\n            return new HiveChar(value.toString(), -1);\n        }\n    }\n\n}\n"
  },
  {
    "path": "src/main/java/com/klarna/hiverunner/data/FileParser.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner.data;\n\nimport java.io.File;\nimport java.util.List;\n\nimport org.apache.hive.hcatalog.data.schema.HCatSchema;\n\n/**\n * A {@link File} parsing class to be used with {@link InsertIntoTable} for inserting data into a Hive table from a\n * {@link File}.\n */\npublic interface FileParser {\n\n    /**\n     * Parses the given file and returns the rows with the requested columns.\n     *\n     * @param file The file to be parsed.\n     * @param schema The full schema of the Hive table.\n     * @param names The requested field names.\n     * @return A {@link List} of rows, each represented by an {@link Object} array.\n     */\n    List<Object[]> parse(File file, HCatSchema schema, List<String> names);\n\n    /**\n     * Parses the given file and returns the column names that are available in the file.\n     *\n     * @param file The file to be parsed\n     * @return A {@link List} of column names as Strings\n     */\n    List<String> getColumnNames(File file);\n\n    /**\n     * Method that checks if the parser has access to column names.\n     * @return\n     */\n    boolean hasColumnNames();\n}\n"
  },
  {
    "path": "src/main/java/com/klarna/hiverunner/data/InsertIntoTable.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner.data;\n\nimport java.io.File;\n\nimport org.apache.hadoop.hive.conf.HiveConf;\nimport org.apache.hive.hcatalog.api.HCatClient;\nimport org.apache.hive.hcatalog.api.HCatTable;\nimport org.apache.hive.hcatalog.common.HCatException;\n\nimport com.klarna.hiverunner.HiveShell;\n\n/**\n * A class for fluently creating a list of rows and inserting them into a table.\n */\npublic final class InsertIntoTable {\n\n    private final TableDataBuilder builder;\n    private final TableDataInserter inserter;\n\n    /**\n     * Factory method for creating an {@link InsertIntoTable}.\n     * <p>\n     * This method is intended to be called via {@link HiveShell#insertInto(String, String)}.\n     * </p>\n     *\n     * @param databaseName The database name.\n     * @param tableName The table name.\n     * @param conf The {@link HiveConf}.\n     * @return InsertIntoTable\n     */\n    public static InsertIntoTable newInstance(String databaseName, String tableName, HiveConf conf) {\n        TableDataBuilder builder = new TableDataBuilder(getHCatTable(databaseName, tableName, conf));\n        TableDataInserter inserter = new TableDataInserter(databaseName, tableName, conf);\n        return new InsertIntoTable(builder, inserter);\n    }\n\n    private static HCatTable 
getHCatTable(String databaseName, String tableName, HiveConf conf) {\n        HCatClient client = null;\n        try {\n            client = HCatClient.create(conf);\n            return client.getTable(databaseName, tableName);\n        } catch (HCatException e) {\n            throw new RuntimeException(\"Unable to get table from the metastore.\", e);\n        } finally {\n            if (client != null) {\n                try {\n                    client.close();\n                } catch (HCatException e) {\n                    throw new RuntimeException(\"Unable close client.\", e);\n                }\n            }\n        }\n    }\n\n    InsertIntoTable(TableDataBuilder builder, TableDataInserter inserter) {\n        this.builder = builder;\n        this.inserter = inserter;\n    }\n\n    /**\n     * Defines a subset of columns (a column name mask) so that only pertinent columns can be set.\n     * <p>\n     * e.g.\n     *\n     * <pre>\n     * {@code\n     * tableDataBuilder\n     *     .withColumns(\"col1\", \"col3\")\n     *     .addRow(\"value1\", \"value3\")\n     * }\n     * </pre>\n     * </p>\n     *\n     * @param names The column names.\n     * @return {@code this}\n     * @throws IllegalArgumentException if a column name does not exist in the table.\n     */\n    public InsertIntoTable withColumns(String... 
names) {\n        builder.withColumns(names);\n        return this;\n    }\n\n    /**\n     * Resets the column name mask to all the columns in the table.\n     *\n     * @return {@code this}\n     */\n    public InsertIntoTable withAllColumns() {\n        builder.withAllColumns();\n        return this;\n    }\n\n    /**\n     * Flushes the current row and creates a new row with {@code null} values for all columns.\n     *\n     * @return {@code this}\n     */\n    public InsertIntoTable newRow() {\n        builder.newRow();\n        return this;\n    }\n\n    /**\n     * Flushes the current row and creates a new row with the values specified.\n     *\n     * @param values The values to set.\n     * @return {@code this}\n     */\n    public InsertIntoTable addRow(Object... values) {\n        builder.addRow(values);\n        return this;\n    }\n\n    /**\n     * Sets the current row with the values specified.\n     *\n     * @param values The values to set.\n     * @return {@code this}\n     */\n    public InsertIntoTable setRow(Object... values) {\n        builder.setRow(values);\n        return this;\n    }\n\n    /**\n     * Adds all rows from the TSV file specified. 
The default delimiter is tab and the default null value is an empty\n     * string.\n     *\n     * @param file The file to read the data from.\n     * @return {@code this}\n     */\n    public InsertIntoTable addRowsFromTsv(File file) {\n        builder.addRowsFromTsv(file);\n        return this;\n    }\n\n    /**\n     * Adds all rows from the TSV file specified, using the provided delimiter and null value.\n     *\n     * @param file The file to read the data from.\n     * @param delimiter A column delimiter.\n     * @param nullValue Value to be treated as null in the source data.\n     * @return {@code this}\n     */\n    public InsertIntoTable addRowsFromDelimited(File file, String delimiter, Object nullValue) {\n        builder.addRowsFromDelimited(file, delimiter, nullValue);\n        return this;\n    }\n\n    /**\n     * Adds all rows from the file specified, using the provided parser.\n     *\n     * @param file File to read the data from.\n     * @param fileParser Parser to be used to parse the file.\n     * @return {@code this}\n     */\n    public InsertIntoTable addRowsFrom(File file, FileParser fileParser) {\n        builder.addRowsFrom(file, fileParser);\n        return this;\n    }\n\n    /**\n     * Flushes the current row and creates a new row with the same values.\n     *\n     * @return {@code this}\n     */\n    public InsertIntoTable copyRow() {\n        builder.copyRow();\n        return this;\n    }\n\n    /**\n     * Set the given column name to the given value.\n     *\n     * @param name The column name to set.\n     * @param value the value to set.\n     * @return {@code this}\n     * @throws IllegalArgumentException if a column name does not exist in the table.\n     */\n    public InsertIntoTable set(String name, Object value) {\n        builder.set(name, value);\n        return this;\n    }\n\n    /**\n     * Inserts the data into the table. 
This does not replace any existing data, but appends new part files to the\n     * table/partition location(s).\n     */\n    public void commit() {\n        inserter.insert(builder.build());\n    }\n\n}\n"
  },
  {
    "path": "src/main/java/com/klarna/hiverunner/data/TableDataBuilder.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner.data;\n\nimport static com.google.common.base.Preconditions.checkArgument;\nimport static com.google.common.base.Preconditions.checkNotNull;\nimport static com.google.common.base.Preconditions.checkState;\n\nimport java.io.File;\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.Map;\n\nimport org.apache.commons.beanutils.ConversionException;\nimport org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;\nimport org.apache.hive.hcatalog.api.HCatTable;\nimport org.apache.hive.hcatalog.common.HCatException;\nimport org.apache.hive.hcatalog.data.DefaultHCatRecord;\nimport org.apache.hive.hcatalog.data.HCatRecord;\nimport org.apache.hive.hcatalog.data.schema.HCatFieldSchema;\nimport org.apache.hive.hcatalog.data.schema.HCatSchema;\n\nimport com.google.common.base.Function;\nimport com.google.common.collect.FluentIterable;\nimport com.google.common.collect.ImmutableList;\nimport com.google.common.collect.ImmutableMap;\nimport com.google.common.collect.ImmutableMultimap;\nimport com.google.common.collect.ImmutableMultimap.Builder;\nimport com.google.common.collect.Multimap;\n\nclass TableDataBuilder {\n\n    private final Builder<Map<String, String>, HCatRecord> rowsBuilder = ImmutableMultimap.builder();\n    private final HCatSchema schema;\n    
private final List<HCatFieldSchema> partitionColumns;\n\n    private HCatRecord row;\n    private List<String> names;\n\n    TableDataBuilder(HCatTable table) {\n        schema = new HCatSchema(ImmutableList\n                .<HCatFieldSchema>builder()\n                .addAll(table.getCols())\n                .addAll(table.getPartCols())\n                .build());\n        partitionColumns = table.getPartCols();\n        withAllColumns();\n    }\n\n    TableDataBuilder withColumns(String... names) {\n        checkArgument(checkNotNull(names).length > 0, \"Column names must be provided.\");\n        this.names = new ArrayList<>(names.length);\n        for (String name : names) {\n            checkColumn(name);\n            this.names.add(name);\n        }\n        return this;\n    }\n\n    TableDataBuilder withAllColumns() {\n        names = schema.getFieldNames();\n        return this;\n    }\n\n    TableDataBuilder newRow() {\n        flushRow();\n        row = new DefaultHCatRecord(schema.size());\n        return this;\n    }\n\n    TableDataBuilder addRow(Object... values) {\n        return newRow().setRow(values);\n    }\n\n    TableDataBuilder setRow(Object... 
values) {\n        checkArgument(values.length == names.size(), \"Expected %d values, got %d\", names.size(), values.length);\n        for (int i = 0; i < values.length; i++) {\n            set(names.get(i), values[i]);\n        }\n        return this;\n    }\n\n    TableDataBuilder addRowsFromTsv(File file) {\n        return addRowsFrom(file, new TsvFileParser());\n    }\n\n    TableDataBuilder addRowsFromDelimited(File file, String delimiter, Object nullValue) {\n        return addRowsFrom(file, new TsvFileParser().withDelimiter(delimiter).withNullValue(nullValue));\n    }\n\n    TableDataBuilder addRowsFrom(File file, FileParser fileParser) {\n        if (fileParser.hasColumnNames()) {\n            checkArgument(names.equals(schema.getFieldNames()),\n                    \"Manual column spec and header column spec are mutually exclusive\");\n            String[] columns = FluentIterable\n                    .from(fileParser.getColumnNames(file))\n                    .transform(toLowerCase())\n                    .toArray(String.class);\n            withColumns(columns);\n        }\n        return addRows(fileParser.parse(file, schema, names));\n    }\n\n    private Function<String, String> toLowerCase() {\n        return new Function<String, String>() {\n            @Override\n            public String apply(String t) {\n                return t.toLowerCase();\n            }\n        };\n    }\n\n    private TableDataBuilder addRows(List<Object[]> rows) {\n        for (Object[] row : rows) {\n            addRow(row);\n        }\n        return this;\n    }\n\n    TableDataBuilder copyRow() {\n        checkState(row != null, \"No previous row to copy.\");\n        HCatRecord copy = new DefaultHCatRecord(new ArrayList<>(row.getAll()));\n        flushRow();\n        row = copy;\n        return this;\n    }\n\n    TableDataBuilder set(String name, Object value) {\n        checkColumn(name);\n        PrimitiveTypeInfo typeInfo;\n        try {\n            typeInfo = 
schema.get(name).getTypeInfo();\n        } catch (HCatException e) {\n            throw new IllegalArgumentException(\"Error getting type info for \" + name, e);\n        }\n        Object converted;\n        try {\n            converted = Converters.convert(value, typeInfo);\n        } catch (ConversionException e) {\n            throw new IllegalArgumentException(\"Invalid value for \" + name + \". Got '\" + value + \"' (\"\n                    + value.getClass().getSimpleName() + \"). Expected \" + typeInfo.getTypeName() + \".\", e);\n        }\n        try {\n            row.set(name, schema, converted);\n        } catch (HCatException e) {\n            throw new RuntimeException(\"Error setting value for \" + name, e);\n        }\n        return this;\n    }\n\n    private Object get(String name) {\n        checkColumn(name);\n        try {\n            return row.get(name, schema);\n        } catch (HCatException e) {\n            throw new RuntimeException(\"Error getting value for \" + name, e);\n        }\n    }\n\n    private void flushRow() {\n        if (row != null) {\n            rowsBuilder.put(createPartitionSpec(), row);\n        }\n    }\n\n    private Map<String, String> createPartitionSpec() {\n        ImmutableMap.Builder<String, String> builder = ImmutableMap.builder();\n        for (HCatFieldSchema partitionColumn : partitionColumns) {\n            String name = partitionColumn.getName();\n            Object value = get(name);\n            checkState(value != null, \"Value for partition column %s must not be null.\", name);\n            builder.put(name, value.toString());\n        }\n        return builder.build();\n    }\n\n    Multimap<Map<String, String>, HCatRecord> build() {\n        flushRow();\n        return rowsBuilder.build();\n    }\n\n    private void checkColumn(String name) {\n        checkArgument(schema.getFieldNames().contains(name.toLowerCase()), \"Column %s does not exist\", name);\n    }\n\n}\n"
  },
  {
    "path": "src/main/java/com/klarna/hiverunner/data/TableDataInserter.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner.data;\n\nimport java.util.Iterator;\nimport java.util.Map;\n\nimport org.apache.hadoop.hive.conf.HiveConf;\nimport org.apache.hive.hcatalog.common.HCatException;\nimport org.apache.hive.hcatalog.data.HCatRecord;\nimport org.apache.hive.hcatalog.data.transfer.DataTransferFactory;\nimport org.apache.hive.hcatalog.data.transfer.HCatWriter;\nimport org.apache.hive.hcatalog.data.transfer.WriteEntity;\nimport org.apache.hive.hcatalog.data.transfer.WriterContext;\n\nimport com.google.common.collect.Maps;\nimport com.google.common.collect.Multimap;\n\nclass TableDataInserter {\n\n    private final String databaseName;\n    private final String tableName;\n    private final Map<String, String> config;\n\n    TableDataInserter(String databaseName, String tableName, HiveConf conf) {\n        this.databaseName = databaseName;\n        this.tableName = tableName;\n        config = Maps.fromProperties(conf.getAllProperties());\n    }\n\n    void insert(Multimap<Map<String, String>, HCatRecord> data) {\n        Iterator<Map<String, String>> iterator = data.keySet().iterator();\n        while (iterator.hasNext()) {\n            Map<String, String> partitionSpec = iterator.next();\n            insert(partitionSpec, data.get(partitionSpec));\n        }\n    }\n\n    private void 
insert(Map<String, String> partitionSpec, Iterable<HCatRecord> rows) {\n        WriteEntity entity = new WriteEntity.Builder()\n                .withDatabase(databaseName)\n                .withTable(tableName)\n                .withPartition(partitionSpec)\n                .build();\n\n        try {\n            HCatWriter master = DataTransferFactory.getHCatWriter(entity, config);\n            WriterContext context = master.prepareWrite();\n            HCatWriter writer = DataTransferFactory.getHCatWriter(context);\n            writer.write(rows.iterator());\n            master.commit(context);\n        } catch (HCatException e) {\n            throw new RuntimeException(\"An error occurred while inserting data to \" + databaseName + \".\" + tableName, e);\n        }\n    }\n\n}\n"
  },
  {
    "path": "src/main/java/com/klarna/hiverunner/data/TsvFileParser.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner.data;\n\nimport java.io.File;\nimport java.io.IOException;\nimport java.nio.charset.Charset;\nimport java.nio.charset.StandardCharsets;\nimport java.nio.file.Files;\nimport java.util.ArrayList;\nimport java.util.Iterator;\nimport java.util.List;\n\nimport org.apache.commons.lang3.ObjectUtils;\nimport org.apache.hive.hcatalog.data.schema.HCatSchema;\n\nimport com.google.common.base.Splitter;\n\n/**\n * A {@link FileParser} for parsing data out of a TSV file.\n */\npublic class TsvFileParser implements FileParser {\n\n    private static final String DEFAULT_DELIMITER = \"\\t\";\n    private static final String DEFAULT_NULL_VALUE = \"\";\n\n    private Splitter splitter;\n    private Object nullValue;\n    private Charset charset;\n    private boolean hasHeader;\n\n    public TsvFileParser() {\n        withDelimiter(DEFAULT_DELIMITER);\n        withNullValue(DEFAULT_NULL_VALUE);\n        withCharset(StandardCharsets.UTF_8);\n        withoutHeader();\n    }\n\n    /**\n     * Use the provided delimiter. The default is a tab.\n     */\n    public TsvFileParser withDelimiter(String delimiter) {\n        splitter = Splitter.on(delimiter);\n        return this;\n    }\n\n    /**\n     * Use the provided null value. 
When a column's value equals the null value it will be replaced with null. The default\n     * is an empty string.\n     */\n    public TsvFileParser withNullValue(Object nullValue) {\n        this.nullValue = nullValue;\n        return this;\n    }\n\n    /**\n     * Use the provided {@link Charset}. The default is UTF-8.\n     */\n    public TsvFileParser withCharset(Charset charset) {\n        this.charset = charset;\n        return this;\n    }\n\n    /**\n     * Enable if TSV file has header row. Default is false.\n     */\n    public TsvFileParser withHeader() {\n        this.hasHeader = true;\n        return this;\n    }\n\n    /**\n     * Enable if TSV file has header row. Default is false.\n     */\n    public TsvFileParser withoutHeader() {\n        this.hasHeader = false;\n        return this;\n    }\n\n\n    @Override\n    public List<Object[]> parse(File file, HCatSchema schema, List<String> names) {\n        try {\n            List<String> lines = Files.readAllLines(file.toPath(), charset);\n\n            if (this.hasHeader) {\n                lines = lines.subList(1, lines.size());\n            }\n\n            List<Object[]> records = new ArrayList<>(lines.size());\n            for (String line : lines) {\n                records.add(parseRow(line, names.size()));\n            }\n            return records;\n        } catch (IOException e) {\n            throw new RuntimeException(\"Error while reading file\", e);\n        }\n    }\n\n    @Override\n    public boolean hasColumnNames() {\n        return this.hasHeader;\n    }\n\n    @Override\n    public List<String> getColumnNames(File file) {\n        try {\n            String firstLine = Files.newBufferedReader(file.toPath(), charset).readLine();\n            List<String> columns = new ArrayList<>();\n            Iterator<String> iterator = splitter.split(firstLine).iterator();\n\n            while (iterator.hasNext()) {\n                String column = iterator.next();\n                
columns.add(column);\n            }\n            return columns;\n        } catch (IOException e) {\n            throw new RuntimeException(\"Error while reading file\", e);\n        }\n    }\n\n    private Object[] parseRow(String line, int size) {\n        List<Object> row = new ArrayList<>(size);\n        Iterator<String> iterator = splitter.split(line).iterator();\n\n        for (int i = 0; i < size; i++) {\n            if (iterator.hasNext()) {\n                String column = iterator.next();\n                if (ObjectUtils.equals(nullValue, column)) {\n                    row.add(null);\n                } else {\n                    row.add(column);\n                }\n            } else {\n                throw new IllegalStateException(\"Not enough columns. Require \" + size + \" columns, got \" + i);\n            }\n        }\n\n        return row.toArray(new Object[size]);\n    }\n}\n"
  },
  {
    "path": "src/main/java/com/klarna/hiverunner/io/IgnoreClosePrintStream.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner.io;\n\nimport java.io.OutputStream;\nimport java.io.PrintStream;\n\npublic class IgnoreClosePrintStream extends PrintStream {\n\n    public IgnoreClosePrintStream(OutputStream out) {\n        super(out);\n    }\n\n    @Override\n    public void close() {\n        super.flush();\n    }\n}\n"
  },
  {
    "path": "src/main/java/com/klarna/hiverunner/sql/HiveRunnerStatement.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner.sql;\n\nimport com.klarna.hiverunner.builder.Statement;\n\npublic class HiveRunnerStatement implements Statement {\n\n    private final int index;\n    private final String sql;\n\n    public HiveRunnerStatement(int index, String sql) {\n        this.index = index;\n        this.sql = sql;\n    }\n\n    @Override\n    public int getIndex() {\n        return index;\n    }\n\n    @Override\n    public String getSql() {\n        return sql;\n    }\n\n    @Override\n    public int hashCode() {\n        final int prime = 31;\n        int result = 1;\n        result = prime * result + index;\n        result = prime * result + ((sql == null) ? 
0 : sql.hashCode());\n        return result;\n    }\n\n    @Override\n    public boolean equals(Object obj) {\n        if (this == obj)\n            return true;\n        if (obj == null)\n            return false;\n        if (getClass() != obj.getClass())\n            return false;\n        HiveRunnerStatement other = (HiveRunnerStatement) obj;\n        if (index != other.index)\n            return false;\n        if (sql == null) {\n            if (other.sql != null)\n                return false;\n        } else if (!sql.equals(other.sql))\n            return false;\n        return true;\n    }\n\n    @Override\n    public String toString() {\n        return \"HiveRunnerStatement [index=\" + index + \", sql=\" + sql + \"]\";\n    }\n\n\n}\n"
  },
  {
    "path": "src/main/java/com/klarna/hiverunner/sql/StatementLexer.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner.sql;\n\nimport java.io.IOException;\nimport java.nio.charset.Charset;\nimport java.nio.file.Files;\nimport java.nio.file.Path;\nimport java.util.ArrayList;\nimport java.util.List;\n\nimport com.klarna.hiverunner.builder.Statement;\nimport com.klarna.hiverunner.sql.cli.CommandShellEmulator;\nimport com.klarna.hiverunner.sql.split.StatementSplitter;\n\npublic class StatementLexer {\n\n    private final Charset charset;\n    private final CommandShellEmulator commandShellEmulation;\n    private final Path cwd;\n\n    public StatementLexer(Path cwd, Charset charset, CommandShellEmulator commandShellEmulation) {\n        this.cwd = cwd;\n        this.charset = charset;\n        this.commandShellEmulation = commandShellEmulation;\n    }\n\n    private List<String> internalApplyToStatement(String statement) {\n        String transformedHiveSql = commandShellEmulation.preProcessor().statement(statement);\n        return commandShellEmulation.postProcessor(this).statement(transformedHiveSql);\n    }\n\n    public List<String> applyToScript(String script) {\n        List<String> hiveSqlStatements = new ArrayList<>();\n        List<Statement> statements = new StatementSplitter(commandShellEmulation)\n                
.split(commandShellEmulation.preProcessor().script(script));\n        for (Statement statement : statements) {\n            hiveSqlStatements.addAll(internalApplyToStatement(statement.getSql()));\n        }\n        return hiveSqlStatements;\n    }\n\n    public List<String> applyToStatement(String statement) {\n        return internalApplyToStatement(statement);\n    }\n\n    public List<String> applyToPath(Path path) {\n        if (!path.isAbsolute()) {\n            path = cwd.resolve(path);\n        }\n        try {\n            String script = new String(Files.readAllBytes(path), charset);\n            return applyToScript(script);\n        } catch (IOException e) {\n            throw new IllegalArgumentException(\"Unable to read script file '\" + path + \"': \" + e.getMessage(), e);\n        }\n    }\n\n}\n"
  },
  {
    "path": "src/main/java/com/klarna/hiverunner/sql/cli/AbstractImportPostProcessor.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner.sql.cli;\n\nimport java.nio.file.Path;\nimport java.nio.file.Paths;\nimport java.util.Collections;\nimport java.util.List;\n\nimport com.klarna.hiverunner.sql.StatementLexer;\n\n/**\n * An abstract {@link PostProcessor} implementation that recursively expands\n * import type commands such as Hive CLI's {@code SOURCE}, and Beeline's\n * {@code !run} commands.\n */\npublic abstract class AbstractImportPostProcessor implements PostProcessor {\n\n    private final StatementLexer lexer;\n\n    public AbstractImportPostProcessor(StatementLexer lexer) {\n        this.lexer = lexer;\n    }\n\n    @Override\n    public List<String> statement(String statement) {\n        if (isImport(statement)) {\n            String importPath = getImportPath(statement);\n            Path path = Paths.get(importPath);\n            return lexer.applyToPath(path);\n        }\n        return Collections.singletonList(statement);\n    }\n\n    public abstract String getImportPath(String statement);\n\n    public abstract boolean isImport(String statement);\n\n}\n"
  },
  {
    "path": "src/main/java/com/klarna/hiverunner/sql/cli/CommandShellEmulator.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner.sql.cli;\n\nimport java.util.List;\n\nimport com.klarna.hiverunner.sql.StatementLexer;\nimport com.klarna.hiverunner.sql.split.TokenRule;\n\n/**\n * Attempt to accurately emulate the behaviours (good and bad) of different Hive\n * shells.\n */\npublic interface CommandShellEmulator {\n    PreProcessor preProcessor();\n\n    PostProcessor postProcessor(StatementLexer lexer);\n\n    String specialCharacters();\n\n    List<TokenRule> splitterRules();\n\n    String getName();\n}\n"
  },
  {
    "path": "src/main/java/com/klarna/hiverunner/sql/cli/CommandShellEmulatorFactory.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner.sql.cli;\n\nimport com.klarna.hiverunner.sql.cli.beeline.BeelineEmulator;\nimport com.klarna.hiverunner.sql.cli.hive.PreV200HiveCliEmulator;\nimport com.klarna.hiverunner.sql.cli.hive.HiveCliEmulator;\n\npublic class CommandShellEmulatorFactory {\n\n    private CommandShellEmulatorFactory() {\n    }\n\n    public static CommandShellEmulator valueOf(String name) {\n        if (\"beeline\".equalsIgnoreCase(name.trim())) {\n            return BeelineEmulator.INSTANCE;\n        } else if (\"hive_cli\".equalsIgnoreCase(name.trim())) {\n            return HiveCliEmulator.INSTANCE;\n        } else if (\"hive_cli_pre_v200\".equalsIgnoreCase(name.trim())) {\n            return PreV200HiveCliEmulator.INSTANCE;\n        }\n        throw new IllegalArgumentException(\"Unsupported CLI: \" + name);\n    }\n}\n"
  },
  {
    "path": "src/main/java/com/klarna/hiverunner/sql/cli/CommentUtil.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner.sql.cli;\n\n/** Utility methods for handling SQL comments. */\npublic final class CommentUtil {\n\n    private CommentUtil() {\n    }\n\n    public static String stripFullLineComments(String statement) {\n        StringBuilder newStatement = new StringBuilder(statement.length());\n        String[] lines = statement.split(\"\\n\");\n        for (int i = 0; i < lines.length; i++) {\n            String line = lines[i];\n            if (!line.trim().startsWith(\"--\")) {\n                newStatement.append(line);\n                if (i < lines.length - 1) {\n                    newStatement.append('\\n');\n                }\n            }\n        }\n        return newStatement.toString();\n    }\n\n}\n"
  },
  {
    "path": "src/main/java/com/klarna/hiverunner/sql/cli/DefaultPreProcessor.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner.sql.cli;\n\n/**\n * A {@link PreProcessor} that strips comments from statements and scripts.\n */\npublic enum DefaultPreProcessor implements PreProcessor {\n    INSTANCE;\n\n    @Override\n    public String script(String script) {\n        return CommentUtil.stripFullLineComments(script);\n    }\n\n    @Override\n    public String statement(String statement) {\n        return CommentUtil.stripFullLineComments(statement);\n    }\n\n}\n"
  },
  {
    "path": "src/main/java/com/klarna/hiverunner/sql/cli/PostProcessor.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner.sql.cli;\n\nimport java.util.List;\n\n/**\n * Allows the further processing of statements that have been extracted from a script.\n */\npublic interface PostProcessor {\n    public List<String> statement(String statement);\n}\n"
  },
  {
    "path": "src/main/java/com/klarna/hiverunner/sql/cli/PreProcessor.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner.sql.cli;\n\n/** Allows preprocessing of raw script and statement text. */\npublic interface PreProcessor {\n    public String script(String script);\n\n    public String statement(String statement);\n}\n"
  },
  {
    "path": "src/main/java/com/klarna/hiverunner/sql/cli/beeline/BeelineEmulator.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner.sql.cli.beeline;\n\nimport java.util.Arrays;\nimport java.util.List;\n\nimport com.klarna.hiverunner.sql.StatementLexer;\nimport com.klarna.hiverunner.sql.cli.DefaultPreProcessor;\nimport com.klarna.hiverunner.sql.cli.CommandShellEmulator;\nimport com.klarna.hiverunner.sql.cli.PostProcessor;\nimport com.klarna.hiverunner.sql.cli.PreProcessor;\nimport com.klarna.hiverunner.sql.split.CloseStatementRule;\nimport com.klarna.hiverunner.sql.split.DefaultTokenRule;\nimport com.klarna.hiverunner.sql.split.PreserveCommentsRule;\nimport com.klarna.hiverunner.sql.split.PreserveQuotesRule;\nimport com.klarna.hiverunner.sql.split.StatementSplitter;\nimport com.klarna.hiverunner.sql.split.TokenRule;\n\n/**\n * Emulates CLI behaviours specific to beeline. 
This includes interpretation of {@code !run} commands, and full line\n * comment handling.\n */\npublic enum BeelineEmulator implements CommandShellEmulator {\n    INSTANCE;\n\n    public static final String BEELINE_SPECIAL_CHARS = \"!\";\n\n    @Override\n    public PreProcessor preProcessor() {\n        return DefaultPreProcessor.INSTANCE;\n    }\n\n    @Override\n    public PostProcessor postProcessor(StatementLexer lexer) {\n        return new RunCommandPostProcessor(lexer);\n    }\n\n    @Override\n    public String getName() {\n        return \"BEELINE\";\n    }\n\n    @Override\n    public String specialCharacters() {\n        return StatementSplitter.SQL_SPECIAL_CHARS + BEELINE_SPECIAL_CHARS;\n    }\n\n    @Override\n    public List<TokenRule> splitterRules() {\n        // This order is important as rules may be progressively greedy. DefaultTokenRule will consume\n        // all tokens for example.\n        return Arrays.<TokenRule>asList(CloseStatementRule.INSTANCE, PreserveCommentsRule.INSTANCE,\n                PreserveQuotesRule.INSTANCE, SqlLineCommandRule.INSTANCE, DefaultTokenRule.INSTANCE);\n\n    }\n\n}\n"
  },
  {
    "path": "src/main/java/com/klarna/hiverunner/sql/cli/beeline/RunCommandPostProcessor.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner.sql.cli.beeline;\n\nimport com.klarna.hiverunner.sql.StatementLexer;\nimport com.klarna.hiverunner.sql.cli.AbstractImportPostProcessor;\nimport com.klarna.hiverunner.sql.cli.PostProcessor;\n\n/**\n * A {@link PostProcessor} that inlines external Hive SQL files referenced in\n * {@code !run} directives.\n */\nclass RunCommandPostProcessor extends AbstractImportPostProcessor {\n\n    private static final String TOKEN = \"!run\";\n\n    RunCommandPostProcessor(StatementLexer lexer) {\n        super(lexer);\n    }\n\n    @Override\n    public String getImportPath(String statement) {\n        // Beeline does not allow the filename to contain whitespace\n        String[] tokens = statement.trim().split(\" \");\n        if (tokens.length == 2) {\n            return tokens[1];\n        }\n        throw new IllegalArgumentException(\"Cannot get file to import from '\" + statement + \"'\");\n    }\n\n    @Override\n    public boolean isImport(String statement) {\n        // Beeline is case-sensitive; only accept lower case '!run'\n        return statement.trim().startsWith(TOKEN);\n    }\n\n}\n"
  },
  {
    "path": "src/main/java/com/klarna/hiverunner/sql/cli/beeline/SqlLineCommandRule.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner.sql.cli.beeline;\n\nimport java.util.Collections;\nimport java.util.Set;\n\nimport com.klarna.hiverunner.sql.split.Consumer;\nimport com.klarna.hiverunner.sql.split.Context;\nimport com.klarna.hiverunner.sql.split.TokenRule;\n\n/**\n * A {@link TokenRule} that causes the splitter to capture beeline commands.\n * Effectively to differentiate between SQL's {@code NOT} operator and Beeline's command prefix.\n */\npublic enum SqlLineCommandRule implements TokenRule {\n    INSTANCE;\n\n    @Override\n    public Set<String> triggers() {\n        return Collections.singleton(\"!\");\n    }\n\n    @Override\n    public void handle(String token, Context context) {\n        if (context.statement().trim().isEmpty()) {\n            // This is a SqlLine command\n            context.append(token);\n            context.appendWith(Consumer.UNTIL_EOL);\n            context.flush();\n        } else {\n            // This is a '!' somewhere in the current statement\n            context.append(token);\n        }\n    }\n\n}\n"
  },
  {
    "path": "src/main/java/com/klarna/hiverunner/sql/cli/hive/HiveCliEmulator.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner.sql.cli.hive;\n\nimport java.util.Arrays;\nimport java.util.List;\n\nimport com.klarna.hiverunner.sql.StatementLexer;\nimport com.klarna.hiverunner.sql.cli.CommandShellEmulator;\nimport com.klarna.hiverunner.sql.cli.DefaultPreProcessor;\nimport com.klarna.hiverunner.sql.cli.PostProcessor;\nimport com.klarna.hiverunner.sql.cli.PreProcessor;\nimport com.klarna.hiverunner.sql.split.CloseStatementRule;\nimport com.klarna.hiverunner.sql.split.DefaultTokenRule;\nimport com.klarna.hiverunner.sql.split.PreserveCommentsRule;\nimport com.klarna.hiverunner.sql.split.PreserveQuotesRule;\nimport com.klarna.hiverunner.sql.split.StatementSplitter;\nimport com.klarna.hiverunner.sql.split.TokenRule;\n\n/**\n * Emulates CLI behaviours specific to the Hive CLI. This includes interpretation of {@code source} commands, and the\n * broken full line comment handling.\n */\npublic enum HiveCliEmulator implements CommandShellEmulator {\n    INSTANCE;\n\n    @Override\n    public PreProcessor preProcessor() {\n        return DefaultPreProcessor.INSTANCE;\n    }\n\n    @Override\n    public PostProcessor postProcessor(StatementLexer lexer) {\n        return new SourceCommandPostProcessor(lexer);\n    }\n\n    @Override\n    public String getName() {\n        return \"HIVE_CLI\";\n    }\n\n    @Override\n    public String specialCharacters() {\n        return StatementSplitter.SQL_SPECIAL_CHARS;\n    }\n\n    @Override\n    public List<TokenRule> splitterRules() {\n        // This order is important as rules may be progressively greedy. DefaultTokenRule will consume\n        // all tokens for example.\n        return Arrays.<TokenRule>asList(CloseStatementRule.INSTANCE, PreserveCommentsRule.INSTANCE,\n                PreserveQuotesRule.INSTANCE, DefaultTokenRule.INSTANCE);\n\n    }\n}\n"
  },
  {
    "path": "src/main/java/com/klarna/hiverunner/sql/cli/hive/PreV200HiveCliEmulator.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner.sql.cli.hive;\n\nimport java.util.List;\n\nimport com.klarna.hiverunner.sql.StatementLexer;\nimport com.klarna.hiverunner.sql.cli.CommandShellEmulator;\nimport com.klarna.hiverunner.sql.cli.PostProcessor;\nimport com.klarna.hiverunner.sql.cli.PreProcessor;\nimport com.klarna.hiverunner.sql.split.TokenRule;\n\n/**\n * Emulates CLI behaviours specific to the Hive CLI. This includes\n * interpretation of {@code source} commands, and the broken full line comment\n * handling.\n */\npublic enum PreV200HiveCliEmulator implements CommandShellEmulator {\n    INSTANCE;\n\n    @Override\n    public PreProcessor preProcessor() {\n        return PreV200HiveCliPreProcessor.INSTANCE;\n    }\n\n    @Override\n    public PostProcessor postProcessor(StatementLexer lexer) {\n        return HiveCliEmulator.INSTANCE.postProcessor(lexer);\n    }\n\n    @Override\n    public String getName() {\n        return \"HIVE_CLI_PRE_V200\";\n    }\n\n    @Override\n    public String specialCharacters() {\n        return HiveCliEmulator.INSTANCE.specialCharacters();\n    }\n\n    @Override\n    public List<TokenRule> splitterRules() {\n        return HiveCliEmulator.INSTANCE.splitterRules();\n    }\n}\n"
  },
  {
    "path": "src/main/java/com/klarna/hiverunner/sql/cli/hive/PreV200HiveCliPreProcessor.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner.sql.cli.hive;\n\nimport com.klarna.hiverunner.sql.cli.CommentUtil;\nimport com.klarna.hiverunner.sql.cli.PreProcessor;\n\n/**\n * A {@link PreProcessor} that strips comments from scripts only, replicating\n * Hive CLI's broken functionality present in versions <2.0.0. This is described\n * in <a href=\"https://issues.apache.org/jira/browse/HIVE-8396\">HIVE-8396</a>.\n * <p>\n * Full line comments are stripped from script files as is the case with both\n * {@code hive -f} and {@code beeline -f}. The implementations provided here\n * replicate these modes of operation.\n * </p>\n */\nenum PreV200HiveCliPreProcessor implements PreProcessor {\n    INSTANCE;\n\n    @Override\n    public String script(String script) {\n        return CommentUtil.stripFullLineComments(script);\n    }\n\n    @Override\n    public String statement(String statement) {\n        return statement;\n    }\n\n}\n"
  },
  {
    "path": "src/main/java/com/klarna/hiverunner/sql/cli/hive/SourceCommandPostProcessor.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner.sql.cli.hive;\n\nimport com.klarna.hiverunner.sql.StatementLexer;\nimport com.klarna.hiverunner.sql.cli.AbstractImportPostProcessor;\nimport com.klarna.hiverunner.sql.cli.PostProcessor;\n\n/**\n * A {@link PostProcessor} that inlines external Hive SQL files referenced in\n * {@code SOURCE} directives.\n */\nclass SourceCommandPostProcessor extends AbstractImportPostProcessor {\n\n    private static final String TOKEN = \"source\";\n\n    SourceCommandPostProcessor(StatementLexer lexer) {\n        super(lexer);\n    }\n\n    @Override\n    public String getImportPath(String statement) {\n        // everything after 'source' (trimmed) is considered the filename\n        return statement.trim().substring(TOKEN.length()).trim();\n    }\n\n    @Override\n    public boolean isImport(String statement) {\n        // case-insensitive\n        return statement.trim().toLowerCase().startsWith(TOKEN);\n    }\n\n}\n"
  },
  {
    "path": "src/main/java/com/klarna/hiverunner/sql/split/BaseContext.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner.sql.split;\n\nimport static com.klarna.hiverunner.sql.split.NewLineUtil.removeLeadingTrailingNewLines;\n\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.StringTokenizer;\n\n/** Base {@link Context} implementation. */\nclass BaseContext implements Context {\n\n    private final StringTokenizer tokenizer;\n    private final List<String> statements = new ArrayList<>();\n    private String statement = \"\";\n\n    BaseContext(StringTokenizer tokenizer) {\n        this.tokenizer = tokenizer;\n    }\n\n    @Override\n    public void flush() {\n        if (!statement.trim().isEmpty()) {\n            statements.add(removeLeadingTrailingNewLines(statement));\n        }\n        statement = \"\";\n    }\n\n    @Override\n    public String statement() {\n        return statement;\n    }\n\n    @Override\n    public StringTokenizer tokenizer() {\n        return tokenizer;\n    }\n\n    @Override\n    public void append(String chars) {\n        statement += chars;\n    }\n\n    @Override\n    public void appendWith(Consumer consumer) {\n        append(consumer.consume(this));\n    }\n\n    public List<String> getStatements() {\n        return statements;\n    }\n}\n"
  },
  {
    "path": "src/main/java/com/klarna/hiverunner/sql/split/CloseStatementRule.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner.sql.split;\n\nimport java.util.Collections;\nimport java.util.Set;\n\n/** A {@link TokenRule} for handling statement terminating characters. */\npublic enum CloseStatementRule implements TokenRule {\n    INSTANCE;\n\n    @Override\n    public Set<String> triggers() {\n        return Collections.singleton(\";\");\n    }\n\n    @Override\n    public void handle(String token, Context context) {\n        // Only add statement that is not empty\n        context.flush();\n    }\n\n}\n"
  },
  {
    "path": "src/main/java/com/klarna/hiverunner/sql/split/Consumer.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner.sql.split;\n\nimport java.util.StringTokenizer;\n\n/**\n * Provide a means to direct the {@link StatementSplitter} in how it should consume tokens.\n */\npublic interface Consumer {\n\n    String consume(Context context);\n\n    /** A {@link Consumer} that consumes tokens until the end of the line. */\n    public static Consumer UNTIL_EOL = new Consumer() {\n\n        @Override\n        public String consume(Context context) {\n            StringBuilder builder = new StringBuilder();\n            StringTokenizer tokenizer = context.tokenizer();\n            while (tokenizer.hasMoreElements()) {\n                builder.append(tokenizer.nextElement());\n                if (builder.charAt(builder.length() - 1) == '\\n') {\n                    break;\n                }\n            }\n            return builder.toString();\n        }\n\n    };\n\n}\n"
  },
  {
    "path": "src/main/java/com/klarna/hiverunner/sql/split/Context.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner.sql.split;\n\nimport java.util.StringTokenizer;\n\n/**\n * Provides a means to modify and inspect the state of the parsing and splitting\n * of a script.\n */\npublic interface Context {\n    StringTokenizer tokenizer();\n\n    String statement();\n\n    void append(String chars);\n\n    void appendWith(Consumer consumer);\n\n    void flush();\n}\n"
  },
  {
    "path": "src/main/java/com/klarna/hiverunner/sql/split/DefaultTokenRule.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner.sql.split;\n\nimport java.util.Collections;\nimport java.util.Set;\n\n/** A {@link TokenRule} for handling general characters. */\npublic enum DefaultTokenRule implements TokenRule {\n    INSTANCE;\n\n    @Override\n    public Set<String> triggers() {\n        return Collections.emptySet();\n    }\n\n    @Override\n    public void handle(String token, Context context) {\n        context.append(token);\n    }\n\n}\n"
  },
  {
    "path": "src/main/java/com/klarna/hiverunner/sql/split/NewLineUtil.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner.sql.split;\n\nimport static java.lang.Character.isWhitespace;\n\nimport java.util.Arrays;\nimport java.util.HashSet;\nimport java.util.Set;\n\n/**\n * Removes all white space up to and including the newlines closest to the a sequence of non whitespace characters. The\n * aim here is to preserve the indentation of statements within scripts.\n */\nenum NewLineUtil {\n\n    INSTANCE;\n\n    private static final Set<Character> LINE_BREAKS = new HashSet<>(Arrays.<Character>asList('\\n', '\\r', '\\f'));\n\n    static String removeLeadingTrailingNewLines(String in) {\n        int leadingBreakPosition = -1;\n        for (int i = 0; i < in.length(); i++) {\n            char c = in.charAt(i);\n            if (!isWhitespace(c)) {\n                break;\n            }\n            if (LINE_BREAKS.contains(c)) {\n                leadingBreakPosition = i;\n            }\n        }\n\n        int trailingBreakPosition = -1;\n        for (int i = in.length() - 1; i >= 0; i--) {\n            char c = in.charAt(i);\n            if (!isWhitespace(c)) {\n                break;\n            }\n            if (LINE_BREAKS.contains(c)) {\n                trailingBreakPosition = i;\n            }\n        }\n        if (trailingBreakPosition == -1) {\n            trailingBreakPosition = in.length();\n        }\n        leadingBreakPosition++;\n        if (leadingBreakPosition >= trailingBreakPosition + 1) {\n            return \"\";\n        }\n        return in.substring(leadingBreakPosition, trailingBreakPosition);\n    }\n\n}\n"
  },
  {
    "path": "src/main/java/com/klarna/hiverunner/sql/split/PreserveCommentsRule.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner.sql.split;\n\nimport java.util.Collections;\nimport java.util.Set;\nimport java.util.regex.Pattern;\n\n/** A {@link TokenRule} for handling comments. */\npublic enum PreserveCommentsRule implements TokenRule {\n    INSTANCE;\n\n    static final Pattern START_OF_COMMENT_PATTERN = Pattern.compile(\".*\\\\s--\", Pattern.DOTALL);\n\n    @Override\n    public Set<String> triggers() {\n        return Collections.singleton(\"-\");\n    }\n\n    @Override\n    public void handle(String token, Context context) {\n        context.append(token);\n        if (START_OF_COMMENT_PATTERN.matcher(context.statement()).matches()) {\n            context.appendWith(Consumer.UNTIL_EOL);\n        }\n    }\n\n}\n"
  },
  {
    "path": "src/main/java/com/klarna/hiverunner/sql/split/PreserveQuotesRule.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner.sql.split;\n\nimport java.util.Arrays;\nimport java.util.HashSet;\nimport java.util.Set;\nimport java.util.regex.Pattern;\n\n/** A {@link TokenRule} for handling quoted character sequences. */\npublic enum PreserveQuotesRule implements TokenRule {\n    INSTANCE;\n\n    private static final Pattern LAST_CHAR_NOT_ESCAPED_PATTERN = Pattern.compile(\".*[^\\\\\\\\].\", Pattern.DOTALL);\n\n    @Override\n    public Set<String> triggers() {\n        return new HashSet<>(Arrays.asList(\"\\\"\", \"'\"));\n    }\n\n    @Override\n    public void handle(final String token, Context context) {\n        context.appendWith(new QuotedStringConsumer(token));\n    }\n\n    static class QuotedStringConsumer implements Consumer {\n\n        private final String token;\n\n        QuotedStringConsumer(String token) {\n            this.token = token;\n        }\n\n        @Override\n        public String consume(Context context) {\n            String quotedString = token;\n            while (context.tokenizer().hasMoreElements()) {\n                quotedString += (String) context.tokenizer().nextElement();\n                // If the last char is an end of quote token and it was not\n                // escaped by the previous token, we break.\n                if (quotedString.endsWith(token) && LAST_CHAR_NOT_ESCAPED_PATTERN.matcher(quotedString).matches()) {\n                    break;\n                }\n            }\n            return quotedString;\n        }\n    }\n\n}\n"
  },
  {
    "path": "src/main/java/com/klarna/hiverunner/sql/split/StatementSplitter.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner.sql.split;\n\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.StringTokenizer;\n\nimport com.klarna.hiverunner.builder.Statement;\nimport com.klarna.hiverunner.sql.HiveRunnerStatement;\nimport com.klarna.hiverunner.sql.cli.CommandShellEmulator;\n\n/**\n * Splits script text into statements according to a\n * {@link CommandShellEmulator}.\n */\npublic class StatementSplitter {\n\n    public static final String SQL_SPECIAL_CHARS = \";\\\"'-\\n\\r\\f\";\n\n    private final List<TokenRule> rules;\n    private final String specialChars;\n\n    public StatementSplitter(CommandShellEmulator emulator) {\n        this(emulator.splitterRules(), emulator.specialCharacters());\n    }\n\n    /**\n     * @param rules Order of rules defines processing precedence. \n     */\n    public StatementSplitter(List<TokenRule> rules, String specialChars) {\n        this.rules = rules;\n        this.specialChars = specialChars;\n    }\n\n    public List<Statement> split(String expression) {\n        StringTokenizer tokenizer = new StringTokenizer(expression, specialChars, true);\n        BaseContext context = new BaseContext(tokenizer);\n        while (tokenizer.hasMoreElements()) {\n            String token = (String) tokenizer.nextElement();\n            for (TokenRule rule : rules) {\n                if (rule.triggers().contains(token) || rule.triggers().isEmpty()) {\n                    rule.handle(token, context);\n                    break;\n                }\n            }\n        }\n\n        // Only add statement that is not empty\n        context.flush();\n\n        List<Statement> hiveRunnerStatements = new ArrayList<>();\n        int index = 0;\n        for (String statement : context.getStatements()) {\n            hiveRunnerStatements.add(new HiveRunnerStatement(index++, statement));\n        }\n\n        return hiveRunnerStatements;\n    }\n\n}\n"
  },
  {
    "path": "src/main/java/com/klarna/hiverunner/sql/split/TokenRule.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner.sql.split;\n\nimport java.util.Set;\n\n/** Allows the implementation of splitting rules based on specific tokens. */\npublic interface TokenRule {\n    Set<String> triggers();\n\n    void handle(String token, Context context);\n}\n"
  },
  {
    "path": "src/main/java/com/klarna/reflection/ReflectionUtils.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.reflection;\n\nimport com.google.common.base.Optional;\nimport com.google.common.base.Preconditions;\nimport com.google.common.base.Predicate;\nimport com.google.common.collect.Iterables;\n\nimport javax.annotation.Nullable;\nimport java.lang.reflect.Field;\nimport java.lang.reflect.Modifier;\nimport java.util.Set;\n\nimport static com.google.common.collect.Lists.newArrayList;\n\n/**\n * Collection of Reflection related helper functions.\n */\npublic final class ReflectionUtils {\n\n    /**\n     * Private constructor\n     */\n    private ReflectionUtils() {\n    }\n\n    public static void setStaticField(Class clazz, String fieldName, Object value) {\n        setField(clazz, null, fieldName, value);\n    }\n\n    public static void setField(Object instance, String fieldName, Object value) {\n        setField(instance.getClass(), instance, fieldName, value);\n    }\n\n    private static void setField(Class clazz, Object instance, String fieldName, Object value) {\n        try {\n            final Optional<Field> optional = getField(clazz, fieldName);\n            Preconditions.checkArgument(optional.isPresent(), \"Failed to set field '\" + fieldName + \"' on '\" + instance);\n            final Field field = optional.get();\n\n            boolean accessible = field.isAccessible();\n            field.setAccessible(true);\n            field.set(instance, value);\n            field.setAccessible(accessible);\n        } catch (IllegalAccessException e) {\n            throw new IllegalStateException(\n                    \"Failed to set field '\" + fieldName + \"' on '\" + instance + \"': \" + e.getMessage(), e);\n        }\n    }\n\n    /**\n     * Finds the first Field with given field name in the Class and in its super classes.\n     *\n     * @param type      The Class type\n     * @param fieldName The field name to get\n     * @return an {@code Optional}. Use isPresent() to find out if the field name was found.\n     */\n    public static Optional<Field> getField(Class<?> type, final String fieldName) {\n        Optional<Field> field = Iterables.tryFind(newArrayList(type.getDeclaredFields()), havingFieldName(fieldName));\n\n        if (!field.isPresent() && type.getSuperclass() != null) {\n            field = getField(type.getSuperclass(), fieldName);\n        }\n\n        return field;\n    }\n\n    public static Set<Field> getAllFields(Class aClass, Predicate<? super Field> predicate) {\n        return org.reflections.ReflectionUtils.getAllFields(aClass, predicate);\n    }\n\n    public static <T> T getFieldValue(Object testCase, String name, Class<T> type) {\n        return getFieldValue(testCase, testCase.getClass(), name, type, false);\n    }\n\n    public static <T> T getStaticFieldValue(Class testCaseClass, String name, Class<T> type) {\n        return getFieldValue(null, testCaseClass, name, type, true);\n    }\n\n    private static <T> T getFieldValue(Object testCase, Class testCaseClass, String name, Class<T> type, boolean isStatic) {\n        try {\n            Field field = testCaseClass.getDeclaredField(name);\n            boolean accessible = field.isAccessible();\n\n            Preconditions.checkState(field.getType().isAssignableFrom(type), \"Field %s must be assignable from \", type);\n            Preconditions.checkState(!isStatic || Modifier.isStatic(field.getModifiers()), \"Field %s must be static \", field);\n\n            field.setAccessible(true);\n            Object value = field.get(testCase);\n            field.setAccessible(accessible);\n            return (T) value;\n        } catch (NoSuchFieldException e) {\n            throw new IllegalArgumentException(\n                    \"Failed to lookup field '\" + name + \"' for '\" + testCaseClass + \"': \" + e.getMessage(), e);\n        } catch (IllegalAccessException e) {\n            throw new IllegalArgumentException(\n                    \"Failed to get value of field '\" + name + \"' for '\" + testCaseClass + \"': \" + e.getMessage(), e);\n        }\n    }\n\n    public static boolean isOfType(Field setupScriptField, Class type) {\n        return setupScriptField.getType().isAssignableFrom(type);\n    }\n\n\n    private static Predicate<Field> havingFieldName(final String fieldName) {\n        return new Predicate<Field>() {\n            @Override\n            public boolean apply(@Nullable Field field) {\n                return fieldName.equals(field.getName());\n            }\n        };\n    }\n\n}\n"
  },
  {
    "path": "src/main/license/APACHE-2.txt",
    "content": "Copyright (C) 2013-2021 Klarna AB\nCopyright (C) ${license.git.copyrightYears} ${owner}\n\nLicensed under the Apache License, Version 2.0 (the \"License\");\nyou may not use this file except in compliance with the License.\nYou may obtain a copy of the License at\n\nhttp://www.apache.org/licenses/LICENSE-2.0\n\nUnless required by applicable law or agreed to in writing, software\ndistributed under the License is distributed on an \"AS IS\" BASIS,\nWITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\nSee the License for the specific language governing permissions and\nlimitations under the License.\n"
  },
  {
    "path": "src/test/java/com/klarna/hiverunner/AggregateViewTest.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner;\n\nimport static org.hamcrest.CoreMatchers.is;\nimport static org.hamcrest.MatcherAssert.assertThat;\n\nimport java.nio.file.Paths;\nimport java.util.Arrays;\nimport java.util.List;\n\nimport org.junit.jupiter.api.Test;\nimport org.junit.jupiter.api.extension.ExtendWith;\n\nimport com.klarna.hiverunner.annotations.HiveSQL;\n\n@ExtendWith(HiveRunnerExtension.class)\npublic class AggregateViewTest {\n\n    @HiveSQL(files = {})\n    private HiveShell shell;\n\n    /**\n     * Adding unit test to check that issue#70 (https://github.com/klarna/HiveRunner/issues/70) doesn't happen anymore.\n     * This bug is solved when upgrading HiveRunner to version 4.0.0 or above (most likely due to move from Hive 1.x to 2.x).\n     */\n    @Test\n    public void aggregateView() {\n        this.shell.execute(Paths.get(\"src/test/resources/AggregateViewTest/create_table.sql\"));\n        shell.insertInto(\"db\", \"mvtdescriptionchangeinfo\").addRow(\"123\", \"testname\", \"REMOVED\", \"contents of test...\", \"hostname\", \"6/21/17\", \"20\").commit();\n        List<String> result = shell.executeQuery(\"SELECT * FROM db.latesttestchangepairs\");\n        List<String> expected = Arrays.asList(\"testname\\tREMOVED\");\n        assertThat(result, is(expected));\n    }\n\n}\n"
  },
  {
    "path": "src/test/java/com/klarna/hiverunner/AnnotatedBaseTestClass.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner;\n\nimport com.klarna.hiverunner.annotations.HiveSQL;\nimport org.junit.jupiter.api.BeforeEach;\nimport org.junit.jupiter.api.extension.ExtendWith;\n\n@ExtendWith(HiveRunnerExtension.class)\npublic abstract class AnnotatedBaseTestClass {\n    @HiveSQL(files = {})\n    protected HiveShell shell;\n\n    @BeforeEach\n    public void setup() {\n        shell.execute(\"create database test_db\");\n\n        shell.execute(new StringBuilder()\n                .append(\"create table test_db.test_table (\")\n                .append(\"c0 string\")\n                .append(\")\")\n                .toString());\n\n        shell.insertInto(\"test_db\", \"test_table\")\n                .addRow(\"v1\")\n                .commit();\n    }\n}\n"
  },
  {
    "path": "src/test/java/com/klarna/hiverunner/AnnotatedFieldsInSuperClassTest.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner;\n\nimport org.junit.jupiter.api.Test;\n\npublic class AnnotatedFieldsInSuperClassTest extends AnnotatedBaseTestClass {\n    @Test\n    public void testShellInitializedInAbstractTestClass() {\n        shell.executeQuery(\"select * from test_db.test_table\");\n    }\n}\n\n"
  },
  {
    "path": "src/test/java/com/klarna/hiverunner/BeelineRunTest.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner;\n\nimport static org.hamcrest.CoreMatchers.is;\nimport static org.hamcrest.MatcherAssert.assertThat;\n\nimport java.io.File;\nimport java.io.PrintStream;\nimport java.util.List;\n\nimport org.junit.Rule;\nimport org.junit.Test;\nimport org.junit.rules.TemporaryFolder;\nimport org.junit.runner.RunWith;\n\nimport com.klarna.hiverunner.annotations.HiveRunnerSetup;\nimport com.klarna.hiverunner.annotations.HiveSQL;\nimport com.klarna.hiverunner.config.HiveRunnerConfig;\nimport com.klarna.hiverunner.sql.cli.beeline.BeelineEmulator;\n\n@RunWith(StandaloneHiveRunner.class)\npublic class BeelineRunTest {\n\n    private static final String TEST_DB = \"test_db\";\n\n    @HiveRunnerSetup\n    public final static HiveRunnerConfig CONFIG = new HiveRunnerConfig() {\n        {\n            setCommandShellEmulator(BeelineEmulator.INSTANCE);\n        }\n    };\n\n    @Rule\n    public TemporaryFolder temp = new TemporaryFolder();\n\n    @HiveSQL(files = {}, encoding = \"UTF-8\", autoStart = false)\n    private HiveShell hiveCliShell;\n\n    @Test\n    public void testNestedImport() throws Exception {\n        File a = new File(temp.getRoot(), \"a.sql\");\n        try (PrintStream out = new PrintStream(a)) {\n            // single statement case\n            out.println(\"create 
view ${db}.a as select * from ${db}.src where c1 <> 'z'\");\n        }\n\n        File b = new File(temp.getRoot(), \"b.sql\");\n        try (PrintStream out = new PrintStream(b)) {\n            // multi statement case with script import\n            out.println(\"!run a.sql\");\n            out.println(\"create database db_b;\");\n            out.println(\"create view db_b.b as select c0, count(*) as c1_cnt from ${db}.a group by c0;\");\n        }\n\n        File c = new File(temp.getRoot(), \"c.sql\");\n        try (PrintStream out = new PrintStream(c)) {\n            // multi statement case\n            out.println(\"create database db_c;\");\n            out.println(\"create view db_c.c as select * from db_b.b where c1_cnt > 1;\");\n        }\n\n        File main = new File(temp.getRoot(), \"main.sql\");\n        try (PrintStream out = new PrintStream(main)) {\n            // multi import case\n            out.println(\"!run b.sql\");\n            out.println(\"!run c.sql\");\n        }\n\n        hiveCliShell.setHiveVarValue(\"db\", TEST_DB);\n        hiveCliShell.setCwd(temp.getRoot().toPath());\n        hiveCliShell.start();\n        hiveCliShell.execute(new StringBuilder()\n                .append(\"create database ${db};\")\n                .append(\"create table ${db}.src (\")\n                .append(\"c0 string, \")\n                .append(\"c1 string\")\n                .append(\");\")\n                .toString());\n        hiveCliShell.insertInto(TEST_DB, \"src\")\n                .addRow(\"A\", \"x\")\n                .addRow(\"A\", \"y\")\n                .addRow(\"B\", \"z\")\n                .addRow(\"B\", \"y\")\n                .addRow(\"C\", \"z\")\n                .commit();\n\n\n        hiveCliShell.execute(main);\n\n        List<String> results = hiveCliShell.executeQuery(\"select * from db_c.c\");\n        assertThat(results.size(), is(1));\n        assertThat(results.get(0), is(\"A\\t2\"));\n    }\n\n}\n"
  },
  {
    "path": "src/test/java/com/klarna/hiverunner/BigResultSetTest.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner;\n\nimport com.klarna.hiverunner.annotations.HiveSQL;\nimport org.junit.jupiter.api.Assertions;\nimport org.junit.jupiter.api.Test;\nimport org.junit.jupiter.api.extension.ExtendWith;\n\nimport java.io.IOException;\nimport java.io.OutputStream;\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.UUID;\n\n@ExtendWith(HiveRunnerExtension.class)\npublic class BigResultSetTest {\n\n    @HiveSQL(files = {}, autoStart = false)\n    private HiveShell hiveShell;\n\n\n    /**\n     * This test verifies that we can fetch more than 100 rows of data from hive.\n     * This test was added due to tests failing with result sets bigger than 100 rows.\n     */\n    @Test\n    public void bigResultSetTest() throws IOException {\n        hiveShell.setHiveConfValue(\"location\", \"${hiveconf:hadoop.tmp.dir}/foo\");\n        hiveShell.addSetupScript(\"CREATE table FOO (s String) LOCATION '${hiveconf:location}'\");\n        OutputStream ros = hiveShell.getResourceOutputStream(\"${hiveconf:location}/foo.data\");\n\n        List<String> rows = new ArrayList<>();\n\n        for (int i = 0; i < 1099; i++) {\n            String row = UUID.randomUUID().toString();\n            rows.add(row);\n            ros.write((row + \"\\n\").getBytes());\n        }\n\n        
hiveShell.start();\n\n        Assertions.assertEquals(rows, hiveShell.executeQuery(\"select * from FOO\"));\n\n    }\n}\n"
  },
  {
    "path": "src/test/java/com/klarna/hiverunner/CommentTest.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner;\n\nimport static org.junit.jupiter.api.Assertions.assertEquals;\n\nimport java.util.Arrays;\nimport java.util.List;\n\nimport org.junit.jupiter.api.Test;\nimport org.junit.jupiter.api.extension.ExtendWith;\n\nimport com.klarna.hiverunner.annotations.HiveSQL;\n\n@ExtendWith(HiveRunnerExtension.class)\npublic class CommentTest {\n    @HiveSQL(files = {\"CommentTest/comment.sql\"})\n    public HiveShell hiveShell;\n\n    @Test\n    public void testPreceedingFullLineComment() {\n        List<String> results = hiveShell.executeQuery(\"set x\");\n        assertEquals(Arrays.asList(\"x=1\"), results);\n    }\n\n    @Test\n    public void testFullLineCommentInsideDeclaration() {\n        List<String> results = hiveShell.executeQuery(\"set y\");\n        assertEquals(Arrays.asList(\"y=\\\"\", \"\\\"\"), results);\n    }\n\n}\n"
  },
  {
    "path": "src/test/java/com/klarna/hiverunner/CtasTest.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner;\n\nimport com.klarna.hiverunner.annotations.HiveResource;\nimport com.klarna.hiverunner.annotations.HiveSQL;\nimport org.junit.jupiter.api.Assertions;\nimport org.junit.jupiter.api.Test;\nimport org.junit.jupiter.api.extension.ExtendWith;\n\nimport java.util.Arrays;\nimport java.util.List;\n\n@ExtendWith(HiveRunnerExtension.class)\npublic class CtasTest {\n\n    @HiveResource(targetFile = \"${hiveconf:hadoop.tmp.dir}/foo/data.csv\")\n    private String data = \"A,B\\nC,D\\nE,F\";\n\n    @HiveSQL(files = {\"CtasTest/ctas.sql\"})\n    private HiveShell hiveShell;\n\n    @Test\n    public void tablesShouldBeCreated() {\n        List<String> expected = Arrays.asList(\"foo\", \"foo_prim\");\n        List<String> actual = hiveShell.executeQuery(\"show tables\");\n        Assertions.assertEquals(expected, actual);\n    }\n\n    @Test\n    public void verifyThatDataIsAvailableInCtas() {\n        List<String> expected = Arrays.asList(\"A\\tB\", \"C\\tD\", \"E\\tF\");\n        List<String> actual = hiveShell.executeQuery(\"select * from foo_prim\");\n        Assertions.assertEquals(expected, actual);\n    }\n\n    @Test\n    public void testCountCtas() {\n        List<String> expected = Arrays.asList(\"3\");\n        List<String> actual = hiveShell.executeQuery(\"select 
count(*) from foo_prim\");\n        Assertions.assertEquals(expected, actual);\n    }\n\n}\n"
  },
  {
    "path": "src/test/java/com/klarna/hiverunner/DisabledTimeoutTest.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner;\n\nimport com.klarna.hiverunner.annotations.HiveRunnerSetup;\nimport com.klarna.hiverunner.annotations.HiveSQL;\nimport com.klarna.hiverunner.config.HiveRunnerConfig;\nimport org.junit.jupiter.api.Test;\nimport org.junit.jupiter.api.extension.ExtendWith;\n\n@ExtendWith(HiveRunnerExtension.class)\npublic class DisabledTimeoutTest {\n\n    @HiveRunnerSetup\n    public final static HiveRunnerConfig CONFIG = new HiveRunnerConfig() {{\n        setTimeoutEnabled(false);\n        setTimeoutSeconds(5);\n    }};\n\n    @HiveSQL(files = {})\n    private HiveShell hiveShell;\n\n    @Test\n    public void finishAfterTimeoutTest() throws InterruptedException {\n        Thread.sleep(6 * 1000);\n    }\n}\n"
  },
  {
    "path": "src/test/java/com/klarna/hiverunner/ExecuteFileBasedScriptIntegrationTest.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner;\n\nimport static org.hamcrest.CoreMatchers.is;\nimport static org.hamcrest.MatcherAssert.assertThat;\n\nimport static com.google.common.base.Charsets.UTF_8;\n\nimport java.io.File;\nimport java.io.IOException;\nimport java.io.PrintStream;\nimport java.nio.charset.Charset;\nimport java.util.List;\n\nimport org.junit.Rule;\nimport org.junit.Test;\nimport org.junit.rules.TemporaryFolder;\nimport org.junit.runner.RunWith;\n\nimport com.klarna.hiverunner.annotations.HiveSQL;\n\n@RunWith(StandaloneHiveRunner.class)\npublic class ExecuteFileBasedScriptIntegrationTest {\n\n    @Rule\n    public TemporaryFolder temp = new TemporaryFolder();\n\n    @HiveSQL(files = {})\n    private HiveShell hiveShell;\n\n    @Test\n    public void testExecuteFileBasedScript() throws IOException {\n        File hqlScriptFile = temp.newFile(\"get_current_database.hql\");\n\n        try (PrintStream out = new PrintStream(hqlScriptFile)) {\n            out.println(\"select current_database(), NULL, 100;\");\n        }\n\n        hiveShell.execute(hqlScriptFile);\n\n        Charset optionalCharset = UTF_8;\n        List<String> results = hiveShell.executeQuery(optionalCharset, hqlScriptFile, \" optional_column_delimiter \", \"optional_null_replacement\");\n\n        assertThat(results.size(), 
is(1));\n        assertThat(results.get(0), is(\"default optional_column_delimiter optional_null_replacement optional_column_delimiter 100\"));\n    }\n}\n"
  },
  {
    "path": "src/test/java/com/klarna/hiverunner/ExecuteScriptIntegrationTest.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner;\n\nimport static org.hamcrest.CoreMatchers.is;\nimport static org.hamcrest.MatcherAssert.assertThat;\n\nimport java.io.File;\nimport java.io.IOException;\nimport java.io.PrintStream;\nimport java.util.List;\n\nimport org.junit.Rule;\nimport org.junit.Test;\nimport org.junit.rules.TemporaryFolder;\nimport org.junit.runner.RunWith;\n\nimport com.klarna.hiverunner.annotations.HiveSQL;\n\n@RunWith(StandaloneHiveRunner.class)\npublic class ExecuteScriptIntegrationTest {\n\n    @Rule\n    public TemporaryFolder temp = new TemporaryFolder();\n\n    @HiveSQL(files = {})\n    private HiveShell hiveShell;\n\n    @Test\n    public void testInsertRowWithExecuteScript() throws IOException {\n        File file = new File(temp.getRoot(), \"insert_data.hql\");\n\n        try (PrintStream out = new PrintStream(file)) {\n            out.println(\"create database test_db;\");\n            out.println(\"create table test_db.test_table (\");\n            out.println(\"  c0 string\");\n            out.println(\")\");\n            out.println(\"stored as orc;\");\n            out.println(\"insert into table test_db.test_table values ('v1');\");\n        }\n\n        hiveShell.execute(file);\n\n        List<String> result = hiveShell.executeQuery(\"select c0 from 
test_db.test_table\");\n\n        assertThat(result.size(), is(1));\n        assertThat(result.get(0), is(\"v1\"));\n    }\n}\n"
  },
  {
    "path": "src/test/java/com/klarna/hiverunner/HiveCliSourceTest.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner;\n\nimport static org.hamcrest.CoreMatchers.is;\nimport static org.hamcrest.MatcherAssert.assertThat;\n\nimport java.io.File;\nimport java.io.PrintStream;\nimport java.util.List;\n\nimport org.junit.Rule;\nimport org.junit.Test;\nimport org.junit.rules.TemporaryFolder;\nimport org.junit.runner.RunWith;\n\nimport com.klarna.hiverunner.annotations.HiveRunnerSetup;\nimport com.klarna.hiverunner.annotations.HiveSQL;\nimport com.klarna.hiverunner.config.HiveRunnerConfig;\nimport com.klarna.hiverunner.sql.cli.hive.HiveCliEmulator;\n\n@RunWith(StandaloneHiveRunner.class)\npublic class HiveCliSourceTest {\n\n    private static final String TEST_DB = \"test_db\";\n\n    @HiveRunnerSetup\n    public final static HiveRunnerConfig CONFIG = new HiveRunnerConfig() {\n        {\n            setCommandShellEmulator(HiveCliEmulator.INSTANCE);\n        }\n    };\n\n    @Rule\n    public TemporaryFolder temp = new TemporaryFolder();\n\n    @HiveSQL(files = {}, encoding = \"UTF-8\", autoStart = false)\n    private HiveShell hiveCliShell;\n\n    @Test\n    public void testNestedImport() throws Exception {\n        File a = new File(temp.getRoot(), \"a.hql\");\n        try (PrintStream out = new PrintStream(a)) {\n            // single statement case\n            out.println(\"create 
view ${db}.a as select * from ${db}.src where c1 <> 'z'\");\n        }\n\n        File b = new File(temp.getRoot(), \"b.hql\");\n        try (PrintStream out = new PrintStream(b)) {\n            // multi statement case with script import\n            out.println(\"source a.hql;\");\n            out.println(\"create database db_b;\");\n            out.println(\"create view db_b.b as select c0, count(*) as c1_cnt from ${db}.a group by c0;\");\n        }\n\n        File c = new File(temp.getRoot(), \"c.hql\");\n        try (PrintStream out = new PrintStream(c)) {\n            // multi statement case\n            out.println(\"create database db_c;\");\n            out.println(\"create view db_c.c as select * from db_b.b where c1_cnt > 1;\");\n        }\n\n        File main = new File(temp.getRoot(), \"main.hql\");\n        try (PrintStream out = new PrintStream(main)) {\n            // multi import case\n            out.println(\"source b.hql;\");\n            out.println(\"source\\nc.hql\\n;\");\n        }\n\n        hiveCliShell.setHiveVarValue(\"db\", TEST_DB);\n        hiveCliShell.setCwd(temp.getRoot().toPath());\n        hiveCliShell.start();\n        hiveCliShell.execute(new StringBuilder()\n                .append(\"create database ${db};\")\n                .append(\"create table ${db}.src (\")\n                .append(\"c0 string, \")\n                .append(\"c1 string\")\n                .append(\");\")\n                .toString());\n        hiveCliShell.insertInto(TEST_DB, \"src\")\n                .addRow(\"A\", \"x\")\n                .addRow(\"A\", \"y\")\n                .addRow(\"B\", \"z\")\n                .addRow(\"B\", \"y\")\n                .addRow(\"C\", \"z\")\n                .commit();\n\n\n        hiveCliShell.execute(main);\n\n        List<String> results = hiveCliShell.executeQuery(\"select * from db_c.c\");\n        assertThat(results.size(), is(1));\n        assertThat(results.get(0), is(\"A\\t2\"));\n    }\n\n}\n"
  },
  {
    "path": "src/test/java/com/klarna/hiverunner/HiveRunnerAnnotationsTest.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner;\n\nimport com.klarna.hiverunner.annotations.HiveProperties;\nimport com.klarna.hiverunner.annotations.HiveResource;\nimport com.klarna.hiverunner.annotations.HiveSQL;\nimport com.klarna.hiverunner.annotations.HiveSetupScript;\nimport org.apache.commons.collections.MapUtils;\nimport org.junit.jupiter.api.Assertions;\nimport org.junit.jupiter.api.BeforeEach;\nimport org.junit.jupiter.api.Test;\nimport org.junit.jupiter.api.extension.ExtendWith;\n\nimport java.io.File;\nimport java.nio.file.Path;\nimport java.nio.file.Paths;\nimport java.util.HashMap;\nimport java.util.List;\nimport java.util.Map;\n\nimport static org.hamcrest.MatcherAssert.assertThat;\nimport static org.hamcrest.collection.IsArrayContaining.hasItemInArray;\n\n@ExtendWith(HiveRunnerExtension.class)\npublic class HiveRunnerAnnotationsTest {\n\n    @HiveSetupScript\n    private File setupFile = new File(ClassLoader.getSystemResource(\"HiveRunnerAnnotationsTest/setupFile.csv\").getPath());\n\n    @HiveSetupScript\n    private Path setupPath = Paths.get(ClassLoader.getSystemResource(\"HiveRunnerAnnotationsTest/setupPath.csv\").getPath());\n\n\n    @HiveSetupScript\n    private String setup = \"create table bar (i int);\";\n\n    @HiveProperties\n    private Map<String, String> props = 
MapUtils.putAll(new HashMap(), new Object[]{\n            \"key1\", \"value1\",\n            \"key2\", \"value2\"\n    });\n\n    @HiveSQL(files = {\"HiveRunnerAnnotationsTest/hql1.sql\"}, autoStart = false)\n    private HiveShell hiveShell;\n\n    @HiveResource(targetFile = \"${hiveconf:hadoop.tmp.dir}/foo/fromString.csv\")\n    public String dataFromString = \"1,B\\n2,D\\nE,F\";\n\n    @HiveResource(targetFile = \"${hiveconf:hadoop.tmp.dir}/foo/fromFile.csv\")\n    public File dataFromFile = new File(ClassLoader.getSystemResource(\"HiveRunnerAnnotationsTest/testData.csv\").getPath());\n\n    @HiveResource(targetFile = \"${hiveconf:hadoop.tmp.dir}/foo/fromPath.csv\")\n    public Path dataFromPath = Paths.get(ClassLoader.getSystemResource(\"HiveRunnerAnnotationsTest/testData2.csv\").getPath());\n\n    @BeforeEach\n    public void setup() {\n        hiveShell.start();\n    }\n\n    @Test\n    public void testHiveSQLLoaded() {\n        List<String> actual = hiveShell.executeQuery(\"show tables\");\n        String[] actualArray = actual.toArray(new String[0]);\n        assertThat(actualArray, hasItemInArray(\"bar\"));\n    }\n\n    @Test\n    public void testSetupScript() {\n        List<String> actual = hiveShell.executeQuery(\"show tables\");\n        String[] actualArray = actual.toArray(new String[0]);\n        assertThat(actualArray, hasItemInArray(\"foo\"));\n    }\n\n    @Test\n    public void testSetupScriptFromFile() {\n        List<String> actual = hiveShell.executeQuery(\"show tables\");\n        String[] actualArray = actual.toArray(new String[0]);\n        assertThat(actualArray, hasItemInArray(\"fox\"));\n    }\n\n    @Test\n    public void testSetupScriptFromPath() {\n        List<String> actual = hiveShell.executeQuery(\"show tables\");\n        String[] actualArray = actual.toArray(new String[0]);\n        assertThat(actualArray, hasItemInArray(\"love\"));\n    }\n\n\n    @Test\n    public void testPropertiesLoaded() {\n        
Assertions.assertEquals(\"value1\", hiveShell.getHiveConf().get(\"key1\"));\n        Assertions.assertEquals(\"value2\", hiveShell.getHiveConf().get(\"key2\"));\n    }\n\n    @Test\n    public void testLoadStringResources() {\n        String[] actual = hiveShell.executeQuery(\"select * from foo\").toArray(new String[0]);\n\n        assertThat(actual, hasItemInArray(\"1\\tB\"));\n        assertThat(actual, hasItemInArray(\"2\\tD\"));\n        assertThat(actual, hasItemInArray(\"NULL\\tF\"));\n    }\n\n    @Test\n    public void testLoadFileResources() {\n        String[] actual = hiveShell.executeQuery(\"select * from foo\").toArray(new String[0]);\n        assertThat(actual, hasItemInArray(\"5\\tF\"));\n        assertThat(actual, hasItemInArray(\"7\\tW\"));\n    }\n\n    @Test\n    public void testLoadPathResources() {\n        String[] actual = hiveShell.executeQuery(\"select * from foo\").toArray(new String[0]);\n        assertThat(actual, hasItemInArray(\"8\\tT\"));\n        assertThat(actual, hasItemInArray(\"10\\tQ\"));\n    }\n\n\n}\n"
  },
  {
    "path": "src/test/java/com/klarna/hiverunner/HiveRunnerExtensionTest.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner;\n\nimport static org.hamcrest.CoreMatchers.is;\nimport static org.junit.Assert.assertThat;\n\nimport java.util.Arrays;\nimport java.util.List;\n\nimport org.junit.jupiter.api.Test;\nimport org.junit.jupiter.api.extension.ExtendWith;\n\nimport com.klarna.hiverunner.annotations.HiveSQL;\n\n@ExtendWith(HiveRunnerExtension.class)\npublic class HiveRunnerExtensionTest {\n\n    @HiveSQL(files = {\"HiveRunnerExtensionTest/test_query.sql\"})\n    private HiveShell shell;\n\n    @Test\n    public void shellFindFiles() {\n        shell.insertInto(\"testdb\", \"test_table\").addRow(\"v1\", \"v2\").commit();\n        List<String> actual = shell.executeQuery(\"select * from testdb.test_table\");\n        List<String> expected = Arrays.asList(\"v1\\tv2\");\n        assertThat(actual, is(expected));\n    }\n\n}\n"
  },
  {
    "path": "src/test/java/com/klarna/hiverunner/HiveServerContainerTest.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner;\n\nimport java.io.IOException;\nimport java.io.PrintStream;\nimport java.nio.file.Files;\nimport java.nio.file.Path;\nimport java.util.HashMap;\nimport java.util.List;\n\nimport org.apache.hadoop.hive.conf.HiveConf;\nimport org.apache.hive.service.cli.HiveSQLException;\nimport org.junit.jupiter.api.AfterEach;\nimport org.junit.jupiter.api.Assertions;\nimport org.junit.jupiter.api.BeforeEach;\nimport org.junit.jupiter.api.Test;\n\nimport com.klarna.hiverunner.config.HiveRunnerConfig;\n\npublic class HiveServerContainerTest {\n\n    private Path basedir;\n    private HiveServerContainer container;\n\n    @BeforeEach\n    public void setup() throws IOException {\n        basedir = Files.createTempDirectory(\"HiveServerContainerTest\");\n        StandaloneHiveServerContext context = new StandaloneHiveServerContext(basedir, new HiveRunnerConfig());\n        context.getHiveConf().setBoolVar(HiveConf.ConfVars.HIVE_IN_TEST, true);\n        container = new HiveServerContainer(context);\n        container.init(new HashMap<>(), new HashMap<>());\n    }\n\n    @AfterEach\n    public void tearDown() {\n        container.tearDown();\n    }\n\n    @Test\n    public void testGetBasedir() {\n        Assertions.assertEquals(basedir.getRoot(), container.getBaseDir().getRoot());\n   
 }\n\n    @Test\n    public void testExecuteStatementMR() {\n        List<Object[]> actual = container.executeStatement(\"show databases\");\n        Assertions.assertEquals(1, actual.size());\n        Assertions.assertArrayEquals(new Object[]{\"default\"}, actual.get(0));\n    }\n\n    @Test\n    public void testExecuteStatementTez() {\n        List<Object[]> actual = container.executeStatement(\"show databases\");\n        Assertions.assertEquals(1, actual.size());\n        Assertions.assertArrayEquals(new Object[]{\"default\"}, actual.get(0));\n    }\n\n    @Test\n    public void testExecuteStatementOutputStreamReset() {\n        PrintStream initialPrintStream = System.out;\n        container.executeStatement(\"show databases\");\n        Assertions.assertEquals(initialPrintStream, System.out);\n    }\n\n    @Test\n    public void testExecuteStatementOutputStreamResetIfException() {\n        PrintStream initialPrintStream = System.out;\n        try {\n            container.executeStatement(\"use non-existent\");\n            Assertions.fail(\"Exception should be thrown\");\n        } catch (IllegalArgumentException e) {\n            Assertions.assertEquals(initialPrintStream, System.out);\n        }\n    }\n\n    @Test\n    public void testTearDownShouldNotThrowException() {\n        container.tearDown();\n        container.tearDown();\n        container.tearDown();\n    }\n\n    @Test\n    public void testInvalidQuery() throws Throwable {\n        try {\n            container.executeStatement(\"use foo\");\n        } catch (IllegalArgumentException e) {\n            Assertions.assertThrows(HiveSQLException.class, () -> {\n                throw e.getCause();\n            });\n        }\n    }\n}\n"
  },
  {
    "path": "src/test/java/com/klarna/hiverunner/HiveShellBeeLineEmulationTest.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner;\n\nimport static org.hamcrest.CoreMatchers.is;\nimport static org.hamcrest.MatcherAssert.assertThat;\n\nimport java.util.Arrays;\nimport java.util.List;\n\nimport org.junit.jupiter.api.Assertions;\nimport org.junit.jupiter.api.Test;\nimport org.junit.jupiter.api.extension.ExtendWith;\n\nimport com.klarna.hiverunner.annotations.HiveRunnerSetup;\nimport com.klarna.hiverunner.annotations.HiveSQL;\nimport com.klarna.hiverunner.config.HiveRunnerConfig;\nimport com.klarna.hiverunner.sql.cli.beeline.BeelineEmulator;\n\n@ExtendWith(HiveRunnerExtension.class)\npublic class HiveShellBeeLineEmulationTest {\n\n    @HiveRunnerSetup\n    public final static HiveRunnerConfig CONFIG = new HiveRunnerConfig() {{\n        setCommandShellEmulator(BeelineEmulator.INSTANCE);\n    }};\n\n    @HiveSQL(files = {}, encoding = \"UTF-8\")\n    private HiveShell beeLineShell;\n\n    /** Failure described in HIVE-8396 should be avoided for beeline. */\n    @Test\n    public void testQueryStripFullLineCommentFirstLine() {\n        beeLineShell.executeQuery(\"-- a\\nset x=1\");\n        List<String> results = beeLineShell.executeQuery(\"set x\");\n        assertThat(results, is(Arrays.asList(\"x=1\")));\n    }\n\n    /** Beeline strips comment before assignment. 
*/\n    @Test\n    public void testQueryStripFullLineCommentNested() {\n        beeLineShell.executeQuery(\"set x=\\n-- a\\n1\");\n        List<String> results = beeLineShell.executeQuery(\"set x\");\n        assertThat(results, is(Arrays.asList(\"x=1\")));\n    }\n\n    @Test\n    public void testQueryStripFullLineComment() {\n        Assertions.assertThrows(IllegalArgumentException.class, () -> beeLineShell.executeQuery(\"-- a\"));\n    }\n\n    @Test\n    public void testScriptStripFullLineCommentFirstLine() {\n        beeLineShell.execute(\"-- a\\nset x=1;\");\n        List<String> results = beeLineShell.executeQuery(\"set x\");\n        assertThat(results, is(Arrays.asList(\"x=1\")));\n    }\n\n    @Test\n    public void testScriptStripFullLineCommentLastLine() {\n        beeLineShell.execute(\"set x=1;\\n-- a\");\n        List<String> results = beeLineShell.executeQuery(\"set x\");\n        assertThat(results, is(Arrays.asList(\"x=1\")));\n    }\n\n    @Test\n    public void testScriptStripFullLineComment() {\n        beeLineShell.execute(\"-- a\");\n    }\n\n    @Test\n    public void testScriptStripFullLineCommentNested() {\n        beeLineShell.execute(\"set x=\\n-- a\\n1;\");\n        List<String> results = beeLineShell.executeQuery(\"set x\");\n        assertThat(results, is(Arrays.asList(\"x=1\")));\n    }\n\n}\n"
  },
  {
    "path": "src/test/java/com/klarna/hiverunner/HiveShellHiveCliEmulationTest.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner;\n\nimport static org.hamcrest.CoreMatchers.is;\nimport static org.hamcrest.MatcherAssert.assertThat;\n\nimport java.util.Arrays;\nimport java.util.List;\n\nimport org.junit.jupiter.api.Assertions;\nimport org.junit.jupiter.api.Test;\nimport org.junit.jupiter.api.extension.ExtendWith;\n\nimport com.klarna.hiverunner.annotations.HiveRunnerSetup;\nimport com.klarna.hiverunner.annotations.HiveSQL;\nimport com.klarna.hiverunner.config.HiveRunnerConfig;\nimport com.klarna.hiverunner.sql.cli.hive.HiveCliEmulator;\n\n@ExtendWith(HiveRunnerExtension.class)\npublic class HiveShellHiveCliEmulationTest {\n\n    @HiveRunnerSetup\n    public final static HiveRunnerConfig CONFIG = new HiveRunnerConfig() {{\n        setCommandShellEmulator(HiveCliEmulator.INSTANCE);\n    }};\n\n    @HiveSQL(files = {}, encoding = \"UTF-8\")\n    private HiveShell hiveCliShell;\n\n    /** Does not exhibit the behaviour described in HIVE-8396. */\n    @Test\n    public void testQueryStripFullLineCommentFirstLine() {\n        hiveCliShell.executeQuery(\"-- a\\nset x=1\");\n    }\n\n    /** Does not exhibit the behaviour described in HIVE-8396. 
*/\n    @Test\n    public void testQueryStripFullLineCommentNested() {\n        hiveCliShell.executeQuery(\"set x=\\n-- a\\n1\");\n        List<String> results = hiveCliShell.executeQuery(\"set x\");\n        assertThat(results, is(Arrays.asList(\"x=1\")));\n    }\n\n    @Test\n    public void testQueryStripFullLineComment() {\n        Assertions.assertThrows(IllegalArgumentException.class, () -> hiveCliShell.executeQuery(\"-- a\"));\n    }\n\n    @Test\n    public void testScriptStripFullLineCommentFirstLine() {\n        hiveCliShell.execute(\"-- a\\nset x=1;\");\n        List<String> results = hiveCliShell.executeQuery(\"set x\");\n        assertThat(results, is(Arrays.asList(\"x=1\")));\n    }\n\n    @Test\n    public void testScriptStripFullLineCommentLastLine() {\n        hiveCliShell.execute(\"set x=1;\\n-- a\");\n        List<String> results = hiveCliShell.executeQuery(\"set x\");\n        assertThat(results, is(Arrays.asList(\"x=1\")));\n    }\n\n    @Test\n    public void testScriptStripFullLineComment() {\n        hiveCliShell.execute(\"-- a\");\n    }\n\n    @Test\n    public void testScriptStripFullLineCommentNested() {\n        hiveCliShell.execute(\"set x=\\n-- a\\n1;\");\n        List<String> results = hiveCliShell.executeQuery(\"set x\");\n        assertThat(results, is(Arrays.asList(\"x=1\")));\n    }\n\n}\n"
  },
  {
    "path": "src/test/java/com/klarna/hiverunner/HiveVariablesTest.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner;\n\nimport com.klarna.hiverunner.annotations.HiveSQL;\nimport org.junit.jupiter.api.Assertions;\nimport org.junit.Rule;\nimport org.junit.Test;\nimport org.junit.contrib.java.lang.system.EnvironmentVariables;\nimport org.junit.runner.RunWith;\n\n@RunWith(StandaloneHiveRunner.class)\npublic class HiveVariablesTest {\n\n    @Rule\n    public final EnvironmentVariables environmentVariables = new EnvironmentVariables();\n\n    @HiveSQL(files = {}, autoStart = false)\n    public HiveShell shell;\n\n    @Test\n    public void substitutedVariablesShouldBeExpanded() {\n        shell.setHiveConfValue(\"origin\", \"spanish\");\n        shell.start();\n\n        Assertions.assertEquals(\"The spanish fox\", shell.expandVariableSubstitutes(\"The ${hiveconf:origin} fox\"));\n    }\n\n    @Test\n    public void nestedSubstitutesShouldBeExpanded() {\n        shell.setHiveVarValue(\"origin\", \"${hiveconf:origin2}\");\n        shell.setHiveConfValue(\"origin2\", \"spanish\");\n        shell.setHiveConfValue(\"animal\", \"fox\");\n        shell.setHiveConfValue(\"origin_animal\", \"${hivevar:origin} ${hiveconf:animal}\");\n        shell.setHiveConfValue(\"substitute\", \"origin_animal\");\n        shell.start();\n\n        Assertions.assertEquals(\"The spanish fox\",\n              
  shell.expandVariableSubstitutes(\"The ${hiveconf:${hiveconf:substitute}}\"));\n    }\n\n    @Test\n    @SuppressWarnings(\"deprecation\")\n    public void nestedSubstitutesShouldBeExpandedUsingDeprecatedSetProperty() {\n        shell.setHiveVarValue(\"origin\", \"${hiveconf:origin2}\");\n        shell.setProperty(\"origin2\", \"spanish\");\n        shell.setProperty(\"animal\", \"fox\");\n        shell.setProperty(\"origin_animal\", \"${hivevar:origin} ${hiveconf:animal}\");\n        shell.setProperty(\"substitute\", \"origin_animal\");\n        shell.start();\n\n        Assertions.assertEquals(\"The spanish fox\",\n                shell.expandVariableSubstitutes(\"The ${hiveconf:${hiveconf:substitute}}\"));\n    }\n\n    @Test\n    public void unexpandableSubstitutesShouldNotBeExpanded() {\n        shell.setHiveConfValue(\"origin\", \"spanish\");\n        shell.start();\n        Assertions.assertEquals(\"The spanish ${hiveconf:animal}\",\n                shell.expandVariableSubstitutes(\"The ${hiveconf:origin} ${hiveconf:animal}\"));\n    }\n\n    @Test\n    public void testHiveVarCli() {\n        shell.addSetupScript(\"set hivevar:foobar=fox\");\n        shell.start();\n        Assertions.assertEquals(\"fox love fox\", shell.expandVariableSubstitutes(\"${hivevar:foobar} love ${foobar}\"));\n    }\n\n    @Test\n    public void testHiveVar() {\n        shell.setHiveVarValue(\"foobar\", \"fox\");\n        shell.start();\n        Assertions.assertEquals(\"fox love fox\", shell.expandVariableSubstitutes(\"${hivevar:foobar} love ${foobar}\"));\n    }\n\n    @Test\n    public void testSystemVar() {\n        System.setProperty(\"foo\", \"dog\");\n        System.setProperty(\"bar\", \"nice\");\n        shell.start();\n        shell.execute(\"Create database ${system:bar}${system:foo}\");\n        Assertions.assertEquals(\"nice dog\", shell.expandVariableSubstitutes(\"${system:bar} ${system:foo}\"));\n    }\n\n    @Test\n    public void testEnvironmentVar() {\n        
environmentVariables.set(\"foo\", \"dog\");\n        environmentVariables.set(\"bar\", \"nice\");\n        shell.start();\n        shell.execute(\"Create database ${env:bar}${env:foo}\");\n        Assertions.assertEquals(\"nice dog\", shell.expandVariableSubstitutes(\"${env:bar} ${env:foo}\"));\n    }\n}\n"
  },
  {
    "path": "src/test/java/com/klarna/hiverunner/InsertIntoTableIntegrationTest.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner;\n\nimport static org.junit.jupiter.api.Assertions.assertArrayEquals;\nimport static org.junit.jupiter.api.Assertions.assertEquals;\n\nimport java.io.File;\nimport java.io.IOException;\nimport java.util.List;\n\nimport org.junit.jupiter.api.BeforeEach;\nimport org.junit.jupiter.api.Test;\nimport org.junit.jupiter.api.extension.ExtendWith;\n\nimport com.klarna.hiverunner.annotations.HiveSQL;\nimport com.klarna.hiverunner.data.TsvFileParser;\n\n@ExtendWith(HiveRunnerExtension.class)\npublic class InsertIntoTableIntegrationTest {\n\n    @HiveSQL(files = {})\n    private HiveShell hiveShell;\n\n    @BeforeEach\n    public void before() {\n        hiveShell.execute(\"create database test_db\");\n    }\n\n    @Test\n    public void insertDataIntoOrcPartitionedTable() {\n        testInsertDataIntoPartitionedTable(\"orc\");\n    }\n\n    @Test\n    public void insertDataIntoTextPartitionedTable() {\n        testInsertDataIntoPartitionedTable(\"textfile\");\n    }\n\n    @Test\n    public void insertDataIntoSequenceFilePartitionedTable() {\n        testInsertDataIntoPartitionedTable(\"sequencefile\");\n    }\n\n    private void testInsertDataIntoPartitionedTable(String storedAs) {\n        hiveShell\n                .execute(new StringBuilder()\n                        
.append(\"create table test_db.test_table (\")\n                        .append(\"c0 string\")\n                        .append(\")\")\n                        .append(\"partitioned by (c1 string)\")\n                        .append(\"stored as \" + storedAs)\n                        .toString());\n\n        hiveShell\n                .insertInto(\"test_db\", \"test_table\")\n                .addRow(\"v1\", \"p1\")\n                .addRow(\"v2\", \"p1\")\n                .addRow(\"v3\", \"p2\")\n                .addRow(\"v4\", \"p2\")\n                .commit();\n\n        List<Object[]> result = hiveShell.executeStatement(\"select * from test_db.test_table\");\n\n        assertEquals(4, result.size());\n\n        assertArrayEquals(new Object[]{\"v1\", \"p1\"}, result.get(0));\n        assertArrayEquals(new Object[]{\"v2\", \"p1\"}, result.get(1));\n        assertArrayEquals(new Object[]{\"v3\", \"p2\"}, result.get(2));\n        assertArrayEquals(new Object[]{\"v4\", \"p2\"}, result.get(3));\n    }\n\n    @Test\n    public void insertDataIntoTablePrimitiveParsedStrings() {\n        hiveShell\n                .execute(new StringBuilder()\n                        .append(\"create table test_db.test_table (\")\n                        .append(\"c0 string,\")\n                        .append(\"c1 boolean,\")\n                        .append(\"c2 tinyint,\")\n                        .append(\"c3 smallint,\")\n                        .append(\"c4 int,\")\n                        .append(\"c5 bigint,\")\n                        .append(\"c6 float,\")\n                        .append(\"c7 double,\")\n                        .append(\"c8 date,\")\n                        .append(\"c9 timestamp,\")\n                        .append(\"c10 binary,\")\n                        .append(\"c11 decimal(3,2),\")\n                        .append(\"c12 varchar(1),\")\n                        .append(\"c13 char(1)\")\n                        .append(\")\")\n                        
.append(\"stored as orc\")\n                        .toString());\n\n        hiveShell\n                .insertInto(\"test_db\", \"test_table\")\n                .newRow()\n                .set(\"c0\", \"foo\")\n                .set(\"c1\", \"true\")\n                .set(\"c2\", \"0\")\n                .set(\"c3\", \"1\")\n                .set(\"c4\", \"2\")\n                .set(\"c5\", \"3\")\n                .set(\"c6\", \"1.1\")\n                .set(\"c7\", \"2.2\")\n                .set(\"c8\", \"2015-10-15\")\n                .set(\"c9\", \"2015-10-15 23:59:59.999\")\n                .set(\"c10\", \"0,1,2\")\n                .set(\"c11\", \"1.234\")\n                .set(\"c12\", \"ab\")\n                .set(\"c13\", \"cd\")\n                .commit();\n\n        List<Object[]> result = hiveShell.executeStatement(\"select * from test_db.test_table\");\n\n        assertEquals(1, result.size());\n\n        Object[] row = result.get(0);\n        assertEquals(\"foo\", row[0]);\n        assertEquals(true, row[1]);\n        assertEquals((byte) 0, row[2]);\n        assertEquals((short) 1, row[3]);\n        assertEquals(2, row[4]);\n        assertEquals(3L, row[5]);\n        assertEquals(1.1D, (double) row[6], 0.0001D);\n        assertEquals(2.2D, (double) row[7], 0.0001D);\n        assertEquals(\"2015-10-15\", row[8]);\n        assertEquals(\"2015-10-15 23:59:59.999\", row[9]);\n        assertArrayEquals(new byte[]{0, 1, 2}, (byte[]) row[10]);\n        assertEquals(\"1.23\", row[11]);\n        assertEquals(\"a\", row[12]);\n        assertEquals(\"c\", row[13]);\n    }\n\n    @Test\n    public void insertsDataFromTsvFileIntoOrcTable() throws IOException {\n        File dataFile = new File(\"src/test/resources/InsertIntoTableIntegrationTest/data.tsv\");\n        hiveShell\n                .execute(new StringBuilder()\n                        .append(\"create table test_db.test_table (\")\n                        .append(\"a string,\")\n                        
.append(\"b string,\")\n                        .append(\"c string,\")\n                        .append(\"d string,\")\n                        .append(\"e string\")\n                        .append(\")\")\n                        .append(\"stored as orc\")\n                        .toString());\n        hiveShell.insertInto(\"test_db\", \"test_table\").withAllColumns().addRowsFromTsv(dataFile).commit();\n        List<Object[]> result = hiveShell.executeStatement(\"select * from test_db.test_table\");\n        assertEquals(2, result.size());\n        assertArrayEquals(new String[]{\"a1\", \"b1\", \"c1\", \"d1\", \"e1\"}, result.get(0));\n        assertArrayEquals(new String[]{\"a2\", \"b2\", \"c2\", \"d2\", \"e2\"}, result.get(1));\n\n    }\n\n    @Test\n    public void insertsDataFromTsvFileWithCustomDelimiterAndNullValue() throws IOException {\n        File dataFile = new File(\"src/test/resources/InsertIntoTableIntegrationTest/dataWithCustomNullValue.csv\");\n        hiveShell\n                .execute(new StringBuilder()\n                        .append(\"create table test_db.test_table (\")\n                        .append(\"a string,\")\n                        .append(\"b string,\")\n                        .append(\"c string,\")\n                        .append(\"d string,\")\n                        .append(\"e string\")\n                        .append(\")\")\n                        .append(\"stored as orc\")\n                        .toString());\n        hiveShell.insertInto(\"test_db\", \"test_table\").withAllColumns().addRowsFromDelimited(dataFile, \",\", \"NULL\").commit();\n        List<Object[]> result = hiveShell.executeStatement(\"select * from test_db.test_table\");\n        assertEquals(2, result.size());\n        assertArrayEquals(new String[]{\"a1\", \"b1\", \"c1\", \"d1\", null}, result.get(0));\n        assertArrayEquals(new String[]{\"a2\", \"b2\", null, \"d2\", \"e2\"}, result.get(1));\n    }\n\n    @Test\n    public void 
insertsDataFromFileWithCustomStrategy() throws IOException {\n        File dataFile = new File(\"src/test/resources/InsertIntoTableIntegrationTest/dataWithCustomNullValue.csv\");\n        hiveShell\n                .execute(new StringBuilder()\n                        .append(\"create table test_db.test_table (\")\n                        .append(\"a string,\")\n                        .append(\"b string,\")\n                        .append(\"c string,\")\n                        .append(\"d string,\")\n                        .append(\"e string\")\n                        .append(\")\")\n                        .append(\"stored as orc\")\n                        .toString());\n        hiveShell\n                .insertInto(\"test_db\", \"test_table\")\n                .withAllColumns()\n                .addRowsFrom(dataFile, new TsvFileParser().withDelimiter(\",\").withNullValue(\"NULL\"))\n                .commit();\n        List<Object[]> result = hiveShell.executeStatement(\"select * from test_db.test_table\");\n        assertEquals(2, result.size());\n        assertArrayEquals(new String[]{\"a1\", \"b1\", \"c1\", \"d1\", null}, result.get(0));\n        assertArrayEquals(new String[]{\"a2\", \"b2\", null, \"d2\", \"e2\"}, result.get(1));\n    }\n\n}\n"
  },
  {
    "path": "src/test/java/com/klarna/hiverunner/IntegerPartitionFormatTest.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner;\n\nimport com.klarna.hiverunner.annotations.HiveResource;\nimport com.klarna.hiverunner.annotations.HiveSQL;\nimport com.klarna.hiverunner.annotations.HiveSetupScript;\nimport org.junit.jupiter.api.Assertions;\nimport org.junit.jupiter.api.BeforeEach;\nimport org.junit.jupiter.api.Test;\nimport org.junit.jupiter.api.extension.ExtendWith;\n\nimport java.util.Arrays;\n\n@ExtendWith(HiveRunnerExtension.class)\npublic class IntegerPartitionFormatTest {\n\n\n    @HiveSQL(files = {})\n    public HiveShell hiveShell;\n\n    @HiveResource(targetFile = \"${hiveconf:hadoop.tmp.dir}/foo/month=07/foo.data\")\n    public String data = \"06\\n6\";\n\n    @HiveSetupScript\n    public String setup =\n            \"CREATE EXTERNAL TABLE foo (id int)\" +\n                    \"  PARTITIONED BY(month int)\" +\n                    \"  ROW FORMAT DELIMITED FIELDS TERMINATED BY ','\" +\n                    \"  STORED AS TEXTFILE\" +\n                    \"  LOCATION '${hiveconf:hadoop.tmp.dir}/foo';\";\n\n    @BeforeEach\n    public void repair() {\n        // MSCK REPAIR TABLE adds metadata about partitions to the Hive metastore for\n        // partitions for which such metadata doesn't already exist\n        hiveShell.execute(\"set hive.mv.files.thread=0\");\n        
hiveShell.execute(\"MSCK REPAIR TABLE foo\");\n    }\n\n    @Test\n    public void testInteger() {\n        Assertions.assertEquals(Arrays.asList(\"6\\t7\", \"6\\t7\"), hiveShell.executeQuery(\"select * from foo where id = 6\"));\n    }\n\n    @Test\n    public void testPrefixedInteger() {\n        Assertions.assertEquals(Arrays.asList(\"6\\t7\", \"6\\t7\"), hiveShell.executeQuery(\"select * from foo where id = 06\"));\n    }\n\n\n    @Test\n    public void testPrefixedPartitionInteger() {\n        Assertions.assertEquals(Arrays.asList(\"6\\t7\", \"6\\t7\"), hiveShell.executeQuery(\"select * from foo where id = 6 and month = 07\"));\n    }\n\n\n    @Test\n    public void testNonPrefixedPartitionInteger() {\n        Assertions.assertEquals(Arrays.asList(\"6\\t7\", \"6\\t7\"), hiveShell.executeQuery(\"select * from foo where id = 6 and month = 7\"));\n    }\n}\n"
  },
  {
    "path": "src/test/java/com/klarna/hiverunner/InteractiveHiveShellTest.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner;\n\nimport com.google.common.io.Files;\nimport com.klarna.hiverunner.annotations.HiveSQL;\nimport org.junit.Assert;\nimport org.junit.Rule;\nimport org.junit.Test;\nimport org.junit.rules.TemporaryFolder;\nimport org.junit.runner.RunWith;\n\nimport java.io.File;\nimport java.io.IOException;\nimport java.nio.charset.Charset;\nimport java.util.Arrays;\nimport java.util.HashSet;\nimport java.util.List;\nimport java.util.UUID;\n\n@RunWith(StandaloneHiveRunner.class)\npublic class InteractiveHiveShellTest {\n\n    @HiveSQL(files = {}, autoStart = false)\n    private HiveShell shell;\n\n    @Rule\n    public TemporaryFolder temporaryFolder = new TemporaryFolder();\n\n    @Test\n    public void setupScriptShouldBeExecuted() {\n        shell.addSetupScript(\"create database foo;\");\n        shell.start();\n        List<String> actual = shell.executeQuery(\"show databases\");\n        List<String> expected = Arrays.asList(\"default\", \"foo\");\n        Assert.assertEquals(new HashSet<>(expected), new HashSet<>(actual));\n    }\n\n    @Test\n    public void setupScriptsShouldBeExecuted() throws IOException {\n        shell.addSetupScripts(\n                createFileBasedScript(\"create database foo;\"),\n                createFileBasedScript(\"create table foo.bar(id 
int);\"));\n        shell.start();\n\n        List<String> actual = shell.executeQuery(\"show databases\");\n        List<String> expected = Arrays.asList(\"default\", \"foo\");\n        Assert.assertEquals(new HashSet<>(expected), new HashSet<>(actual));\n\n        List<String> actualTable = shell.executeQuery(\"show tables in foo\");\n        List<String> expectedTable = Arrays.asList(\"bar\");\n        Assert.assertEquals(new HashSet<>(expectedTable), new HashSet<>(actualTable));\n    }\n\n    @Test\n    public void setupScriptsShouldBeExecutedInOrder() throws IOException {\n        shell.addSetupScripts(createFileBasedScript(\"create database foo;\"));\n        shell.addSetupScript(\"use foo;\");\n        shell.addSetupScripts(createFileBasedScript(\"create table bar(id int)\"));\n        shell.start();\n\n        List<String> actualTable = shell.executeQuery(\"show tables in foo\");\n        List<String> expectedTable = Arrays.asList(\"bar\");\n        Assert.assertEquals(new HashSet<>(expectedTable), new HashSet<>(actualTable));\n    }\n\n    private File createFileBasedScript(String script) throws IOException {\n        File file = temporaryFolder.newFile(UUID.randomUUID().toString() + \".sql\");\n        Files.write(script, file, Charset.defaultCharset());\n        return file;\n    }\n\n\n}\n"
  },
  {
    "path": "src/test/java/com/klarna/hiverunner/LeftOuterJoinTest.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner;\n\nimport com.klarna.hiverunner.annotations.HiveSQL;\nimport com.klarna.hiverunner.annotations.HiveSetupScript;\nimport org.junit.jupiter.api.Assertions;\nimport org.junit.jupiter.api.Test;\nimport org.junit.jupiter.api.extension.ExtendWith;\n\nimport java.util.Arrays;\nimport java.util.List;\n\n@ExtendWith(HiveRunnerExtension.class)\npublic class LeftOuterJoinTest {\n\n    private final String hdfsSourceFoo = \"${hiveconf:hadoop.tmp.dir}/foo\";\n    private final String hdfsSourceBar = \"${hiveconf:hadoop.tmp.dir}/bar\";\n\n    @HiveSetupScript\n    String setup =\n            \"  CREATE TABLE foo (\" +\n                    \" id string,\" +\n                    \" value string\" +\n                    \"  )\" +\n                    \"  ROW FORMAT DELIMITED FIELDS TERMINATED BY '\\t'\" +\n                    \"  STORED AS TEXTFILE\" +\n                    \"  LOCATION '\" + hdfsSourceFoo + \"' ; \"\n                    +\n                    \"  CREATE TABLE bar (\" +\n                    \" id string,\" +\n                    \" value string\" +\n                    \"  )\" +\n                    \"  ROW FORMAT DELIMITED FIELDS TERMINATED BY '\\t'\" +\n                    \"  STORED AS TEXTFILE\" +\n                    \"  LOCATION '\" + hdfsSourceBar + \"' ;\" 
+\n                    \"\";\n\n\n    @HiveSQL(files = {}, autoStart = false)\n    private HiveShell hiveShell;\n\n\n    @Test\n    public void leftOuterJoin() {\n        hiveShell.addResource(hdfsSourceFoo + \"/data.csv\",\n                \"id1\\tfoo_value1\\nid3\\tfoo_value3\");\n        hiveShell.addResource(hdfsSourceBar + \"/data.csv\",\n                \"id1\\tbar_value1\\n\" +\n                        \"id2\\tbar_value2\");\n        hiveShell.start();\n\n        String query = \"SELECT foo.id, bar.value FROM foo left outer join bar on (foo.id = bar.id)\";\n\n        List<String> expected = Arrays.asList(\"id1\\tbar_value1\", \"id3\\tNULL\");\n        List<String> actual = hiveShell.executeQuery(query);\n\n        Assertions.assertEquals(expected, actual);\n    }\n\n\n}\n"
  },
  {
    "path": "src/test/java/com/klarna/hiverunner/MSCKRepairNpeTest.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner;\n\nimport com.klarna.hiverunner.annotations.HiveSQL;\nimport org.junit.jupiter.api.Test;\nimport org.junit.jupiter.api.extension.ExtendWith;\n\n@ExtendWith(HiveRunnerExtension.class)\npublic class MSCKRepairNpeTest {\n\n    @HiveSQL(files = {})\n    public HiveShell hiveShell;\n\n    @Test\n    public void testMsckRepair() {\n        hiveShell.execute(\"set hive.mv.files.thread=0\");\n\n        hiveShell.execute(\"CREATE EXTERNAL TABLE foo (id int)\" +\n                \"  PARTITIONED BY(month int)\" +\n                \"  ROW FORMAT DELIMITED FIELDS TERMINATED BY ','\" +\n                \"  STORED AS TEXTFILE\" +\n                \"  LOCATION '${hiveconf:hadoop.tmp.dir}/foo';\");\n\n\n        // This will throw a NPE in Hive 2.1.0/2.2.0 (See https://issues.apache.org/jira/browse/HIVE-14798 and https://issues.apache.org/jira/browse/HIVE-14924) \n        hiveShell.execute(\"MSCK REPAIR TABLE foo\");\n    }\n}\n"
  },
  {
    "path": "src/test/java/com/klarna/hiverunner/MacroTest.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner;\n\nimport java.util.Arrays;\n\nimport org.junit.jupiter.api.Assertions;\nimport org.junit.jupiter.api.Test;\nimport org.junit.jupiter.api.extension.ExtendWith;\n\nimport com.klarna.hiverunner.annotations.HiveResource;\nimport com.klarna.hiverunner.annotations.HiveSQL;\nimport com.klarna.hiverunner.annotations.HiveSetupScript;\n\n@ExtendWith(HiveRunnerExtension.class)\npublic class MacroTest {\n\n\n    @HiveSQL(files = {})\n    public HiveShell hiveShell;\n\n    @HiveResource(targetFile = \"${hiveconf:hadoop.tmp.dir}/foo/foo.data\")\n    public String data = \"easteregg\";\n\n    @HiveSetupScript\n    public String setup =\n            \"CREATE TABLE corpus (stanza string)\" +\n                    \"  ROW FORMAT DELIMITED FIELDS TERMINATED BY ','\" +\n                    \"  STORED AS TEXTFILE\" +\n                    \"  LOCATION '${hiveconf:hadoop.tmp.dir}/foo';\";\n\n    @HiveSetupScript\n    public String macro =\n            \"CREATE TEMPORARY MACRO foobarize (literal string) \" +\n                    \"concat('foo', concat(literal, 'bar'));\";\n\n    @Test\n    public void testMacro() {\n        Assertions.assertEquals(Arrays.asList(\"fooeastereggbar\"), hiveShell.executeQuery(\"select foobarize(stanza) from corpus\"));\n    }\n\n}\n"
  },
  {
    "path": "src/test/java/com/klarna/hiverunner/MethodLevelResourceTest.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner;\n\nimport com.google.common.io.Resources;\nimport com.klarna.hiverunner.annotations.HiveSQL;\nimport com.klarna.hiverunner.annotations.HiveSetupScript;\nimport org.junit.jupiter.api.Assertions;\nimport org.junit.jupiter.api.Test;\nimport org.junit.jupiter.api.extension.ExtendWith;\n\nimport java.io.File;\nimport java.net.URISyntaxException;\nimport java.util.Arrays;\n\n@ExtendWith(HiveRunnerExtension.class)\npublic class MethodLevelResourceTest {\n\n    @HiveSetupScript\n    String createTable = \"CREATE EXTERNAL TABLE foo (i INT, j INT, k INT)\" +\n            \"  ROW FORMAT DELIMITED FIELDS TERMINATED BY ','\" +\n            \"  STORED AS TEXTFILE\" +\n            \"  LOCATION '${hiveconf:hadoop.tmp.dir}'\";\n\n    @HiveSQL(files = {}, autoStart = false)\n    private HiveShell hiveShell;\n\n    @Test()\n    public void resourceLoadingAsStringTest() {\n\n        hiveShell.addResource(\"${hiveconf:hadoop.tmp.dir}/data.csv\", \"1,2,3\");\n        hiveShell.start();\n\n        Assertions.assertEquals(Arrays.asList(\"1\\t2\\t3\"), hiveShell.executeQuery(\"SELECT * FROM foo\"));\n    }\n\n    @Test()\n    public void resourceLoadingAsFileTest() throws URISyntaxException {\n\n        hiveShell.addResource(\"${hiveconf:hadoop.tmp.dir}/data.csv\",\n                new 
File(Resources.getResource(\"MethodLevelResourceTest/MethodLevelResourceTest.txt\").toURI()));\n\n        hiveShell.start();\n        Assertions.assertEquals(Arrays.asList(\"1\\t2\\t3\"), hiveShell.executeQuery(\"SELECT * FROM foo\"));\n    }\n\n\n}\n"
  },
  {
    "path": "src/test/java/com/klarna/hiverunner/MultipleExecutionEnginesTest.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner;\n\nimport com.klarna.hiverunner.annotations.HiveSQL;\nimport org.junit.jupiter.api.Assertions;\nimport org.junit.jupiter.api.Test;\nimport org.junit.jupiter.api.extension.ExtendWith;\n\nimport java.io.IOException;\nimport java.util.Arrays;\n\n@ExtendWith(HiveRunnerExtension.class)\npublic class MultipleExecutionEnginesTest {\n\n    @HiveSQL(files = {}, autoStart = false)\n    public HiveShell shell;\n\n\n    @Test\n    public void test() throws IOException {\n        shell.getResourceOutputStream(\"${hiveconf:hadoop.tmp.dir}/foo/data.txt\").write(\"a,b,c\\nd,e,f\".getBytes());\n        shell.addSetupScript(\n                \"create external table foo (s1 string, s2 string, s3 string) \" +\n                        \"ROW FORMAT DELIMITED \" +\n                        \"FIELDS TERMINATED BY ',' \" +\n                        \"LOCATION '${hiveconf:hadoop.tmp.dir}/foo/'\");\n        shell.start();\n\n        Assertions.assertEquals(Arrays.asList(\"a\\tb\\tc\", \"d\\te\\tf\"), shell.executeQuery(\"select * from foo\"));\n\n        shell.execute(\"set hive.tez.container.size=512\");\n        shell.execute(\"set hive.execution.engine=tez\");\n        Assertions.assertEquals(Arrays.asList(\"2\"), shell.executeQuery(\"select count(1) from foo\"));\n\n        
shell.execute(\"set hive.execution.engine=mr\");\n        Assertions.assertEquals(Arrays.asList(\"2\"), shell.executeQuery(\"select count(1) from foo\"));\n\n        shell.execute(\"set hive.execution.engine=tez\");\n        Assertions.assertEquals(Arrays.asList(\"2\"), shell.executeQuery(\"select count(1) from foo\"));\n\n        shell.execute(\"set hive.execution.engine=mr\");\n        Assertions.assertEquals(Arrays.asList(\"2\"), shell.executeQuery(\"select count(1) from foo\"));\n\n\n    }\n\n\n}\n"
  },
  {
    "path": "src/test/java/com/klarna/hiverunner/NeverEndingUdf.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner;\n\nimport org.apache.hadoop.hive.ql.exec.UDF;\nimport org.apache.hadoop.io.Text;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\nimport java.security.SecureRandom;\n\n\npublic class NeverEndingUdf extends UDF {\n\n    private static final Logger LOGGER = LoggerFactory.getLogger(NeverEndingUdf.class);\n\n    public Text evaluate(Text value) {\n        LOGGER.warn(\"Entering infinite loop\");\n        while (true) {\n            LOGGER.debug(\"Looping and generating random seed: {}\",\n                    new SecureRandom(value.copyBytes()).generateSeed(12332123));\n        }\n    }\n}\n"
  },
  {
    "path": "src/test/java/com/klarna/hiverunner/NoTimeoutTest.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner;\n\nimport com.klarna.hiverunner.annotations.HiveRunnerSetup;\nimport com.klarna.hiverunner.annotations.HiveSQL;\nimport com.klarna.hiverunner.config.HiveRunnerConfig;\n\nimport org.junit.jupiter.api.Assertions;\nimport org.junit.jupiter.api.BeforeEach;\nimport org.junit.jupiter.api.Test;\nimport org.junit.jupiter.api.extension.ExtendWith;\n\n@ExtendWith(HiveRunnerExtension.class)\npublic class NoTimeoutTest {\n\n    @HiveRunnerSetup\n    public final static HiveRunnerConfig CONFIG = new HiveRunnerConfig() {{\n        setTimeoutEnabled(false);\n        setTimeoutSeconds(5);\n        setTimeoutRetries(2);\n    }};\n\n    @HiveSQL(files = {})\n    private HiveShell hiveShell;\n\n    @BeforeEach\n    public void prepare() {\n        String disableTimeout = System.getProperty(\"disableTimeout\");\n        if (disableTimeout != null && Boolean.parseBoolean(disableTimeout)) {\n            System.out.println(\"Terminating test with success because timeout is disabled.\");\n        } else {\n            hiveShell.execute(\"create database baz\");\n            hiveShell.execute(\"use baz\");\n            hiveShell.execute(\"create temporary function sleep_one_second_udf as 'com.klarna.hiverunner.SlowlyFailingUdf'\");\n            hiveShell.execute(\"create table foo (bar 
string)\");\n            hiveShell.execute(\"insert into table foo values ('a')\");\n        }\n    }\n\n    /**\n     * Regression test for deadlock in ThrowOnTimeout.java that occurred when running with a long-running test case and disabled timeout.\n     *\n     * If the deadlock is introduced, this test will never terminate.\n     */\n    @Test\n    public void test() {\n        Assertions.assertThrows(IllegalArgumentException.class, () -> hiveShell.executeQuery(\"select sleep_one_second_udf(bar) from foo\"));\n    }\n}\n"
  },
  {
    "path": "src/test/java/com/klarna/hiverunner/OrcSnappyTest.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner;\n\nimport com.klarna.hiverunner.annotations.HiveResource;\nimport com.klarna.hiverunner.annotations.HiveSQL;\nimport org.junit.jupiter.api.Assertions;\nimport org.junit.jupiter.api.Disabled;\nimport org.junit.jupiter.api.Test;\nimport org.junit.jupiter.api.extension.ExtendWith;\n\nimport java.util.Arrays;\nimport java.util.List;\n\n@ExtendWith(HiveRunnerExtension.class)\npublic class OrcSnappyTest {\n\n    @HiveResource(targetFile = \"${hiveconf:hadoop.tmp.dir}/foo/data.csv\")\n    private String data = \"A,B\\nC,D\\nE,F\";\n\n    @HiveSQL(files = {\"OrcSnappyTest/ctas.sql\"})\n    private HiveShell hiveShell;\n\n    @Test\n    public void tablesShouldBeCreated() {\n        List<String> expected = Arrays.asList(\"foo\", \"foo_orc_nocomp\", \"foo_orc_snappy\");\n        List<String> actual = hiveShell.executeQuery(\"show tables\");\n        Assertions.assertEquals(expected, actual);\n    }\n\n    @Test\n    public void verifyThatDataIsAvailableInOrcNocomp() {\n        List<String> expected = Arrays.asList(\"A\\tB\", \"C\\tD\", \"E\\tF\");\n        List<String> actual = hiveShell.executeQuery(\"select * from foo_orc_nocomp\");\n        Assertions.assertEquals(expected, actual);\n    }\n\n    @Test\n    public void verifyThatDataIsAvailableInOrcSnappy() {\n        
List<String> expected = Arrays.asList(\"A\\tB\", \"C\\tD\", \"E\\tF\");\n        List<String> actual = hiveShell.executeQuery(\"select * from foo_orc_snappy\");\n        Assertions.assertEquals(expected, actual);\n    }\n\n    @Disabled\n    // Fails with java.lang.UnsatisfiedLinkError: org.apache.hadoop.util.NativeCodeLoader.buildSupportsSnappy()Z\n    @Test\n    public void testCountOrcNocomp() {\n        List<String> expected = Arrays.asList(\"3\");\n        List<String> actual = hiveShell.executeQuery(\"select count(*) from foo_orc_nocomp\");\n        Assertions.assertEquals(expected, actual);\n    }\n\n    @Disabled\n    // Fails with java.lang.UnsatisfiedLinkError: org.apache.hadoop.util.NativeCodeLoader.buildSupportsSnappy()Z\n    @Test\n    public void testCountOrcSnappy() {\n        List<String> expected = Arrays.asList(\"3\");\n        List<String> actual = hiveShell.executeQuery(\"select count(*) from foo_orc_snappy\");\n        Assertions.assertEquals(expected, actual);\n    }\n\n}\n"
  },
  {
    "path": "src/test/java/com/klarna/hiverunner/ParquetInsertionTest.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner;\n\nimport com.klarna.hiverunner.annotations.HiveSQL;\nimport com.klarna.hiverunner.annotations.HiveSetupScript;\nimport org.junit.jupiter.api.Assertions;\nimport org.junit.jupiter.api.Test;\nimport org.junit.jupiter.api.extension.ExtendWith;\n\nimport java.util.Arrays;\n\n/**\n * Methods that set up data in HiveRunner use HCatalog, which initially did not support writing to Parquet files.\n *\n * A version of HCatalog with this functionality working was introduced in Hive 3.\n * It was also subsequently back-ported to Hive 2.3.7, which is used in HiveRunner >= 5.2.0.\n *\n * This test validates that Parquet insertion is now possible. 
It has been verified to fail on HiveRunner <= 5.1.x.\n */\n@ExtendWith(HiveRunnerExtension.class)\npublic class ParquetInsertionTest {\n\n    @HiveSQL(files = {})\n    private HiveShell hiveShell;\n\n    private static final String TABLE_NAME = \"parquet_test_table\";\n\n    @HiveSetupScript\n    private static final String CREATE_TABLE_SCRIPT = \"CREATE TABLE \" + TABLE_NAME + \" (col1 string) STORED AS PARQUET;\";\n\n    @Test\n    public void testCanInsertToParquetTable() {\n        String textValue = \"Some text value\";\n        hiveShell.insertInto(\"default\", TABLE_NAME).addRow(textValue).commit();\n        Assertions.assertEquals(hiveShell.executeQuery(\"SELECT col1 FROM \" + TABLE_NAME), Arrays.asList(textValue));\n    }\n\n}\n"
  },
  {
    "path": "src/test/java/com/klarna/hiverunner/PartitionSupportTest.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner;\n\nimport com.klarna.hiverunner.annotations.HiveProperties;\nimport com.klarna.hiverunner.annotations.HiveResource;\nimport com.klarna.hiverunner.annotations.HiveSQL;\nimport org.apache.commons.collections.MapUtils;\nimport org.apache.thrift.TException;\nimport org.junit.jupiter.api.Assertions;\nimport org.junit.jupiter.api.BeforeEach;\nimport org.junit.jupiter.api.Test;\nimport org.junit.jupiter.api.extension.ExtendWith;\n\nimport java.io.IOException;\nimport java.util.Arrays;\nimport java.util.HashMap;\nimport java.util.Map;\n\n@ExtendWith(HiveRunnerExtension.class)\npublic class PartitionSupportTest {\n\n    private final String tableName = \"foo_bar\";\n\n    @HiveResource(targetFile = \"${hiveconf:HDFS_ROOT_FOO}/foo/year=2013/month=11/data.csv\")\n    public String data1 = \"a,b,c\\nf,g,h\\nt,j,k\";\n\n    @HiveResource(targetFile = \"${hiveconf:HDFS_ROOT_FOO}/foo/year=2012/month=02/data.csv\")\n    public String data2 = \"q,w,e\\nr,t,y\\nu,i,o\";\n\n\n    @HiveProperties\n    public Map<String, String> hiveProperties = MapUtils.putAll(new HashMap(), new String[]{\n            \"table.name\", tableName,\n            \"HDFS_ROOT_FOO\", \"${hiveconf:hadoop.tmp.dir}\"\n    });\n\n    @HiveSQL(files = \"PartitionSupportTest/hql_example.sql\")\n    public 
HiveShell hiveShell;\n\n\n    @BeforeEach\n    public void repairPartitions() {\n        // TODO: Incorporate support for REPAIR TABLE in HiveRunner fwk.\n        // if new partitions are directly added to HDFS the metastore is not aware of these partitions.\n        // 'MSCK REPAIR TABLE table' adds metadata about partitions to the Hive metastore for partitions\n        // for which such metadata doesn't already exist.\n        hiveShell.execute(\"MSCK REPAIR TABLE ${hiveconf:table.name}\");\n    }\n\n\n    @Test\n    public void testSelectMax() throws TException, IOException {\n        Assertions.assertEquals(\n                Arrays.asList(\"11\"),\n                hiveShell.executeQuery(String.format(\"select max(month) from %s\", tableName)));\n\n        Assertions.assertEquals(\n                Arrays.asList(\"2\"),\n                hiveShell.executeQuery(String.format(\"select min(month) from %s\", tableName)));\n    }\n\n    @Test\n    public void testShowTables() {\n        Assertions.assertEquals(Arrays.asList(tableName), hiveShell.executeQuery(\"SHOW TABLES\"));\n    }\n\n\n}\n"
  },
  {
    "path": "src/test/java/com/klarna/hiverunner/ReservedKeywordTest.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner;\n\nimport com.klarna.hiverunner.annotations.HiveSQL;\nimport org.junit.jupiter.api.Disabled;\nimport org.junit.jupiter.api.Test;\nimport org.junit.jupiter.api.extension.ExtendWith;\n\nimport java.io.IOException;\n\n@ExtendWith(HiveRunnerExtension.class)\npublic class ReservedKeywordTest {\n\n    @HiveSQL(files = {}, autoStart = false)\n    private HiveShell hiveShell;\n\n\n    /**\n     * As of Hive 1.2 there are a number of new reserved keywords, e.g. date, timestamp and update.\n     * This test verifies that we still can have backwards compatibility by setting the HiveConf\n     * 'hive.support.sql11.reserved.keywords' to false.\n     */\n    @Test\n    @Disabled(\"Since Hive 2.3.0 this property is no longer available in hive see https://issues.apache.org/jira/browse/HIVE-14872, use backticks\")\n    public void reservedKeywordsShouldBeAllowedWhenHiveConfIsSet() throws IOException {\n\n        hiveShell.setHiveConfValue(\"hive.support.sql11.reserved.keywords\", \"false\");\n        hiveShell.addSetupScript(\"CREATE table FOO (date String, timestamp string, update string)\");\n\n        hiveShell.start();\n\n    }\n\n    /**\n     * As of Hive 1.2 there are a number of new reserved keywords, e.g. 
date, timestamp and update.\n     * This test verifies that we still can use the identifier by adding a backtick quote.\n     */\n    @Test\n    public void reservedKeywordsShouldBeAllowedWhenIdentifierHasBacktickQuote() throws IOException {\n\n        hiveShell.addSetupScript(\"CREATE table FOO (`date` String, `timestamp` string, `update` string)\");\n\n        hiveShell.start();\n\n    }\n}\n"
  },
  {
    "path": "src/test/java/com/klarna/hiverunner/ResourceOutputStreamTest.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner;\n\nimport com.klarna.hiverunner.annotations.HiveSQL;\nimport org.apache.hadoop.conf.Configuration;\nimport org.apache.hadoop.fs.FSDataOutputStream;\nimport org.apache.hadoop.io.NullWritable;\nimport org.apache.hadoop.io.SequenceFile;\nimport org.apache.hadoop.io.Text;\nimport org.junit.jupiter.api.Assertions;\nimport org.junit.jupiter.api.Test;\nimport org.junit.jupiter.api.extension.ExtendWith;\n\nimport java.io.IOException;\nimport java.io.OutputStream;\nimport java.util.Arrays;\n\n@ExtendWith(HiveRunnerExtension.class)\npublic class ResourceOutputStreamTest {\n\n    @HiveSQL(files = {}, autoStart = false)\n    private HiveShell shell;\n\n    @Test\n    public void writeShouldOnlyBeAllowedBeforeStartHasBeenCalled() throws IOException {\n\n        OutputStream resourceOutputStream =\n                shell.getResourceOutputStream(\"${hiveconf:hadoop.tmp.dir}/baz/foo.bar\");\n\n        shell.start();\n\n        Assertions.assertThrows(IllegalStateException.class, () -> resourceOutputStream.write(\"Foo\\nBar\\nBaz\".getBytes()));\n\n    }\n\n    @Test\n    public void itShouldBePossibleToAddAResourceByOutputStream() throws IOException {\n\n        OutputStream resourceOutputStream =\n                
shell.getResourceOutputStream(\"${hiveconf:hadoop.tmp.dir}/baz/foo.bar\");\n\n        resourceOutputStream.write(\"Foo\\nBar\\nBaz\".getBytes());\n\n        shell.addSetupScript(\"\" +\n                \"create table foobar(str string) \" +\n                \"location '${hiveconf:hadoop.tmp.dir}/baz'\");\n\n        shell.start();\n\n        Assertions.assertEquals(Arrays.asList(\"Foo\", \"Bar\", \"Baz\"), shell.executeQuery(\"select * from foobar\"));\n    }\n\n    @Test\n    public void sequenceFile() throws IOException {\n\n        OutputStream resourceOutputStream =\n                shell.getResourceOutputStream(\"${hiveconf:hadoop.tmp.dir}/baz/foo.bar\");\n\n        SequenceFile.Writer sequenceFileWriter = createSequenceFileWriter(resourceOutputStream);\n\n        sequenceFileWriter.append(NullWritable.get(), new Text(\"Foo\"));\n        sequenceFileWriter.append(NullWritable.get(), new Text(\"Bar\"));\n        sequenceFileWriter.append(NullWritable.get(), new Text(\"\\\\N\"));\n        sequenceFileWriter.append(NullWritable.get(), new Text(\"Baz\"));\n\n        shell.addSetupScript(\"\" +\n                \"create table foobar(str string) \" +\n                \"STORED AS SEQUENCEFILE \" +\n                \"location '${hiveconf:hadoop.tmp.dir}/baz'\");\n\n        shell.start();\n\n        Assertions.assertEquals(Arrays.asList(\"Foo\", \"Bar\", \"_NULL_\", \"Baz\"),\n                shell.executeQuery(\"select * from foobar\", \"\\t\", \"_NULL_\"));\n    }\n\n\n    private SequenceFile.Writer createSequenceFileWriter(OutputStream resourceOutputStream) throws IOException {\n        return SequenceFile.createWriter(new Configuration(),\n                SequenceFile.Writer.stream(new FSDataOutputStream(resourceOutputStream, null)),\n                SequenceFile.Writer.keyClass(NullWritable.class),\n                SequenceFile.Writer.valueClass(Text.class));\n    }\n\n}\n"
  },
  {
    "path": "src/test/java/com/klarna/hiverunner/SchemaResetBetweenTestMethodsTest.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner;\n\nimport com.klarna.hiverunner.annotations.HiveSQL;\nimport org.junit.jupiter.api.Assertions;\nimport org.junit.jupiter.api.Test;\nimport org.junit.jupiter.api.extension.ExtendWith;\n\nimport java.util.Arrays;\nimport java.util.Collections;\nimport java.util.List;\n\n/**\n * Verifies that the database is reset to 'default' in tear down phase.\n * It seems like Hive (at least 0.11) has some static variable to store and\n * represent the currently set database.\n * <p/>\n * This is solved by doing a 'use default' in {@link com.klarna.hiverunner.builder.HiveShellTearable#tearDown()}\n */\n@ExtendWith(HiveRunnerExtension.class)\npublic class SchemaResetBetweenTestMethodsTest {\n\n\n    @HiveSQL(files = {})\n    private HiveShell hiveShell;\n\n    @Test\n    public void createDatabaseBar() {\n        // Create a table. 
It is assumed that the current database is 'default'\n        hiveShell.execute(\"create table baz (i int)\");\n        // If the current database was not 'default', this row would throw an exception\n        hiveShell.execute(\"select * from default.baz\");\n\n        // Create any database and set it to current so that the other\n        // test case may verify that it was indeed reset to 'default' at teardown.\n        hiveShell.execute(\"create database bar\");\n        hiveShell.execute(\"USE bar\");\n\n        List<String> expectedDatabases = Arrays.asList(\"bar\", \"default\");\n        List<String> actualDatabases = hiveShell.executeQuery(\"show databases\");\n\n        Collections.sort(expectedDatabases);\n        Collections.sort(actualDatabases);\n\n        Assertions.assertEquals(expectedDatabases, actualDatabases);\n\n    }\n\n    @Test\n    public void createDatabaseFoo() {\n        // See comments in test case above\n        hiveShell.execute(\"create table baz (i int)\");\n        hiveShell.execute(\"select * from default.baz\");\n        hiveShell.execute(\"create database foo\");\n        hiveShell.execute(\"USE foo\");\n\n        List<String> expectedDatabases = Arrays.asList(\"foo\", \"default\");\n        List<String> actualDatabases = hiveShell.executeQuery(\"show databases\");\n\n        Collections.sort(expectedDatabases);\n        Collections.sort(actualDatabases);\n\n        Assertions.assertEquals(expectedDatabases, actualDatabases);\n    }\n\n\n}\n"
  },
  {
    "path": "src/test/java/com/klarna/hiverunner/SerdeTest.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner;\n\nimport com.klarna.hiverunner.annotations.HiveResource;\nimport com.klarna.hiverunner.annotations.HiveSQL;\nimport org.apache.thrift.TException;\nimport org.junit.jupiter.api.Assertions;\nimport org.junit.jupiter.api.Test;\nimport org.junit.jupiter.api.extension.ExtendWith;\n\nimport java.io.IOException;\nimport java.util.Arrays;\nimport java.util.Collections;\nimport java.util.List;\n\n@ExtendWith(HiveRunnerExtension.class)\npublic class SerdeTest {\n\n    private final String hdfsSource = \"${hiveconf:hadoop.tmp.dir}/serde\";\n\n    @HiveResource(targetFile = \"${hiveconf:hadoop.tmp.dir}/customSerde/data.csv\")\n    private String data1 = \"\" +\n            \"a,b,c\\n\" +\n            \"f,g,h\\n\" +\n            \"t,j,k\\n\" +\n            \"q,w,e\\n\" +\n            \"r,t,y\\n\" +\n            \"u,i,o\";\n\n\n    @HiveSQL(files = {\"SerdeTest/create_table.sql\", \"SerdeTest/hql_custom_serde.sql\"}, autoStart = false)\n    private HiveShell hiveShell;\n\n    @Test\n    public void testWithProvidedRegexSerde() {\n        hiveShell.addResource(hdfsSource + \"/data.csv\", \"123#FOO\");\n        hiveShell.start();\n        Assertions.assertEquals(Arrays.asList(\"123\\tFOO\"), hiveShell.executeQuery(\"SELECT * FROM serde_test\"));\n    }\n\n    @Test\n    public 
void testWithCustomSerde() throws TException, IOException {\n        hiveShell.start();\n        List<String> actual = hiveShell.executeQuery(String.format(\"select * from customSerdeTable\"));\n        List<String> expected = Arrays.asList(\n                \"Q\\tW\\tE\",\n                \"R\\tT\\tY\",\n                \"U\\tI\\tO\",\n                \"A\\tB\\tC\",\n                \"F\\tG\\tH\",\n                \"T\\tJ\\tK\");\n\n        Collections.sort(actual);\n        Collections.sort(expected);\n\n        Assertions.assertEquals(expected, actual);\n    }\n\n\n}\n\n"
  },
  {
    "path": "src/test/java/com/klarna/hiverunner/SetHiveExecutionEngineTest.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner;\n\nimport com.klarna.hiverunner.annotations.HiveRunnerSetup;\nimport com.klarna.hiverunner.annotations.HiveSQL;\nimport com.klarna.hiverunner.config.HiveRunnerConfig;\nimport org.apache.hadoop.hive.conf.HiveConf;\nimport org.junit.jupiter.api.Assertions;\nimport org.junit.jupiter.api.Test;\nimport org.junit.jupiter.api.extension.ExtendWith;\n\n@ExtendWith(HiveRunnerExtension.class)\npublic class SetHiveExecutionEngineTest {\n\n    @HiveRunnerSetup\n    public HiveRunnerConfig config = new HiveRunnerConfig() {{\n        setHiveExecutionEngine(\"tez\");\n    }};\n\n    @HiveSQL(files = {}, autoStart = false)\n    public HiveShell hiveShell;\n\n    @Test\n    public void test() {\n        hiveShell.start();\n        Assertions.assertEquals(\"tez\", hiveShell.getHiveConf().getVar(HiveConf.ConfVars.HIVE_EXECUTION_ENGINE));\n    }\n\n}\n"
  },
  {
    "path": "src/test/java/com/klarna/hiverunner/SetPropertyTest.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner;\n\nimport com.klarna.hiverunner.annotations.HiveSQL;\nimport org.junit.jupiter.api.Assertions;\nimport org.junit.jupiter.api.Test;\nimport org.junit.jupiter.api.extension.ExtendWith;\n\n@ExtendWith(HiveRunnerExtension.class)\npublic class SetPropertyTest {\n\n    @HiveSQL(files = {}, autoStart = false)\n    private HiveShell shell;\n\n    @Test\n    public void propertyShouldNotBeSetIfShellIsAlreadyStarted() {\n        shell.start();\n        Assertions.assertThrows(IllegalStateException.class, () -> shell.setHiveConfValue(\"foo\", \"bar\"));\n    }\n\n    @Test\n    public void propertyShouldBeSetInHiveConfiguration() {\n        shell.setHiveConfValue(\"foo\", \"bar\");\n        shell.start();\n        Assertions.assertEquals(\"bar\", shell.getHiveConf().get(\"foo\"));\n    }\n\n\n}\n"
  },
  {
    "path": "src/test/java/com/klarna/hiverunner/SetTest.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner;\n\nimport org.junit.jupiter.api.Test;\nimport org.junit.jupiter.api.extension.ExtendWith;\n\nimport com.klarna.hiverunner.annotations.HiveSQL;\n\nimport java.nio.file.Paths;\n\n@ExtendWith(HiveRunnerExtension.class)\npublic class SetTest {\n\n    @HiveSQL(files = {}, autoStart = true)\n    private HiveShell shell;\n\n    /**\n     *  This test doesn't actually fail but if it shows up as terminated in IntelliJ (which we can't assert on)\n     *  then there is a problem.\n     *\n     *  See https://github.com/klarna/HiveRunner/issues/94 for more details.\n     */\n    @Test\n    public void testWithSet() {\n        this.shell.execute(Paths.get(\"src/test/resources/SetTest/test_with_set.hql\"));\n    }\n\n}\n"
  },
  {
    "path": "src/test/java/com/klarna/hiverunner/SlowlyFailingUdf.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner;\n\nimport org.apache.hadoop.hive.ql.exec.UDF;\nimport org.apache.hadoop.io.Text;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n\npublic class SlowlyFailingUdf extends UDF {\n\n    private static final Logger LOGGER = LoggerFactory.getLogger(SlowlyFailingUdf.class);\n\n\n    public Text evaluate(Text value) throws InterruptedException {\n        /**\n         * Sleep a little while so that the timeout thread will have time to take the synchronize lock\n         */\n        Thread.sleep(1000);\n        // Fail!\n        throw new RuntimeException(\"FAIL\");\n    }\n}\n"
  },
  {
    "path": "src/test/java/com/klarna/hiverunner/TestMethodIntegrityTest.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner;\n\nimport com.klarna.hiverunner.annotations.HiveSQL;\nimport org.junit.jupiter.api.Assertions;\nimport org.junit.jupiter.api.Test;\nimport org.junit.jupiter.api.extension.ExtendWith;\n\nimport java.util.Arrays;\nimport java.util.HashSet;\nimport java.util.List;\n\n@ExtendWith(HiveRunnerExtension.class)\npublic class TestMethodIntegrityTest {\n\n    @HiveSQL(files = {}, autoStart = false)\n    public HiveShell shell;\n\n    @Test\n    public void collisionCourseTestMethodOne() {\n        shell.addResource(\"${hiveconf:hadoop.tmp.dir}/foo/bar/data1.csv\", \"1\\n2\\n3\");\n        shell.addResource(\"${hiveconf:hadoop.tmp.dir}/foo/bar/data2.csv\", \"4\\n5\");\n        shell.addSetupScript(\"create database foo;\");\n        shell.addSetupScript(\"\" +\n                \" CREATE table foo.bar(id int)\" +\n                \" ROW FORMAT DELIMITED FIELDS TERMINATED BY ','\" +\n                \" STORED AS TEXTFILE\" +\n                \" LOCATION '${hiveconf:hadoop.tmp.dir}/foo/bar';\");\n        shell.start();\n        List<String> actual = shell.executeQuery(\"select * from foo.bar\");\n        List<String> expected = Arrays.asList(\"1\", \"2\", \"3\", \"4\", \"5\");\n        Assertions.assertEquals(new HashSet<>(expected), new HashSet<>(actual));\n\n    }\n\n    
@Test\n    public void collisionCourseTestMethodTwo() {\n        shell.addResource(\"${hiveconf:hadoop.tmp.dir}/foo/bar/data1.csv\", \"9\\n2\\n8\");\n        shell.addResource(\"${hiveconf:hadoop.tmp.dir}/foo/bar/data3.csv\", \"6\\n7\");\n        shell.addSetupScript(\"create database foo;\");\n        shell.addSetupScript(\"\" +\n                \" CREATE table foo.bar(id int)\" +\n                \" ROW FORMAT DELIMITED FIELDS TERMINATED BY ','\" +\n                \" STORED AS TEXTFILE\" +\n                \" LOCATION '${hiveconf:hadoop.tmp.dir}/foo/bar';\");\n        shell.start();\n        List<String> actual = shell.executeQuery(\"select * from foo.bar\");\n        List<String> expected = Arrays.asList(\"2\", \"6\", \"7\", \"8\", \"9\");\n        Assertions.assertEquals(new HashSet<>(expected), new HashSet<>(actual));\n\n    }\n\n}\n"
  },
  {
    "path": "src/test/java/com/klarna/hiverunner/TimeoutAndRetryTest.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner;\n\nimport com.klarna.hiverunner.annotations.HiveRunnerSetup;\nimport com.klarna.hiverunner.annotations.HiveSQL;\nimport com.klarna.hiverunner.config.HiveRunnerConfig;\nimport org.junit.Before;\nimport org.junit.Ignore;\nimport org.junit.Test;\nimport org.junit.runner.RunWith;\n\n/**\n * Test cases for verifying the Timeout functionality of HiveRunner.\n *\n * Due to timing issues these test cases may fail on a low resource test environment. In that case try raising the\n * Timeout by setting the 'TimeoutAndRetryTest.timeout.seconds' property in pom.xml or by passing it via command line\n * like -DTimeoutAndRetryTest.timeout.seconds=60\n */\n@RunWith(StandaloneHiveRunner.class)\npublic class TimeoutAndRetryTest {\n\n    @HiveRunnerSetup\n    public final static HiveRunnerConfig CONFIG = new HiveRunnerConfig() {{\n        setTimeoutEnabled(true);\n        String timoutSeconds = System.getProperty(\"TimeoutAndRetryTest.timeout.seconds\");\n        setTimeoutSeconds(timoutSeconds == null ? 30 : Integer.parseInt(timoutSeconds));\n        setTimeoutRetries(2);\n    }};\n\n\n    /**\n     * Define the script files under test. 
The files will be loaded in the given order.\n     * <p/>\n     * The HiveRunner instantiates and injects the HiveShell\n     */\n    @HiveSQL(files = {})\n    private HiveShell hiveShell;\n\n    @Before\n    public void prepare() {\n        String disableTimeout = System.getProperty(\"disableTimeout\");\n        if (disableTimeout != null && Boolean.parseBoolean(disableTimeout)) {\n            System.out.println(\"Terminating test with success because timeout is disabled.\");\n        } else {\n            System.out.println(hiveShell.getBaseDir().getRoot());\n            System.out.println(hiveShell.executeQuery(\"show databases\"));\n            hiveShell.execute(\"create database baz\");\n            System.out.println(hiveShell.executeQuery(\"describe database baz\"));\n            hiveShell.execute(\"use baz\");\n\n            hiveShell.execute(\"create temporary function nonstop as 'com.klarna.hiverunner.NeverEndingUdf'\");\n\n            hiveShell.execute(\"create table foo (bar string)\");\n\n            hiveShell.execute(\"insert into table foo values ('a'), ('b'), ('c')\");\n        }\n    }\n\n    /**\n     * This test should fail after a number of retries. 
It's not possible to expect the TimeoutException thrown by\n     * the ThrowOnTimeout statement so this test is ignored.\n     */\n    @Ignore\n    @Test\n    public void neverEnd() {\n        hiveShell.executeQuery(\"select nonstop(bar) from foo\");\n    }\n\n    @Test(expected = IllegalArgumentException.class)\n    public void expectTest() {\n        throw new IllegalArgumentException(\"This should be expected\");\n    }\n\n    @Test(expected = TimeoutException.class)\n    public void expectTimoutTest() {\n        throw new TimeoutException(\"This should be expected\");\n    }\n\n    private static int throwOnSecondRunTimouts = 0;\n\n    @Test(expected = ArrayIndexOutOfBoundsException.class)\n    public void throwOnSecondRun() {\n        if (throwOnSecondRunTimouts == 0) {\n            throwOnSecondRunTimouts++;\n            try {\n                hiveShell.executeQuery(\"select nonstop(bar) from foo\");\n            } catch (Throwable e) {\n                System.out.println(\"Ignoring exception: \" + e.getMessage());\n                e.printStackTrace();\n            }\n        } else {\n\n            System.out.println(\"SECOND RUN!!!!\");\n\n            throw new ArrayIndexOutOfBoundsException();\n        }\n\n    }\n\n\n    private static int throwOnSecondRunTimouts2 = 0;\n\n    @Test(expected = TimeoutException.class)\n    public void throwOnSecondRun2() {\n        if (throwOnSecondRunTimouts2 == 0) {\n            throwOnSecondRunTimouts2++;\n            try {\n                hiveShell.executeQuery(\"select nonstop(bar) from foo\");\n            } catch (Throwable e) {\n                System.out.println(\"Ignoring exception: \" + e.getMessage());\n                e.printStackTrace();\n            }\n        } else {\n\n            System.out.println(\"SECOND RUN!!!!\");\n\n            throw new TimeoutException();\n        }\n\n    }\n\n    private static int endOnSecondRunTimeouts = 0;\n\n    @Test\n    public void endOnSecondRun() {\n        if 
(endOnSecondRunTimeouts == 0) {\n            endOnSecondRunTimeouts++;\n            try {\n                hiveShell.executeQuery(\"select nonstop(bar) from foo\");\n            } catch (Throwable e) {\n                System.out.println(\"Ignoring exception: \" + e.getMessage());\n                e.printStackTrace();\n            }\n        } else {\n            System.out.println(\"SECOND RUN!!!!\");\n        }\n    }\n\n\n}\n"
  },
  {
    "path": "src/test/java/com/klarna/hiverunner/ToUpperCaseSerDe.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner;\n\nimport org.apache.hadoop.conf.Configuration;\nimport org.apache.hadoop.hive.serde.serdeConstants;\nimport org.apache.hadoop.hive.serde2.AbstractSerDe;\nimport org.apache.hadoop.hive.serde2.SerDeException;\nimport org.apache.hadoop.hive.serde2.SerDeStats;\nimport org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;\nimport org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;\nimport org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;\nimport org.apache.hadoop.io.Text;\nimport org.apache.hadoop.io.Writable;\n\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.List;\nimport java.util.Properties;\n\npublic class ToUpperCaseSerDe extends AbstractSerDe {\n\n    private List<String> columns;\n\n    @Override\n    public void initialize(Configuration configuration, Properties properties) throws SerDeException {\n        columns = Arrays.asList(((String) properties.get(serdeConstants.LIST_COLUMNS)).split(\",\"));\n    }\n\n    @Override\n    public Class<? 
extends Writable> getSerializedClass() {\n        return Text.class;\n    }\n\n    @Override\n    public Writable serialize(Object o, ObjectInspector objectInspector) throws SerDeException {\n        throw new SerDeException(\"Not implemented in test fixture\");\n    }\n\n    @Override\n    public Object deserialize(Writable writable) throws SerDeException {\n        String[] values = writable.toString().toUpperCase().split(\",\");\n        return Arrays.asList(values);\n    }\n\n    @Override\n    public ObjectInspector getObjectInspector() throws SerDeException {\n        // Constructing the row ObjectInspector:\n        // The row consists of some string columns, each column will be a java\n        // String object.\n        List<ObjectInspector> columnOIs = new ArrayList<>(columns.size());\n\n        for (int i = 0; i < columns.size(); i++) {\n            columnOIs.add(PrimitiveObjectInspectorFactory.javaStringObjectInspector);\n        }\n\n        // Standard Struct uses ArrayList to store the row.\n        return ObjectInspectorFactory.getStandardStructObjectInspector(columns, columnOIs);\n\n    }\n\n    @Override\n    public SerDeStats getSerDeStats() {\n        return null;\n    }\n}\n\n"
  },
  {
    "path": "src/test/java/com/klarna/hiverunner/UnresolvedResourcePathTest.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner;\n\nimport com.klarna.hiverunner.annotations.HiveSQL;\nimport org.junit.jupiter.api.Assertions;\nimport org.junit.jupiter.api.Test;\nimport org.junit.jupiter.api.extension.ExtendWith;\n\nimport java.io.File;\n\n@ExtendWith(HiveRunnerExtension.class)\npublic class UnresolvedResourcePathTest {\n\n\n    @HiveSQL(files = {}, autoStart = false)\n    private HiveShell shell;\n\n\n    @Test\n    public void resourceFileShouldNotBeCreatedIfReferencesAreUnresolved() {\n        shell.addResource(\"${hiveconf:foo}/bar/baz.csv\", \"A,B,C\");\n        Assertions.assertThrows(IllegalArgumentException.class, () -> shell.start());\n    }\n\n    @Test\n    public void resourceFileShouldBeCreatedInsideTempDir() {\n        shell.addResource(\"${hiveconf:hadoop.tmp.dir}/bar/baz.csv\", \"A,B,C\");\n        shell.start();\n        Assertions.assertTrue(new File(shell.getHiveConf().get(\"hadoop.tmp.dir\"), \"bar/baz.csv\").exists());\n    }\n\n    @Test\n    public void resourceFilePathShouldAlwaysBeInsideTempDir() {\n        shell.addResource(\"/bar/baz.csv\", \"A,B,C\");\n        Assertions.assertThrows(IllegalArgumentException.class, () -> shell.start());\n    }\n\n\n}\n"
  },
  {
    "path": "src/test/java/com/klarna/hiverunner/UserDefinedFunctionTest.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner;\n\nimport com.klarna.hiverunner.annotations.HiveSQL;\nimport com.klarna.hiverunner.annotations.HiveSetupScript;\nimport org.junit.jupiter.api.Assertions;\nimport org.junit.jupiter.api.Test;\nimport org.junit.jupiter.api.extension.ExtendWith;\n\nimport java.util.Arrays;\nimport java.util.List;\n\n@ExtendWith(HiveRunnerExtension.class)\npublic class UserDefinedFunctionTest {\n\n\n    private final String hdfsSource = \"${hiveconf:hadoop.tmp.dir}/udf\";\n\n    @HiveSetupScript\n    String setup =\n            \"  CREATE TABLE udf_test (\" +\n                    \" id int,\" +\n                    \" value string\" +\n                    \"  )\" +\n                    \"  ROW FORMAT DELIMITED FIELDS TERMINATED BY '\\t'\" +\n                    \"  STORED AS TEXTFILE\" +\n                    \"  LOCATION '\" + hdfsSource + \"' \";\n\n\n    @HiveSQL(files = {}, autoStart = false)\n    public HiveShell hiveShell;\n\n    @Test\n    public void udfMax() {\n        hiveShell.addResource(hdfsSource + \"/data.csv\",\n                \"123\\tv1\\n\" +\n                        \"124\\tv2\\n\" +\n                        \"125\\tv3\");\n        hiveShell.start();\n        Assertions.assertEquals(Arrays.asList(\"125\"), hiveShell.executeQuery(\"SELECT max(id) FROM udf_test\"));\n 
   }\n\n    @Test\n    public void udfMin() {\n        hiveShell.addResource(hdfsSource + \"/data.csv\",\n                \"123\\tv1\\n\" +\n                        \"124\\tv2\\n\" +\n                        \"125\\tv3\");\n        hiveShell.start();\n        Assertions.assertEquals(Arrays.asList(\"123\"), hiveShell.executeQuery(\"SELECT min(id) FROM udf_test\"));\n    }\n\n    @Test\n    public void regexp_extract() {\n        hiveShell.addResource(hdfsSource + \"/data.csv\", \"1\\t123ABC\");\n        hiveShell.start();\n        List<String> expected = Arrays.asList(\"123\");\n        List<String> actual = hiveShell.executeQuery(\"SELECT regexp_extract(value, '([0-9]*)[A-Z]*', 1) FROM udf_test\");\n        Assertions.assertEquals(expected, actual);\n    }\n\n\n}\n\n"
  },
  {
    "path": "src/test/java/com/klarna/hiverunner/builder/HiveShellBaseTest.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner.builder;\n\nimport static com.google.common.base.Charsets.UTF_8;\n\nimport static org.hamcrest.CoreMatchers.is;\nimport static org.junit.Assert.assertThat;\nimport static org.mockito.Mockito.verify;\nimport static org.mockito.Mockito.when;\n\nimport com.google.common.io.Files;\nimport com.klarna.hiverunner.HiveServerContainer;\nimport com.klarna.hiverunner.HiveShell;\nimport com.klarna.hiverunner.sql.cli.CommandShellEmulator;\nimport com.klarna.hiverunner.sql.cli.beeline.BeelineEmulator;\nimport com.klarna.hiverunner.sql.cli.hive.HiveCliEmulator;\n\nimport org.apache.commons.collections.MapUtils;\nimport org.apache.hadoop.hive.conf.HiveConf;\nimport org.apache.hive.service.cli.CLIService;\nimport org.junit.Rule;\nimport org.junit.Test;\nimport org.junit.rules.TemporaryFolder;\nimport org.junit.runner.RunWith;\nimport org.mockito.ArgumentCaptor;\nimport org.mockito.Captor;\nimport org.mockito.Mockito;\nimport org.mockito.junit.MockitoJUnitRunner;\n\nimport java.io.File;\nimport java.io.IOException;\nimport java.nio.file.Paths;\nimport java.util.Arrays;\nimport java.util.HashMap;\nimport java.util.List;\nimport java.util.Map;\n\n@RunWith(MockitoJUnitRunner.class)\npublic class HiveShellBaseTest {\n\n    @Rule\n    public TemporaryFolder tempFolder = new 
TemporaryFolder();\n\n    private HiveServerContainer container;\n    @Captor\n    private ArgumentCaptor<String> hiveSqlStatementCaptor;\n\n    @Test(expected = IllegalStateException.class)\n    public void variableSubstitutionShouldBlowUpIfShellIsNotStarted() {\n        HiveShell shell = createHiveCliShell(\"origin\", \"spanish\");\n        shell.expandVariableSubstitutes(\"The ${hiveconf:origin} fox\");\n    }\n\n    @Test\n    public void setupScriptMayBeAddedBeforeStart() throws IOException {\n        HiveShell shell = createHiveCliShell();\n        shell.addSetupScript(\"foo\");\n        shell.addSetupScripts(tempFolder.newFile(\"foo\"));\n    }\n\n    @Test\n    public void setupScriptsShouldBeExecutedAtStart() throws IOException {\n        HiveShell shell = createHiveCliShell();\n        shell.addSetupScript(\"foo\");\n        shell.addSetupScripts(tempFolder.newFile(\"foo\"));\n        shell.start();\n    }\n\n\n    @Test(expected = IllegalStateException.class)\n    public void setupScriptMayNotBeAddedAfterShellIsStarted() {\n        HiveShell shell = createHiveCliShell();\n        shell.start();\n        shell.addSetupScript(\"foo\");\n    }\n\n    @Test(expected = IllegalArgumentException.class)\n    public void invalidFilePathShouldThrowException() {\n        HiveShell shell = createHiveCliShell();\n        shell.addSetupScripts(new File(\"foo\"));\n    }\n\n\n    @Test(expected = IllegalStateException.class)\n    public void setupScriptsMayNotBeAddedAfterShellIsStarted() throws IOException {\n        HiveShell shell = createHiveCliShell();\n        shell.start();\n        shell.addSetupScripts(tempFolder.newFile(\"foo\"));\n    }\n\n    @Test\n    public void executeScriptFile() throws IOException {\n        String hiveSql = \"use default\";\n\n        File file = new File(tempFolder.getRoot(), \"script.sql\");\n        Files.write(hiveSql, file, UTF_8);\n\n        HiveShell shell = createHiveCliShell();\n        shell.start();\n        
shell.execute(file);\n\n        verify(container).executeStatement(hiveSql);\n    }\n\n    @Test\n    public void executeScriptCharsetFile() throws IOException {\n        String hiveSql = \"use default\";\n\n        File file = new File(tempFolder.getRoot(), \"script.sql\");\n        Files.write(hiveSql, file, UTF_8);\n\n        HiveShell shell = createHiveCliShell();\n        shell.start();\n        shell.execute(UTF_8, file);\n\n        verify(container).executeStatement(hiveSql);\n    }\n\n    @Test\n    public void executeScriptPath() throws IOException {\n        String hiveSql = \"use default\";\n\n        File file = new File(tempFolder.getRoot(), \"script.sql\");\n        Files.write(hiveSql, file, UTF_8);\n\n        HiveShell shell = createHiveCliShell();\n        shell.start();\n        shell.execute(Paths.get(file.toURI()));\n\n        verify(container).executeStatement(hiveSql);\n    }\n\n    @Test\n    public void executeScriptCharsetPath() throws IOException {\n        String hiveSql = \"use default\";\n\n        File file = new File(tempFolder.getRoot(), \"script.sql\");\n        Files.write(hiveSql, file, UTF_8);\n\n        HiveShell shell = createHiveCliShell();\n        shell.start();\n        shell.execute(UTF_8, Paths.get(file.toURI()));\n\n        verify(container).executeStatement(hiveSql);\n    }\n\n    @Test(expected = IllegalArgumentException.class)\n    public void executeScriptFileNotExists() throws IOException {\n        File file = new File(tempFolder.getRoot(), \"script.sql\");\n\n        HiveShell shell = createHiveCliShell();\n        shell.start();\n        shell.execute(UTF_8, Paths.get(file.toURI()));\n    }\n\n    @Test(expected = IllegalStateException.class)\n    public void executeScriptNotStarted() throws IOException {\n        File file = new File(tempFolder.getRoot(), \"script.sql\");\n\n        HiveShell shell = createHiveCliShell();\n        shell.execute(UTF_8, Paths.get(file.toURI()));\n    }\n\n    @Test\n    public 
void executeQueryFromFile() throws IOException {\n        HiveShell shell = createHiveCliShell();\n        shell.start();\n\n        String statement = \"select current_database(), NULL, 100\";\n        when(container.executeStatement(statement)).thenReturn(Arrays.<Object[]>asList(new Object[]{\"default\", null, 100}));\n        String hiveSql = statement + \";\";\n\n        File file = tempFolder.newFile(\"script.sql\");\n        Files.write(hiveSql, file, UTF_8);\n\n        List<String> results = shell.executeQuery(UTF_8, Paths.get(file.toURI()), \"xxx\", \"yyy\");\n        assertThat(results.size(), is(1));\n        assertThat(results.get(0), is(\"defaultxxxyyyxxx100\"));\n    }\n\n    @Test(expected = IllegalArgumentException.class)\n    public void executeQueryFromFileMoreThanOneStatement() throws IOException {\n        HiveShell shell = createHiveCliShell();\n        shell.start();\n\n        String hiveSql = \"use default;\\nselect current_database(), NULL, 100;\";\n\n        File file = new File(tempFolder.getRoot(), \"script.sql\");\n        Files.write(hiveSql, file, UTF_8);\n\n        shell.executeQuery(UTF_8, Paths.get(file.toURI()), \"xxx\", \"yyy\");\n    }\n\n    @Test(expected = IllegalArgumentException.class)\n    public void executeQueryFromFileZeroStatements() throws IOException {\n        HiveShell shell = createHiveCliShell();\n        shell.start();\n\n        String hiveSql = \"\";\n\n        File file = new File(tempFolder.getRoot(), \"script.sql\");\n        Files.write(hiveSql, file, UTF_8);\n\n        shell.executeQuery(UTF_8, Paths.get(file.toURI()), \"xxx\", \"yyy\");\n    }\n\n    @Test\n    public void scriptFilesAreImportedInQueries() throws IOException {\n        String hiveSql = \"use default\";\n\n        File importedFile = new File(tempFolder.getRoot(), \"imported_script.sql\");\n        Files.write(hiveSql, importedFile, UTF_8);\n\n        HiveShell shell = createHiveCliShell();\n        shell.start();\n\n        String 
importhiveSql = \"source \" + importedFile.getAbsolutePath();\n        List<String> results = shell.executeQuery(importhiveSql);\n\n        assertThat(results.size(), is(0));\n        verify(container).executeStatement(hiveSql);\n    }\n\n    @Test\n    public void scriptFilesAreImportedInOtherScriptsHiveCli() throws IOException {\n        String hiveSql = \"use default\";\n\n        File importedFile = new File(tempFolder.getRoot(), \"imported_script.sql\");\n        Files.write(hiveSql, importedFile, UTF_8);\n\n        HiveShell shell = createHiveCliShell();\n        shell.start();\n\n        String importhiveSql = \"source \" + importedFile.getAbsolutePath();\n        File file = new File(tempFolder.getRoot(), \"script.sql\");\n        Files.write(importhiveSql, file, UTF_8);\n\n        shell.execute(file);\n\n        verify(container).executeStatement(hiveSql);\n    }\n\n    @Test\n    public void scriptFilesAreImportedInOtherScriptsBeeline() throws IOException {\n        String hiveSql = \"use default\";\n\n        File importedFile = new File(tempFolder.getRoot(), \"imported_script.sql\");\n        Files.write(hiveSql, importedFile, UTF_8);\n\n        HiveShell shell = createBeelineShell();\n        shell.start();\n\n        String importhiveSql = \"!run \" + importedFile.getAbsolutePath();\n        File file = new File(tempFolder.getRoot(), \"script.sql\");\n        Files.write(importhiveSql, file, UTF_8);\n\n        shell.execute(file);\n\n        verify(container).executeStatement(hiveSql);\n    }\n\n    private HiveShell createHiveCliShell(String... keyValues) {\n        return createHiveShell(HiveCliEmulator.INSTANCE, keyValues);\n    }\n\n    private HiveShell createBeelineShell(String... keyValues) {\n        return createHiveShell(BeelineEmulator.INSTANCE, keyValues);\n    }\n\n    private HiveShell createHiveShell(CommandShellEmulator emulation, String... 
keyValues) {\n        Map<String, String> hiveConf = MapUtils.putAll(new HashMap(), keyValues);\n        HiveConf conf = createHiveconf(hiveConf);\n\n        CLIService client = Mockito.mock(CLIService.class);\n\n        container = Mockito.mock(HiveServerContainer.class);\n\n        List<String> setupScripts = Arrays.asList();\n        List<HiveResource> hiveResources = Arrays.asList();\n        List<Script> scriptsUnderTest = Arrays.asList();\n\n        return new HiveShellBase(container, hiveConf, setupScripts, hiveResources, scriptsUnderTest, emulation);\n    }\n\n\n    private HiveConf createHiveconf(Map<String, String> conf) {\n        HiveConf hiveConf = new HiveConf();\n        hiveConf.clear();\n\n        for (Map.Entry<String, String> keyValueEntry : conf.entrySet()) {\n            hiveConf.set(keyValueEntry.getKey(), keyValueEntry.getValue());\n        }\n        return hiveConf;\n    }\n\n\n}\n"
  },
  {
    "path": "src/test/java/com/klarna/hiverunner/config/HiveRunnerConfigTest.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner.config;\n\nimport java.util.HashMap;\nimport java.util.Map;\nimport java.util.Properties;\n\nimport org.apache.hadoop.hive.conf.HiveConf;\nimport org.junit.jupiter.api.Assertions;\nimport org.junit.jupiter.api.Test;\n\nimport com.klarna.hiverunner.sql.cli.CommandShellEmulatorFactory;\nimport com.klarna.hiverunner.sql.cli.beeline.BeelineEmulator;\nimport com.klarna.hiverunner.sql.cli.hive.HiveCliEmulator;\n\npublic class HiveRunnerConfigTest {\n\n    @Test\n    public void testSetHiveconfFromSystemProperty() {\n\n        Properties sysProps = new Properties();\n        sysProps.put(\"hiveconf_foo.bar\", \"false\");\n        sysProps.put(\"hiveconf_fox.love\", \"1000\");\n\n        Map<String, String> expected = new HashMap<>();\n        expected.put(\"foo.bar\", \"false\");\n        expected.put(\"fox.love\", \"1000\");\n\n        HiveRunnerConfig config = new HiveRunnerConfig(sysProps);\n\n        Assertions.assertEquals(expected, config.getHiveConfSystemOverride());\n    }\n\n    @Test\n    public void testSetHiveExecutionEngine() {\n        Properties sysProps = new Properties();\n        sysProps.put(\"hiveconf_\" + HiveConf.ConfVars.HIVE_EXECUTION_ENGINE.varname, \"foo\");\n        HiveRunnerConfig config = new HiveRunnerConfig(sysProps);\n        
Assertions.assertEquals(\"foo\", config.getHiveExecutionEngine());\n    }\n\n    @Test\n    public void testEnableTimeout() {\n        Properties sysProps = new Properties();\n        sysProps.put(HiveRunnerConfig.ENABLE_TIMEOUT_PROPERTY_NAME,\n                String.valueOf(!HiveRunnerConfig.ENABLE_TIMEOUT_DEFAULT));\n        HiveRunnerConfig config = new HiveRunnerConfig(sysProps);\n        Assertions.assertEquals(!HiveRunnerConfig.ENABLE_TIMEOUT_DEFAULT, config.isTimeoutEnabled());\n    }\n\n    @Test\n    public void testTimeoutSeconds() {\n        Properties sysProps = new Properties();\n        sysProps.put(HiveRunnerConfig.TIMEOUT_SECONDS_PROPERTY_NAME, \"567\");\n        HiveRunnerConfig config = new HiveRunnerConfig(new Properties(sysProps));\n        Assertions.assertEquals(567, config.getTimeoutSeconds());\n    }\n\n    @Test\n    public void testTimeoutRetries() {\n        Properties sysProps = new Properties();\n        sysProps.put(HiveRunnerConfig.TIMEOUT_RETRIES_PROPERTY_NAME, \"678\");\n        HiveRunnerConfig config = new HiveRunnerConfig(new Properties(sysProps));\n        Assertions.assertEquals(678, config.getTimeoutRetries());\n    }\n\n    @Test\n    public void testCommandShellEmulator() {\n        Properties sysProps = new Properties();\n        sysProps.put(HiveRunnerConfig.COMMAND_SHELL_EMULATOR_PROPERTY_NAME, \"BEELINE\");\n        HiveRunnerConfig config = new HiveRunnerConfig(new Properties(sysProps));\n        Assertions.assertEquals(BeelineEmulator.INSTANCE, config.getCommandShellEmulator());\n\n        sysProps.put(HiveRunnerConfig.COMMAND_SHELL_EMULATOR_PROPERTY_NAME, \"beeline\");\n        config = new HiveRunnerConfig(new Properties(sysProps));\n        Assertions.assertEquals(BeelineEmulator.INSTANCE, config.getCommandShellEmulator());\n\n        sysProps.put(HiveRunnerConfig.COMMAND_SHELL_EMULATOR_PROPERTY_NAME, \"BeElInE\");\n        config = new HiveRunnerConfig(new Properties(sysProps));\n        
Assertions.assertEquals(BeelineEmulator.INSTANCE, config.getCommandShellEmulator());\n    }\n\n    @Test\n    public void testSetCommandShellEmulator() {\n        HiveRunnerConfig config = new HiveRunnerConfig(new Properties());\n        config.setCommandShellEmulator(HiveCliEmulator.INSTANCE);\n        Assertions.assertEquals(HiveCliEmulator.INSTANCE, config.getCommandShellEmulator());\n        config.setCommandShellEmulator(BeelineEmulator.INSTANCE);\n        Assertions.assertEquals(BeelineEmulator.INSTANCE, config.getCommandShellEmulator());\n    }\n\n    @Test\n    public void testEnableTimeoutDefault() {\n        HiveRunnerConfig config = new HiveRunnerConfig(new Properties());\n        Assertions.assertEquals(HiveRunnerConfig.ENABLE_TIMEOUT_DEFAULT, config.isTimeoutEnabled());\n    }\n\n    @Test\n    public void testTimeoutSecondsDefault() {\n        HiveRunnerConfig config = new HiveRunnerConfig(new Properties());\n        Assertions.assertEquals(HiveRunnerConfig.TIMEOUT_SECONDS_DEFAULT, config.getTimeoutSeconds());\n    }\n\n    @Test\n    public void testTimeoutRetriesDefault() {\n        HiveRunnerConfig config = new HiveRunnerConfig(new Properties());\n        Assertions.assertEquals(HiveRunnerConfig.TIMEOUT_RETRIES_DEFAULT, config.getTimeoutRetries());\n    }\n\n    @Test\n    public void testCommandShellEmulatorDefault() {\n        HiveRunnerConfig config = new HiveRunnerConfig(new Properties());\n        Assertions.assertEquals(HiveCliEmulator.INSTANCE, CommandShellEmulatorFactory.valueOf(HiveRunnerConfig.COMMAND_SHELL_EMULATOR_DEFAULT));\n        Assertions.assertEquals(HiveCliEmulator.INSTANCE, config.getCommandShellEmulator());\n    }\n\n}\n"
  },
  {
    "path": "src/test/java/com/klarna/hiverunner/data/ConvertersTest.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner.data;\n\nimport static org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.binaryTypeInfo;\nimport static org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.booleanTypeInfo;\nimport static org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.byteTypeInfo;\nimport static org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.charTypeInfo;\nimport static org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.dateTypeInfo;\nimport static org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.decimalTypeInfo;\nimport static org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.doubleTypeInfo;\nimport static org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.floatTypeInfo;\nimport static org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.intTypeInfo;\nimport static org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.longTypeInfo;\nimport static org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.shortTypeInfo;\nimport static org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.stringTypeInfo;\nimport static org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.timestampTypeInfo;\nimport static org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.unknownTypeInfo;\nimport static 
org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.varcharTypeInfo;\nimport static org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.voidTypeInfo;\nimport static org.junit.jupiter.api.Assertions.assertArrayEquals;\nimport static org.junit.jupiter.api.Assertions.assertEquals;\nimport static org.junit.jupiter.api.Assertions.assertNull;\nimport static org.junit.jupiter.api.Assertions.assertTrue;\nimport static org.junit.jupiter.api.Assertions.fail;\n\nimport java.sql.Date;\nimport java.sql.Timestamp;\n\nimport org.apache.commons.beanutils.ConversionException;\nimport org.apache.hadoop.hive.common.type.HiveChar;\nimport org.apache.hadoop.hive.common.type.HiveDecimal;\nimport org.apache.hadoop.hive.common.type.HiveVarchar;\nimport org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;\nimport org.junit.jupiter.api.Test;\n\npublic class ConvertersTest {\n\n    @Test\n    public void inputNull() {\n        for (PrimitiveTypeInfo typeInfo : Converters.TYPES.keySet()) {\n            assertNull(Converters.convert(null, typeInfo));\n        }\n    }\n\n    @Test\n    public void inputNotString() {\n        for (PrimitiveTypeInfo typeInfo : Converters.TYPES.keySet()) {\n            assertEquals(0, Converters.convert(0, typeInfo));\n        }\n    }\n\n    @Test\n    public void stringTypeInfo() {\n        assertEquals(\"foo\", Converters.convert(\"foo\", stringTypeInfo));\n    }\n\n    @Test\n    public void booleanTypeInfo() {\n        assertConversionException(\"foo\", booleanTypeInfo);\n        assertEquals(true, Converters.convert(\"true\", booleanTypeInfo));\n        assertEquals(false, Converters.convert(\"false\", booleanTypeInfo));\n    }\n\n    @Test\n    public void byteTypeInfo() {\n        assertConversionException(\"foo\", byteTypeInfo);\n        assertConversionException(\"-129\", byteTypeInfo);\n        assertEquals((byte) -128, Converters.convert(\"-128\", byteTypeInfo));\n        assertEquals((byte) 127, Converters.convert(\"127\", 
byteTypeInfo));\n        assertConversionException(\"128\", byteTypeInfo);\n    }\n\n    @Test\n    public void shortTypeInfo() {\n        assertConversionException(\"foo\", shortTypeInfo);\n        assertConversionException(\"-32769\", shortTypeInfo);\n        assertEquals((short) -32768, Converters.convert(\"-32768\", shortTypeInfo));\n        assertEquals((short) 32767, Converters.convert(\"32767\", shortTypeInfo));\n        assertConversionException(\"32768\", shortTypeInfo);\n    }\n\n    @Test\n    public void intTypeInfo() {\n        assertConversionException(\"foo\", intTypeInfo);\n        assertConversionException(\"-2147483649\", intTypeInfo);\n        assertEquals(-2147483648, Converters.convert(\"-2147483648\", intTypeInfo));\n        assertEquals(2147483647, Converters.convert(\"2147483647\", intTypeInfo));\n        assertConversionException(\"2147483648\", intTypeInfo);\n    }\n\n    @Test\n    public void longTypeInfo() {\n        assertConversionException(\"foo\", longTypeInfo);\n        assertConversionException(\"-9223372036854775809\", longTypeInfo);\n        assertEquals(-9223372036854775808L, Converters.convert(\"-9223372036854775808\", longTypeInfo));\n        assertEquals(9223372036854775807L, Converters.convert(\"9223372036854775807\", longTypeInfo));\n        assertConversionException(\"9223372036854775808\", longTypeInfo);\n    }\n\n    @Test\n    public void floatTypeInfo() {\n        assertConversionException(\"foo\", floatTypeInfo);\n        assertEquals(0F, Converters.convert(\"0\", floatTypeInfo));\n    }\n\n    @Test\n    public void doubleTypeInfo() {\n        assertConversionException(\"foo\", doubleTypeInfo);\n        assertEquals(0D, Converters.convert(\"0\", doubleTypeInfo));\n    }\n\n    @Test\n    public void dateTypeInfo() {\n        assertConversionException(\"foo\", dateTypeInfo);\n        assertEquals(org.apache.hadoop.hive.common.type.Date.valueOf(\"2015-10-15\"),\n                Converters.convert(\"2015-10-15\", 
dateTypeInfo));\n    }\n\n    @Test\n    public void timestampTypeInfo() {\n        assertConversionException(\"foo\", timestampTypeInfo);\n        assertEquals(org.apache.hadoop.hive.common.type.Timestamp.valueOf(\"2015-10-15 23:59:59.999\"),\n                Converters.convert(\"2015-10-15 23:59:59.999\", timestampTypeInfo));\n    }\n\n    @Test\n    public void binaryTypeInfo() {\n        assertConversionException(\"foo\", binaryTypeInfo);\n        assertArrayEquals(new byte[]{0, 1, 2}, (byte[]) Converters.convert(\"0,1,2\", binaryTypeInfo));\n    }\n\n    @Test\n    public void otherTypeInfo() {\n        assertEquals(HiveDecimal.create(\"1.234\"), Converters.convert(\"1.234\", decimalTypeInfo));\n        assertEquals(new HiveChar(\"foo\", -1), Converters.convert(\"foo\", charTypeInfo));\n        assertTrue(new HiveVarchar(\"foo\", -1).equals((HiveVarchar) Converters.convert(\"foo\", varcharTypeInfo)));\n        assertEquals(\"foo\", Converters.convert(\"foo\", unknownTypeInfo));\n        assertEquals(\"foo\", Converters.convert(\"foo\", voidTypeInfo));\n    }\n\n    private void assertConversionException(Object value, PrimitiveTypeInfo typeInfo) {\n        try {\n            System.out.println(Converters.convert(value, typeInfo));\n        } catch (ConversionException e) {\n            return;\n        }\n        fail(\"Expected \" + ConversionException.class.getSimpleName() + \" for value \" + value + \" (\"\n                + value.getClass().getSimpleName() + \") to \" + typeInfo.getTypeName());\n    }\n}\n"
  },
  {
    "path": "src/test/java/com/klarna/hiverunner/data/InsertIntoTableTest.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner.data;\n\nimport static org.mockito.Mockito.mock;\nimport static org.mockito.Mockito.verify;\nimport static org.mockito.Mockito.when;\n\nimport java.io.File;\nimport java.util.Map;\n\nimport org.apache.hive.hcatalog.data.HCatRecord;\nimport org.junit.jupiter.api.BeforeEach;\nimport org.junit.jupiter.api.Test;\nimport org.junit.jupiter.api.extension.ExtendWith;\nimport org.mockito.Mock;\nimport org.mockito.junit.jupiter.MockitoExtension;\n\nimport com.google.common.collect.Multimap;\n\n@ExtendWith(MockitoExtension.class)\npublic class InsertIntoTableTest {\n\n    @Mock\n    private TableDataBuilder builder;\n    @Mock\n    private TableDataInserter inserter;\n\n    private InsertIntoTable insert;\n\n    @BeforeEach\n    public void before() {\n        insert = new InsertIntoTable(builder, inserter);\n    }\n\n    @Test\n    public void withColumns() {\n        String[] columns = new String[]{\"columnA\", \"columnB\"};\n        insert.withColumns(columns);\n\n        verify(builder).withColumns(columns);\n    }\n\n    @Test\n    public void withAllColumns() {\n        insert.withAllColumns();\n\n        verify(builder).withAllColumns();\n    }\n\n    @Test\n    public void newRow() {\n        insert.newRow();\n\n        verify(builder).newRow();\n    }\n\n    @Test\n 
   public void addRow() {\n        Object[] row = new Object[]{\"columnA\"};\n        insert.addRow(row);\n\n        verify(builder).addRow(row);\n    }\n\n    @Test\n    public void setRow() {\n        Object[] row = new Object[]{\"columnA\"};\n        insert.setRow(row);\n\n        verify(builder).setRow(row);\n    }\n\n    @Test\n    public void addRows() {\n        File file = new File(\"foo\");\n        insert.addRowsFromTsv(file);\n\n        verify(builder).addRowsFromTsv(file);\n    }\n\n    @Test\n    public void addRowsWithFileParser() {\n        File file = new File(\"foo\");\n        FileParser parser = new TsvFileParser();\n        insert.addRowsFrom(file, parser);\n\n        verify(builder).addRowsFrom(file, parser);\n    }\n\n    @Test\n    public void copyRow() {\n        insert.copyRow();\n\n        verify(builder).copyRow();\n    }\n\n    @Test\n    public void set() {\n        insert.set(\"a\", \"b\");\n\n        verify(builder).set(\"a\", \"b\");\n    }\n\n    @Test\n    public void commit() {\n        Multimap<Map<String, String>, HCatRecord> map = mock(Multimap.class);\n        when(builder.build()).thenReturn(map);\n        insert.commit();\n\n        verify(inserter).insert(map);\n\n    }\n}\n"
  },
  {
    "path": "src/test/java/com/klarna/hiverunner/data/TableDataBuilderTest.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner.data;\n\nimport static org.junit.jupiter.api.Assertions.assertEquals;\nimport static org.mockito.Mockito.times;\nimport static org.mockito.Mockito.verify;\nimport static org.mockito.Mockito.when;\n\nimport java.io.File;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.Collection;\nimport java.util.HashMap;\nimport java.util.Iterator;\nimport java.util.List;\nimport java.util.Map;\n\nimport org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;\nimport org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;\nimport org.apache.hive.hcatalog.api.HCatTable;\nimport org.apache.hive.hcatalog.common.HCatException;\nimport org.apache.hive.hcatalog.data.HCatRecord;\nimport org.apache.hive.hcatalog.data.schema.HCatFieldSchema;\nimport org.junit.jupiter.api.Assertions;\nimport org.junit.jupiter.api.Test;\nimport org.junit.jupiter.api.extension.ExtendWith;\nimport org.mockito.Mock;\nimport org.mockito.Mockito;\nimport org.mockito.junit.jupiter.MockitoExtension;\n\nimport com.google.common.collect.Multimap;\n\n@ExtendWith(MockitoExtension.class)\npublic class TableDataBuilderTest {\n\n    private static final String DATABASE_NAME = \"test_db\";\n    private static final String TABLE_NAME = \"test_table\";\n    private static final String COLUMN_1 = 
\"column_1\";\n    private static final String PARTITION_COLUMN_1 = \"partition_column_1\";\n\n    private static final PrimitiveTypeInfo STRING = TypeInfoFactory.stringTypeInfo;\n\n    @Test\n    public void testUnknownColumnNameWithColumnMask() {\n        HCatTable table = table().cols(columns(COLUMN_1));\n        Assertions.assertThrows(IllegalArgumentException.class, () -> {\n            new TableDataBuilder(table).withColumns(\"unknown_column\");\n        });\n    }\n\n    @Test\n    public void testUnknownColumnNameOnSet() {\n        HCatTable table = table().cols(columns(COLUMN_1));\n        Assertions.assertThrows(IllegalArgumentException.class, () -> {\n            new TableDataBuilder(table).set(\"unknown_column\", \"value\");\n        });\n    }\n\n    @Mock\n    private TsvFileParser tsvFileParser;\n\n    @Test\n    public void testAddRowsFromWithMixedCaseColumnNames() {\n        File file = new File(\"\");\n        HCatTable table = table().cols(columns(\"COLUMN_1\", \"coLUMN_2\", \"column_3\"));\n        TableDataBuilder tableDataBuilder = Mockito.spy(new TableDataBuilder(table));\n\n        when(tsvFileParser.hasColumnNames()).thenReturn(true);\n        when(tsvFileParser.getColumnNames(file)).thenReturn(Arrays.asList(\"COLUMN_1\", \"coLUMN_2\", \"column_3\"));\n\n        tableDataBuilder.addRowsFrom(file, tsvFileParser);\n        verify(tableDataBuilder, times(1)).withColumns(\"column_1\", \"column_2\", \"column_3\");\n    }\n\n    @Test\n    public void testAddRowWithNoArguments() {\n        HCatTable table = table().cols(columns(COLUMN_1));\n        Assertions.assertThrows(IllegalArgumentException.class, () -> {\n            new TableDataBuilder(table).addRow();\n        });\n    }\n\n    @Test\n    public void testAddRowWithIncorrectNumberOfArguments() {\n        HCatTable table = table().cols(columns(COLUMN_1));\n        Assertions.assertThrows(IllegalArgumentException.class, () -> {\n            new TableDataBuilder(table).addRow(\"value1\", 
\"value2\");\n        });\n    }\n\n    @Test\n    public void testCopyRowWhenNoRowToCopy() {\n        HCatTable table = table().cols(columns(COLUMN_1));\n        Assertions.assertThrows(IllegalStateException.class, () -> {\n            new TableDataBuilder(table).copyRow();\n        });\n    }\n\n    @Test\n    public void testCopyRow() {\n        HCatTable table = table().cols(columns(COLUMN_1));\n\n        Multimap<Map<String, String>, HCatRecord> data = new TableDataBuilder(table).addRow(\"value\").copyRow().build();\n\n        assertEquals(2, data.size());\n        Iterator<HCatRecord> iterator = data.values().iterator();\n        HCatRecord row = iterator.next();\n        assertEquals(Arrays.asList((Object) \"value\"), row.getAll());\n        row = iterator.next();\n        assertEquals(Arrays.asList((Object) \"value\"), row.getAll());\n    }\n\n    @Test\n    public void testUnpartitionedEmptyRow() {\n        HCatTable table = table().cols(columns(COLUMN_1));\n\n        Multimap<Map<String, String>, HCatRecord> data = new TableDataBuilder(table).newRow().build();\n\n        assertEquals(1, data.size());\n        Iterator<HCatRecord> iterator = data.values().iterator();\n        HCatRecord row = iterator.next();\n        assertEquals(Arrays.asList((Object) null), row.getAll());\n    }\n\n    @Test\n    public void testUnpartitionedWithColumnMask() {\n        HCatTable table = table().cols(columns(COLUMN_1));\n\n        Multimap<Map<String, String>, HCatRecord> data = new TableDataBuilder(table)\n                .withColumns(COLUMN_1)\n                .addRow(\"value\")\n                .build();\n\n        assertEquals(1, data.size());\n        Iterator<HCatRecord> iterator = data.values().iterator();\n        HCatRecord row = iterator.next();\n        assertEquals(Arrays.asList((Object) \"value\"), row.getAll());\n    }\n\n    @Test\n    public void testPartitionedNullPartitionColumnValue() {\n        HCatTable table = 
table().cols(columns(COLUMN_1)).partCols(columns(PARTITION_COLUMN_1));\n        Assertions.assertThrows(IllegalStateException.class, () -> {\n            new TableDataBuilder(table).newRow().build();\n        });\n    }\n\n    @Test\n    public void testPartitionedSimple() {\n        HCatTable table = table().cols(columns(COLUMN_1)).partCols(columns(PARTITION_COLUMN_1));\n\n        Multimap<Map<String, String>, HCatRecord> data = new TableDataBuilder(table)\n                .addRow(\"value\", \"partition_value\")\n                .build();\n\n        assertEquals(1, data.size());\n\n        Map<String, String> partitionSpec = new HashMap<>();\n        partitionSpec.put(PARTITION_COLUMN_1, \"partition_value\");\n\n        Collection<HCatRecord> rows = data.get(partitionSpec);\n        assertEquals(1, rows.size());\n        HCatRecord row = rows.iterator().next();\n        assertEquals(Arrays.asList((Object) \"value\", \"partition_value\"), row.getAll());\n    }\n\n    @Test\n    public void testPartitionedMultiplePartitionsAndRows() {\n        HCatTable table = table().cols(columns(COLUMN_1)).partCols(columns(PARTITION_COLUMN_1));\n\n        Multimap<Map<String, String>, HCatRecord> data = new TableDataBuilder(table)\n                .addRow(\"value1\", \"partition_value1\")\n                .addRow(\"value2\", \"partition_value1\")\n                .addRow(\"value3\", \"partition_value2\")\n                .addRow(\"value4\", \"partition_value2\")\n                .build();\n\n        assertEquals(4, data.size());\n\n        Map<String, String> partitionSpec = new HashMap<>();\n        partitionSpec.put(PARTITION_COLUMN_1, \"partition_value1\");\n\n        Collection<HCatRecord> rows = data.get(partitionSpec);\n        assertEquals(2, rows.size());\n        Iterator<HCatRecord> iterator = rows.iterator();\n        HCatRecord row = iterator.next();\n        assertEquals(Arrays.asList((Object) \"value1\", \"partition_value1\"), row.getAll());\n        row = 
iterator.next();\n        assertEquals(Arrays.asList((Object) \"value2\", \"partition_value1\"), row.getAll());\n\n        partitionSpec = new HashMap<>();\n        partitionSpec.put(PARTITION_COLUMN_1, \"partition_value2\");\n\n        rows = data.get(partitionSpec);\n        assertEquals(2, rows.size());\n        iterator = rows.iterator();\n        row = iterator.next();\n        assertEquals(Arrays.asList((Object) \"value3\", \"partition_value2\"), row.getAll());\n        row = iterator.next();\n        assertEquals(Arrays.asList((Object) \"value4\", \"partition_value2\"), row.getAll());\n    }\n\n    private static HCatTable table() {\n        return new HCatTable(DATABASE_NAME, TABLE_NAME);\n    }\n\n    private static HCatFieldSchema column(String name) {\n        try {\n            return new HCatFieldSchema(name, STRING, null);\n        } catch (HCatException e) {\n            throw new RuntimeException(e);\n        }\n    }\n\n    private static List<HCatFieldSchema> columns(String... names) {\n        List<HCatFieldSchema> columns = new ArrayList<>();\n        for (String name : names) {\n            columns.add(column(name));\n        }\n        return columns;\n    }\n\n}\n"
  },
  {
    "path": "src/test/java/com/klarna/hiverunner/data/TableDataInserterTest.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner.data;\n\nimport static java.util.Arrays.asList;\n\nimport static org.junit.jupiter.api.Assertions.assertEquals;\n\nimport static com.google.common.collect.ImmutableMap.of;\n\nimport java.io.IOException;\nimport java.nio.file.Files;\nimport java.util.Collections;\nimport java.util.List;\nimport java.util.Map;\n\nimport org.apache.hive.hcatalog.data.DefaultHCatRecord;\nimport org.apache.hive.hcatalog.data.HCatRecord;\nimport org.junit.jupiter.api.BeforeEach;\nimport org.junit.jupiter.api.Test;\nimport org.junit.jupiter.api.extension.ExtendWith;\n\nimport com.google.common.collect.ImmutableMultimap;\nimport com.google.common.collect.Multimap;\nimport com.klarna.hiverunner.HiveRunnerExtension;\nimport com.klarna.hiverunner.HiveShell;\nimport com.klarna.hiverunner.StandaloneHiveRunner;\nimport com.klarna.hiverunner.annotations.HiveSQL;\n\n@ExtendWith(HiveRunnerExtension.class)\npublic class TableDataInserterTest {\n\n    private static final String TEST_TABLE = \"test_table\";\n    private static final String TEST_DB = \"testdb\";\n    @HiveSQL(encoding = \"UTF-8\", files = {})\n    private HiveShell hiveShell;\n    private String dataLocation;\n\n    @BeforeEach\n    public void setUp() throws IOException {\n        dataLocation = 
Files.createTempDirectory(hiveShell.getBaseDir(), \"hiverunner_data\").toString();\n        hiveShell.execute(\"create database testdb\");\n        hiveShell.execute(\"create table testdb.test_table (a STRING, b STRING) \"\n                + \"PARTITIONED BY(local_date STRING) STORED AS ORC LOCATION '\" + dataLocation + \"'\");\n    }\n\n    @Test\n    public void insertsRowsIntoExistingTable() {\n        Multimap<Map<String, String>, HCatRecord> data = ImmutableMultimap\n                .<Map<String, String>, HCatRecord>builder()\n                .put(of(\"local_date\", \"2015-10-14\"), new DefaultHCatRecord(asList((Object) \"aa\", \"bb\")))\n                .put(of(\"local_date\", \"2015-10-14\"), new DefaultHCatRecord(asList((Object) \"aa2\", \"bb2\")))\n                .put(of(\"local_date\", \"2015-10-14\"), new DefaultHCatRecord(asList((Object) \"cc\", \"dd\")))\n                .put(of(\"local_date\", \"2015-10-15\"), new DefaultHCatRecord(asList((Object) \"ee\", \"ff\")))\n                .build();\n\n        TableDataInserter inserter = new TableDataInserter(TEST_DB, TEST_TABLE, hiveShell.getHiveConf());\n        inserter.insert(data);\n\n        List<String> result = hiveShell.executeQuery(\"select * from testdb.test_table\");\n        Collections.sort(result);\n\n        assertEquals(4, result.size());\n        assertEquals(\"aa\", result.get(0).split(\"\\t\")[0]);\n        assertEquals(\"bb\", result.get(0).split(\"\\t\")[1]);\n        assertEquals(\"2015-10-14\", result.get(0).split(\"\\t\")[2]);\n\n        assertEquals(\"aa2\", result.get(1).split(\"\\t\")[0]);\n        assertEquals(\"bb2\", result.get(1).split(\"\\t\")[1]);\n        assertEquals(\"2015-10-14\", result.get(1).split(\"\\t\")[2]);\n\n        assertEquals(\"cc\", result.get(2).split(\"\\t\")[0]);\n        assertEquals(\"dd\", result.get(2).split(\"\\t\")[1]);\n        assertEquals(\"2015-10-14\", result.get(2).split(\"\\t\")[2]);\n\n        assertEquals(\"ee\", 
result.get(3).split(\"\\t\")[0]);\n        assertEquals(\"ff\", result.get(3).split(\"\\t\")[1]);\n        assertEquals(\"2015-10-15\", result.get(3).split(\"\\t\")[2]);\n    }\n}\n"
  },
  {
    "path": "src/test/java/com/klarna/hiverunner/data/TsvFileParserTest.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner.data;\n\nimport static org.junit.jupiter.api.Assertions.assertArrayEquals;\nimport static org.junit.jupiter.api.Assertions.assertEquals;\nimport static org.junit.jupiter.api.Assertions.assertTrue;\n\nimport java.io.File;\nimport java.util.Arrays;\nimport java.util.List;\n\nimport org.junit.jupiter.api.Assertions;\nimport org.junit.jupiter.api.Test;\n\npublic class TsvFileParserTest {\n\n    @Test\n    public void parsesTsv() {\n        File dataFile = new File(\"src/test/resources/TsvFileParserTest/data.tsv\");\n        TsvFileParser tsvFileParser = new TsvFileParser();\n        List<Object[]> result = tsvFileParser.parse(dataFile, null, Arrays.asList(\"a\", \"b\", \"c\", \"d\", \"e\"));\n        assertEquals(2, result.size());\n        assertArrayEquals(new String[]{\"a1\", \"b1\", \"c1\", \"d1\", \"e1\"}, result.get(0));\n        assertArrayEquals(new String[]{\"a2\", \"b2\", \"c2\", \"d2\", \"e2\"}, result.get(1));\n    }\n\n    @Test\n    public void parsesTsvNotEnoughFieldsInFile() {\n        File dataFile = new File(\"src/test/resources/TsvFileParserTest/data.tsv\");\n        TsvFileParser tsvFileParser = new TsvFileParser();\n        Assertions.assertThrows(IllegalStateException.class, () -> {\n            tsvFileParser.parse(dataFile, null, 
Arrays.asList(\"a\", \"b\", \"c\", \"d\", \"e\", \"f\"));\n        });\n    }\n\n    @Test\n    public void parsesTsvSubSelectFields() {\n        File dataFile = new File(\"src/test/resources/TsvFileParserTest/data.tsv\");\n        TsvFileParser tsvFileParser = new TsvFileParser();\n        List<Object[]> result = tsvFileParser.parse(dataFile, null, Arrays.asList(\"a\", \"b\", \"c\", \"d\"));\n        assertEquals(2, result.size());\n        assertArrayEquals(new String[]{\"a1\", \"b1\", \"c1\", \"d1\"}, result.get(0));\n        assertArrayEquals(new String[]{\"a2\", \"b2\", \"c2\", \"d2\"}, result.get(1));\n    }\n\n    @Test\n    public void parsesCsvWithEmptyFields() {\n        File dataFile = new File(\"src/test/resources/TsvFileParserTest/data.csv\");\n        TsvFileParser tsvFileParser = new TsvFileParser().withDelimiter(\",\");\n        List<Object[]> result = tsvFileParser.parse(dataFile, null, Arrays.asList(\"a\", \"b\", \"c\", \"d\", \"e\"));\n        assertEquals(2, result.size());\n        assertArrayEquals(new String[]{\"a1\", \"b1\", \"c1\", \"d1\", null}, result.get(0));\n        assertArrayEquals(new String[]{\"a2\", \"b2\", null, \"d2\", \"e2\"}, result.get(1));\n    }\n\n    @Test\n    public void csvWithCustomNullValue() {\n        File dataFile = new File(\"src/test/resources/TsvFileParserTest/dataWithCustomNullValue.csv\");\n        TsvFileParser tsvFileParser = new TsvFileParser().withDelimiter(\",\").withNullValue(\"NULL\");\n        List<Object[]> result = tsvFileParser.parse(dataFile, null, Arrays.asList(\"a\", \"b\", \"c\", \"d\", \"e\"));\n        assertEquals(2, result.size());\n        assertArrayEquals(new String[]{\"a1\", \"b1\", \"c1\", \"d1\", null}, result.get(0));\n        assertArrayEquals(new String[]{\"a2\", \"b2\", null, \"d2\", \"e2\"}, result.get(1));\n    }\n\n    @Test\n    public void tsvWithHeader() {\n        File dataFile = new File(\"src/test/resources/TsvFileParserTest/dataWithHeader.tsv\");\n        TsvFileParser 
tsvFileParser = new TsvFileParser().withHeader();\n\n        assertTrue(tsvFileParser.hasColumnNames());\n        assertEquals(tsvFileParser.getColumnNames(dataFile), Arrays.asList(\"a\", \"b\", \"c\", \"d\", \"e\"));\n\n        List<Object[]> result = tsvFileParser.parse(dataFile, null, Arrays.asList(\"a\", \"b\", \"c\", \"d\", \"e\"));\n        assertEquals(2, result.size());\n        assertArrayEquals(new String[]{\"a1\", \"b1\", \"c1\", \"d1\", \"e1\"}, result.get(0));\n        assertArrayEquals(new String[]{\"a2\", \"b2\", \"c2\", \"d2\", \"e2\"}, result.get(1));\n    }\n\n    @Test\n    public void csvWithHeader() {\n        File dataFile = new File(\"src/test/resources/TsvFileParserTest/dataWithHeader.csv\");\n        TsvFileParser tsvFileParser = new TsvFileParser().withDelimiter(\",\").withHeader();\n\n        assertTrue(tsvFileParser.hasColumnNames());\n        assertEquals(tsvFileParser.getColumnNames(dataFile), Arrays.asList(\"a\", \"b\", \"c\", \"d\", \"e\"));\n\n        List<Object[]> result = tsvFileParser.parse(dataFile, null, Arrays.asList(\"a\", \"b\", \"c\", \"d\", \"e\"));\n        assertEquals(2, result.size());\n        assertArrayEquals(new String[]{\"a1\", \"b1\", \"c1\", \"d1\", null}, result.get(0));\n        assertArrayEquals(new String[]{\"a2\", \"b2\", null, \"d2\", \"e2\"}, result.get(1));\n    }\n}\n"
  },
  {
    "path": "src/test/java/com/klarna/hiverunner/examples/HelloAnnotatedHiveRunnerTest.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner.examples;\n\nimport com.google.common.collect.Sets;\nimport com.klarna.hiverunner.HiveRunnerExtension;\nimport com.klarna.hiverunner.HiveShell;\nimport com.klarna.hiverunner.annotations.HiveProperties;\nimport com.klarna.hiverunner.annotations.HiveResource;\nimport com.klarna.hiverunner.annotations.HiveRunnerSetup;\nimport com.klarna.hiverunner.annotations.HiveSQL;\nimport com.klarna.hiverunner.annotations.HiveSetupScript;\nimport com.klarna.hiverunner.config.HiveRunnerConfig;\nimport org.apache.commons.collections.MapUtils;\nimport org.junit.jupiter.api.Assertions;\nimport org.junit.jupiter.api.BeforeEach;\nimport org.junit.jupiter.api.Test;\nimport org.junit.jupiter.api.extension.ExtendWith;\n\nimport java.io.File;\nimport java.nio.file.Paths;\nimport java.util.HashMap;\nimport java.util.HashSet;\nimport java.util.List;\nimport java.util.Map;\n\n/**\n * Hive Runner Reference implementation.\n * <p/>\n * All HiveRunner tests should run with the StandaloneHiveRunner\n */\n@ExtendWith(HiveRunnerExtension.class)\npublic class HelloAnnotatedHiveRunnerTest {\n\n    /**\n     * Explicit test class configuration of the HiveRunner runtime.\n     * See {@link HiveRunnerConfig} for further details.\n     */\n    @HiveRunnerSetup\n    public final HiveRunnerConfig CONFIG = 
new HiveRunnerConfig() {{\n        setHiveExecutionEngine(\"mr\");\n    }};\n\n    /**\n     * Cater for all the parameters in the script that we want to test.\n     * Note that the \"hadoop.tmp.dir\" is one of the dirs defined by the test harness\n     */\n    @HiveProperties\n    public Map<String, String> hiveProperties = MapUtils.putAll(new HashMap(), new Object[]{\n            \"MY.HDFS.DIR\", \"${hadoop.tmp.dir}\",\n            \"my.schema\", \"bar\",\n    });\n\n    /**\n     * In this example, the scripts under test expects a schema to be already present in hive so\n     * we do that with a setup script.\n     * <p/>\n     * There may be multiple setup scripts but the order of execution is undefined.\n     */\n    @HiveSetupScript\n    private String createSchemaScript = \"create schema ${hiveconf:my.schema}\";\n\n    /**\n     * Create some data in the target directory. Note that the 'targetFile' references the\n     * same dir as the create table statement in the script under test.\n     * <p/>\n     * This example is for defining the data in line as a string.\n     */\n    @HiveResource(targetFile = \"${hiveconf:MY.HDFS.DIR}/foo/data_from_string.csv\")\n    private String dataFromString = \"2,World\\n3,!\";\n\n    /**\n     * Create some data in the target directory. Note that the 'targetFile' references the\n     * same dir as the create table statement in the script under test.\n     * <p/>\n     * This example is for defining the data in in a resource file.\n     */\n    @HiveResource(targetFile = \"${hiveconf:MY.HDFS.DIR}/foo/data_from_file.csv\")\n    private File dataFromFile =\n            new File(ClassLoader.getSystemResource(\"HelloHiveRunnerTest/hello_hive_runner.csv\").getPath());\n\n    /**\n     * Define the script files under test. 
The files will be loaded in the given order.\n     * <p/>\n     * The HiveRunner instantiate and inject the HiveShell\n     */\n    @HiveSQL(files = {\n            \"HelloHiveRunnerTest/create_table.sql\",\n            \"HelloHiveRunnerTest/create_ctas.sql\"\n    }, encoding = \"UTF-8\")\n    private HiveShell hiveShell;\n\n    @Test\n    public void testTablesCreated() {\n        HashSet<String> expected = Sets.newHashSet(\"foo\", \"foo_prim\");\n        HashSet<String> actual = Sets.newHashSet(hiveShell.executeQuery(\"show tables\"));\n\n        Assertions.assertEquals(expected, actual);\n    }\n\n    @Test\n    public void testSelectFromFooWithCustomDelimiter() {\n        HashSet<String> expected = Sets.newHashSet(\"3,!\", \"2,World\", \"1,Hello\", \"N/A,bar\");\n        HashSet<String> actual = Sets.newHashSet(hiveShell.executeQuery(\"select * from foo\", \",\", \"N/A\"));\n        Assertions.assertEquals(expected, actual);\n    }\n\n    @Test\n    public void testSelectFromFooWithTypeCheck() {\n\n        List<Object[]> actual = hiveShell.executeStatement(\"select * from foo order by i\");\n\n        Assertions.assertArrayEquals(new Object[]{null, \"bar\"}, actual.get(0));\n        Assertions.assertArrayEquals(new Object[]{1, \"Hello\"}, actual.get(1));\n        Assertions.assertArrayEquals(new Object[]{2, \"World\"}, actual.get(2));\n        Assertions.assertArrayEquals(new Object[]{3, \"!\"}, actual.get(3));\n    }\n\n    @Test\n    public void testSelectFromCtas() {\n        HashSet<String> expected = Sets.newHashSet(\"Hello\", \"World\", \"!\");\n        HashSet<String> actual = Sets.newHashSet(hiveShell\n                .executeQuery(\"select a.s from (select s, i from foo_prim order by i) a where a.i is not null\"));\n        Assertions.assertEquals(expected, actual);\n    }\n}\n"
  },
  {
    "path": "src/test/java/com/klarna/hiverunner/examples/HelloHiveRunnerParamaterizedTest.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner.examples;\n\nimport static org.junit.jupiter.api.Assertions.assertArrayEquals;\nimport static org.junit.jupiter.api.Assertions.assertEquals;\n\nimport java.util.List;\n\nimport org.junit.jupiter.api.BeforeEach;\nimport org.junit.jupiter.api.extension.ExtendWith;\nimport org.junit.jupiter.params.ParameterizedTest;\nimport org.junit.jupiter.params.provider.ValueSource;\n\nimport com.klarna.hiverunner.HiveRunnerExtension;\nimport com.klarna.hiverunner.HiveShell;\nimport com.klarna.hiverunner.annotations.HiveSQL;\n\n/**\n * A basic Hive Runner example showing how to use JUnit5's ParameterizedTest.\n */\n@ExtendWith(HiveRunnerExtension.class)\npublic class HelloHiveRunnerParamaterizedTest {\n\n    @HiveSQL(files = {})\n    private HiveShell shell;\n\n    @BeforeEach\n    public void setupSourceDatabase() {\n        shell.executeStatement(\"CREATE DATABASE source_db\");\n    }\n\n    @ParameterizedTest\n    @ValueSource(strings = {\"SEQUENCEFILE\", \"ORC\", \"PARQUET\"})\n    public void testFileFormats(String fileFormat) {\n        shell.executeStatement(new StringBuilder()\n                .append(\"CREATE TABLE source_db.test_table (\")\n                .append(\"year STRING, value INT\")\n                .append(\") stored as \")\n                
.append(fileFormat)\n                .toString());\n\n        shell.insertInto(\"source_db\", \"test_table\")\n                .withColumns(\"year\", \"value\")\n                .addRow(\"2014\", 3)\n                .addRow(\"2014\", 4)\n                .addRow(\"2015\", 2)\n                .addRow(\"2015\", 5)\n                .commit();\n\n        List<Object[]> result = shell.executeStatement(\"select year, max(value) from source_db.test_table group by year\");\n\n        assertEquals(2, result.size());\n        assertArrayEquals(new Object[]{\"2014\", 4}, result.get(0));\n        assertArrayEquals(new Object[]{\"2015\", 5}, result.get(1));\n    }\n}\n"
  },
  {
    "path": "src/test/java/com/klarna/hiverunner/examples/HelloHiveRunnerTest.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner.examples;\n\nimport com.klarna.hiverunner.HiveRunnerExtension;\nimport com.klarna.hiverunner.HiveShell;\nimport com.klarna.hiverunner.annotations.HiveSQL;\nimport org.junit.jupiter.api.BeforeEach;\nimport org.junit.jupiter.api.Test;\nimport org.junit.jupiter.api.extension.ExtendWith;\n\nimport java.nio.file.Paths;\nimport java.util.List;\n\nimport static org.junit.jupiter.api.Assertions.assertArrayEquals;\nimport static org.junit.jupiter.api.Assertions.assertEquals;\n\n/**\n * A basic Hive Runner example showing how to setup the test source database and target database, execute the query\n * and then validate the result.\n *\n * In this example we want to test some very simple code, calculate_max.sql, that calculate a max value by year.\n *\n * <p/>\n * All HiveRunner tests should run with the StandaloneHiveRunner and have a reference to HiveShell.\n */\n@ExtendWith(HiveRunnerExtension.class)\npublic class HelloHiveRunnerTest {\n\n    @HiveSQL(files = {})\n    private HiveShell shell;\n\n    @BeforeEach\n    public void setupSourceDatabase() {\n        shell.execute(\"CREATE DATABASE source_db\");\n        shell.execute(new StringBuilder()\n                .append(\"CREATE TABLE source_db.test_table (\")\n                .append(\"year STRING, value INT\")\n       
         .append(\")\")\n                .toString());\n\n        shell.execute(Paths.get(\"src/test/resources/HelloHiveRunnerTest/create_max.sql\"));\n    }\n\n    @Test\n    public void testMaxValueByYear() {\n        /*\n         * Insert some source data\n         */\n        shell.insertInto(\"source_db\", \"test_table\")\n                .withColumns(\"year\", \"value\")\n                .addRow(\"2014\", 3)\n                .addRow(\"2014\", 4)\n                .addRow(\"2015\", 2)\n                .addRow(\"2015\", 5)\n                .commit();\n\n        /*\n         * Execute the query\n         */\n        shell.execute(Paths.get(\"src/test/resources/HelloHiveRunnerTest/calculate_max.sql\"));\n\n        /*\n         * Verify the result\n         */\n        List<Object[]> result = shell.executeStatement(\"select * from my_schema.result\");\n\n        assertEquals(2, result.size());\n        assertArrayEquals(new Object[]{\"2014\", 4}, result.get(0));\n        assertArrayEquals(new Object[]{\"2015\", 5}, result.get(1));\n    }\n}\n"
  },
  {
    "path": "src/test/java/com/klarna/hiverunner/examples/InsertTestDataTest.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner.examples;\n\nimport com.klarna.hiverunner.HiveRunnerExtension;\nimport com.klarna.hiverunner.HiveShell;\nimport com.klarna.hiverunner.annotations.HiveSQL;\nimport com.klarna.hiverunner.data.TsvFileParser;\nimport org.junit.jupiter.api.BeforeEach;\nimport org.junit.jupiter.api.Test;\nimport org.junit.jupiter.api.extension.ExtendWith;\n\nimport java.io.File;\nimport java.util.Arrays;\nimport java.util.List;\n\n/*\n    This example is intended to be a small show case for some of the ways of setting up your test data in HiveRunner.\n    It will only print out some result and thus is not a strict unit test suite.\n\n    The examples will go through cases with adding test data from \"code\" or from file, and how you only need to supply\n    a selected subset of the columns or how to use more advanced features like files with custom separator characters\n    or custom NULL keywords in the test data files.\n */\n@ExtendWith(HiveRunnerExtension.class)\npublic class InsertTestDataTest {\n\n    @HiveSQL(files = {})\n    private HiveShell shell;\n\n    @BeforeEach\n    public void setupDatabase() {\n        shell.execute(\"CREATE DATABASE source_db\");\n        shell.execute(new StringBuilder()\n                .append(\"CREATE TABLE source_db.test_table (\")\n                
.append(\"col_a STRING, col_b INT, col_c BOOLEAN\")\n                .append(\")\")\n                .toString());\n    }\n\n    @Test\n    public void insertRowsFromCode() {\n        shell.insertInto(\"source_db\", \"test_table\")\n                .withAllColumns()\n                .addRow(\"Value1\", 1, true)\n                .addRow(\"Value2\", 99, false)\n                .commit();\n\n        printResult(shell.executeStatement(\"select * from source_db.test_table\"), \"from code\");\n    }\n\n    @Test\n    public void insertRowsFromCodeWithSelectedColumns() {\n        shell.insertInto(\"source_db\", \"test_table\")\n                .withColumns(\"col_a\", \"col_c\")\n                .addRow(\"Value1\", true)\n                .addRow(\"Value2\", false)\n                .commit();\n\n        printResult(shell.executeStatement(\"select * from source_db.test_table\"), \"from code selected columns\");\n    }\n\n    @Test\n    public void insertRowsFromTsvFile() {\n        File dataFile = new File(\"src/test/resources/InsertTestDataTest/data1.tsv\");\n        shell.insertInto(\"source_db\", \"test_table\")\n                .withAllColumns()\n                .addRowsFromTsv(dataFile)\n                .commit();\n\n        printResult(shell.executeStatement(\"select * from source_db.test_table\"), \"TSV file\");\n    }\n\n    @Test\n    public void insertRowsFromTsvFileWithHeader() {\n        File dataFile = new File(\"src/test/resources/InsertTestDataTest/dataWithHeader1.tsv\");\n        TsvFileParser parser = new TsvFileParser().withHeader();\n        shell.insertInto(\"source_db\", \"test_table\")\n                .addRowsFrom(dataFile, parser)\n                .commit();\n\n        printResult(shell.executeStatement(\"select * from source_db.test_table\"), \"TSV file header\");\n    }\n\n    @Test\n    public void insertRowsFromTsvFileWithSubsetHeader() {\n        File dataFile = new File(\"src/test/resources/InsertTestDataTest/dataWithHeader2.tsv\");\n        
TsvFileParser parser = new TsvFileParser().withHeader();\n        shell.insertInto(\"source_db\", \"test_table\")\n                .addRowsFrom(dataFile, parser)\n                .commit();\n\n        printResult(shell.executeStatement(\"select * from source_db.test_table\"), \"TSV file subset header\");\n    }\n\n    @Test\n    public void insertRowsIntoPartitionedTableStoredAsSequencefileWithCustomDelimiterAndNullValue() {\n        File dataFile = new File(\"src/test/resources/InsertTestDataTest/data2.tsv\");\n        shell.execute(new StringBuilder()\n                .append(\"CREATE TABLE source_db.test_table2 (\")\n                .append(\"col_a STRING, col_b INT\")\n                .append(\")\")\n                .append(\"partitioned by (col_c string)\")\n                .append(\"stored as SEQUENCEFILE\")\n                .toString());\n\n        shell.insertInto(\"source_db\", \"test_table2\")\n                .withAllColumns()\n                .addRowsFrom(dataFile, new TsvFileParser().withDelimiter(\":\").withNullValue(\"__NULL__\"))\n                .commit();\n\n        printResult(shell.executeStatement(\"select * from source_db.test_table2\"), \"long method name\");\n    }\n\n    private void printResult(List<Object[]> result, String methodName) {\n        System.out.println(String.format(\"Result from %s:\", methodName));\n        for (Object[] row : result) {\n            System.out.println(Arrays.asList(row));\n        }\n    }\n}\n"
  },
  {
    "path": "src/test/java/com/klarna/hiverunner/examples/SetHiveConfValuesTest.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner.examples;\n\nimport com.klarna.hiverunner.HiveRunnerExtension;\nimport com.klarna.hiverunner.HiveShell;\nimport com.klarna.hiverunner.annotations.HiveSQL;\nimport org.junit.jupiter.api.BeforeEach;\nimport org.junit.jupiter.api.Test;\nimport org.junit.jupiter.api.extension.ExtendWith;\n\nimport java.util.List;\n\n/*\n    This example is intended to show how to set HiveConf (or HiveVar) values in HiveRunner.\n\n    HiveConf can be very useful. For instance you might have a global cutoff value that could be set outside your code\n    and used in many places in your queries. Common example would be a threshold value or a cutoff timestamp.\n\n    To use the HiveConf values in HiveRunner, you must first make sure to switch off the autoStart flag. Then you can\n    set the HiveConf values and, before executing any queries, manually start the HIveRunner shell. Make sure this is\n    done first in your test setup. 
like shown in the example below.\n */\n@ExtendWith(HiveRunnerExtension.class)\npublic class SetHiveConfValuesTest {\n\n    @HiveSQL(files = {}, autoStart = false)\n    private HiveShell shell;\n\n    @BeforeEach\n    public void setupDatabases() {\n        shell.setHiveConfValue(\"cutoff\", \"50\");\n        shell.start();\n\n        shell.execute(\"CREATE DATABASE source_db\");\n        shell.execute(new StringBuilder()\n                .append(\"CREATE TABLE source_db.table_a (\")\n                .append(\"message STRING, value INT\")\n                .append(\")\")\n                .toString());\n\n        shell.insertInto(\"source_db\", \"table_a\")\n                .withAllColumns()\n                .addRow(\"An ignored message\", 1)\n                .addRow(\"Hello\", 51)\n                .addRow(\"World\", 99)\n                .commit();\n    }\n\n    @Test\n    public void useHiveConfValues() {\n        List<Object[]> result = shell.executeStatement(\n                \"select message from source_db.table_a where value > ${hiveconf:cutoff}\");\n\n        for (Object[] row : result) {\n            System.out.print(row[0] + \" \");\n        }\n    }\n}\n"
  },
  {
    "path": "src/test/java/com/klarna/hiverunner/examples/junit4/HelloAnnotatedHiveRunnerTest.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner.examples.junit4;\n\nimport com.google.common.collect.Sets;\nimport com.klarna.hiverunner.HiveShell;\nimport com.klarna.hiverunner.StandaloneHiveRunner;\nimport com.klarna.hiverunner.annotations.HiveProperties;\nimport com.klarna.hiverunner.annotations.HiveResource;\nimport com.klarna.hiverunner.annotations.HiveRunnerSetup;\nimport com.klarna.hiverunner.annotations.HiveSQL;\nimport com.klarna.hiverunner.annotations.HiveSetupScript;\nimport com.klarna.hiverunner.config.HiveRunnerConfig;\nimport org.apache.commons.collections.MapUtils;\nimport org.junit.Assert;\nimport org.junit.Test;\nimport org.junit.runner.RunWith;\n\nimport java.io.File;\nimport java.util.HashMap;\nimport java.util.HashSet;\nimport java.util.List;\nimport java.util.Map;\n\n/**\n * Hive Runner Reference implementation.\n * <p/>\n * All HiveRunner tests should run with the StandaloneHiveRunner\n */\n@RunWith(StandaloneHiveRunner.class)\npublic class HelloAnnotatedHiveRunnerTest {\n\n    /**\n     * Explicit test class configuration of the HiveRunner runtime.\n     * See {@link HiveRunnerConfig} for further details.\n     */\n    @HiveRunnerSetup\n    public final HiveRunnerConfig CONFIG = new HiveRunnerConfig() {{\n        setHiveExecutionEngine(\"mr\");\n    }};\n\n    /**\n     * Cater for all 
the parameters in the script that we want to test.\n     * Note that the \"hadoop.tmp.dir\" is one of the dirs defined by the test harness\n     */\n    @HiveProperties\n    public Map<String, String> hiveProperties = MapUtils.putAll(new HashMap(), new Object[]{\n            \"MY.HDFS.DIR\", \"${hadoop.tmp.dir}\",\n            \"my.schema\", \"bar\",\n    });\n\n    /**\n     * In this example, the scripts under test expects a schema to be already present in hive so\n     * we do that with a setup script.\n     * <p/>\n     * There may be multiple setup scripts but the order of execution is undefined.\n     */\n    @HiveSetupScript\n    private String createSchemaScript = \"create schema ${hiveconf:my.schema}\";\n\n    /**\n     * Create some data in the target directory. Note that the 'targetFile' references the\n     * same dir as the create table statement in the script under test.\n     * <p/>\n     * This example is for defining the data in line as a string.\n     */\n    @HiveResource(targetFile = \"${hiveconf:MY.HDFS.DIR}/foo/data_from_string.csv\")\n    private String dataFromString = \"2,World\\n3,!\";\n\n    /**\n     * Create some data in the target directory. Note that the 'targetFile' references the\n     * same dir as the create table statement in the script under test.\n     * <p/>\n     * This example is for defining the data in in a resource file.\n     */\n    @HiveResource(targetFile = \"${hiveconf:MY.HDFS.DIR}/foo/data_from_file.csv\")\n    private File dataFromFile =\n            new File(ClassLoader.getSystemResource(\"HelloHiveRunnerTest/hello_hive_runner.csv\").getPath());\n\n    /**\n     * Define the script files under test. 
The files will be loaded in the given order.\n     * <p/>\n     * The HiveRunner instantiate and inject the HiveShell\n     */\n    @HiveSQL(files = {\n            \"HelloHiveRunnerTest/create_table.sql\",\n            \"HelloHiveRunnerTest/create_ctas.sql\"\n    }, encoding = \"UTF-8\")\n    private HiveShell hiveShell;\n\n\n    @Test\n    public void testTablesCreated() {\n        HashSet<String> expected = Sets.newHashSet(\"foo\", \"foo_prim\");\n        HashSet<String> actual = Sets.newHashSet(hiveShell.executeQuery(\"show tables\"));\n\n        Assert.assertEquals(expected, actual);\n    }\n\n    @Test\n    public void testSelectFromFooWithCustomDelimiter() {\n        HashSet<String> expected = Sets.newHashSet(\"3,!\", \"2,World\", \"1,Hello\", \"N/A,bar\");\n        HashSet<String> actual = Sets.newHashSet(hiveShell.executeQuery(\"select * from foo\", \",\", \"N/A\"));\n        Assert.assertEquals(expected, actual);\n    }\n\n    @Test\n    public void testSelectFromFooWithTypeCheck() {\n\n        List<Object[]> actual = hiveShell.executeStatement(\"select * from foo order by i\");\n\n        Assert.assertArrayEquals(new Object[]{null, \"bar\"}, actual.get(0));\n        Assert.assertArrayEquals(new Object[]{1, \"Hello\"}, actual.get(1));\n        Assert.assertArrayEquals(new Object[]{2, \"World\"}, actual.get(2));\n        Assert.assertArrayEquals(new Object[]{3, \"!\"}, actual.get(3));\n    }\n\n    @Test\n    public void testSelectFromCtas() {\n        HashSet<String> expected = Sets.newHashSet(\"Hello\", \"World\", \"!\");\n        HashSet<String> actual = Sets.newHashSet(hiveShell\n                .executeQuery(\"select a.s from (select s, i from foo_prim order by i) a where a.i is not null\"));\n        Assert.assertEquals(expected, actual);\n    }\n}\n"
  },
  {
    "path": "src/test/java/com/klarna/hiverunner/examples/junit4/HelloHiveRunnerTest.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner.examples.junit4;\n\nimport static org.junit.Assert.assertArrayEquals;\nimport static org.junit.Assert.assertEquals;\n\nimport com.klarna.hiverunner.HiveShell;\nimport com.klarna.hiverunner.StandaloneHiveRunner;\nimport com.klarna.hiverunner.annotations.HiveSQL;\nimport org.junit.Before;\nimport org.junit.Test;\nimport org.junit.runner.RunWith;\n\nimport java.nio.file.Paths;\nimport java.util.List;\n\n/**\n * A basic Hive Runner example showing how to setup the test source database and target database, execute the query\n * and then validate the result.\n *\n * In this example we want to test some very simple code, calculate_max.sql, that calculate a max value by year.\n *\n * <p/>\n * All HiveRunner tests should run with the StandaloneHiveRunner and have a reference to HiveShell.\n */\n@RunWith(StandaloneHiveRunner.class)\npublic class HelloHiveRunnerTest {\n    @HiveSQL(files = {})\n    private HiveShell shell;\n\n    @Before\n    public void setupSourceDatabase() {\n        shell.execute(\"CREATE DATABASE source_db\");\n        shell.execute(new StringBuilder()\n                .append(\"CREATE TABLE source_db.test_table (\")\n                .append(\"year STRING, value INT\")\n                .append(\")\")\n                .toString());\n    }\n\n    
@Before\n    public void setupTargetDatabase() {\n        shell.execute(Paths.get(\"src/test/resources/HelloHiveRunnerTest/create_max.sql\"));\n    }\n\n    @Test\n    public void testMaxValueByYear() {\n        /*\n         * Insert some source data\n         */\n        shell.insertInto(\"source_db\", \"test_table\")\n                .withColumns(\"year\", \"value\")\n                .addRow(\"2014\", 3)\n                .addRow(\"2014\", 4)\n                .addRow(\"2015\", 2)\n                .addRow(\"2015\", 5)\n                .commit();\n\n        /*\n         * Execute the query\n         */\n        shell.execute(Paths.get(\"src/test/resources/HelloHiveRunnerTest/calculate_max.sql\"));\n\n        /*\n         * Verify the result\n         */\n        List<Object[]> result = shell.executeStatement(\"select * from my_schema.result\");\n\n        assertEquals(2, result.size());\n        assertArrayEquals(new Object[]{\"2014\", 4}, result.get(0));\n        assertArrayEquals(new Object[]{\"2015\", 5}, result.get(1));\n    }\n}\n"
  },
  {
    "path": "src/test/java/com/klarna/hiverunner/examples/junit4/InsertTestDataTest.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner.examples.junit4;\n\nimport com.klarna.hiverunner.HiveShell;\nimport com.klarna.hiverunner.StandaloneHiveRunner;\nimport com.klarna.hiverunner.annotations.HiveSQL;\nimport com.klarna.hiverunner.data.TsvFileParser;\nimport org.junit.Before;\nimport org.junit.Rule;\nimport org.junit.Test;\nimport org.junit.rules.TestName;\nimport org.junit.runner.RunWith;\n\nimport java.io.File;\nimport java.util.Arrays;\nimport java.util.List;\n\n/*\n    This example is intended to be a small show case for some of the ways of setting up your test data in HiveRunner.\n    It will only print out some result and thus is not a strict unit test suite.\n    The examples will go through cases with adding test data from \"code\" or from file, and how you only need to supply\n    a selected subset of the columns or how to use more advanced features like files with custom separator characters\n    or custom NULL keywords in the test data files.\n */\n@RunWith(StandaloneHiveRunner.class)\npublic class InsertTestDataTest {\n    @Rule\n    public TestName name = new TestName();\n\n    @HiveSQL(files = {})\n    private HiveShell shell;\n\n    @Before\n    public void setupDatabase() {\n        shell.execute(\"CREATE DATABASE source_db\");\n        shell.execute(new StringBuilder()\n               
 .append(\"CREATE TABLE source_db.test_table (\")\n                .append(\"col_a STRING, col_b INT, col_c BOOLEAN\")\n                .append(\")\")\n                .toString());\n    }\n\n\n    @Test\n    public void insertRowsFromCode() {\n        shell.insertInto(\"source_db\", \"test_table\")\n                .withAllColumns()\n                .addRow(\"Value1\", 1, true)\n                .addRow(\"Value2\", 99, false)\n                .commit();\n\n        printResult(shell.executeStatement(\"select * from source_db.test_table\"));\n    }\n\n\n    @Test\n    public void insertRowsFromCodeWithSelectedColumns() {\n        shell.insertInto(\"source_db\", \"test_table\")\n                .withColumns(\"col_a\", \"col_c\")\n                .addRow(\"Value1\", true)\n                .addRow(\"Value2\", false)\n                .commit();\n\n        printResult(shell.executeStatement(\"select * from source_db.test_table\"));\n    }\n\n\n    @Test\n    public void insertRowsFromTsvFile() {\n        File dataFile = new File(\"src/test/resources/InsertTestDataTest/data1.tsv\");\n        shell.insertInto(\"source_db\", \"test_table\")\n                .withAllColumns()\n                .addRowsFromTsv(dataFile)\n                .commit();\n\n        printResult(shell.executeStatement(\"select * from source_db.test_table\"));\n    }\n\n\n    @Test\n    public void insertRowsFromTsvFileWithHeader() {\n        File dataFile = new File(\"src/test/resources/InsertTestDataTest/dataWithHeader1.tsv\");\n        TsvFileParser parser = new TsvFileParser().withHeader();\n        shell.insertInto(\"source_db\", \"test_table\")\n                .addRowsFrom(dataFile, parser)\n                .commit();\n\n        printResult(shell.executeStatement(\"select * from source_db.test_table\"));\n    }\n\n    @Test\n    public void insertRowsFromTsvFileWithSubsetHeader() {\n        File dataFile = new File(\"src/test/resources/InsertTestDataTest/dataWithHeader2.tsv\");\n        
TsvFileParser parser = new TsvFileParser().withHeader();\n        shell.insertInto(\"source_db\", \"test_table\")\n                .addRowsFrom(dataFile, parser)\n                .commit();\n\n        printResult(shell.executeStatement(\"select * from source_db.test_table\"));\n    }\n\n\n    @Test\n    public void insertRowsIntoPartitionedTableStoredAsSequencefileWithCustomDelimiterAndNullValue() {\n        File dataFile = new File(\"src/test/resources/InsertTestDataTest/data2.tsv\");\n        shell.execute(new StringBuilder()\n                .append(\"CREATE TABLE source_db.test_table2 (\")\n                .append(\"col_a STRING, col_b INT\")\n                .append(\")\")\n                .append(\"partitioned by (col_c string)\")\n                .append(\"stored as SEQUENCEFILE\")\n                .toString());\n\n        shell.insertInto(\"source_db\", \"test_table2\")\n                .withAllColumns()\n                .addRowsFrom(dataFile, new TsvFileParser().withDelimiter(\":\").withNullValue(\"__NULL__\"))\n                .commit();\n\n        printResult(shell.executeStatement(\"select * from source_db.test_table2\"));\n    }\n\n\n    public void printResult(List<Object[]> result) {\n        System.out.println(String.format(\"Result from %s:\", name.getMethodName()));\n        for (Object[] row : result) {\n            System.out.println(Arrays.asList(row));\n        }\n    }\n}\n"
  },
  {
    "path": "src/test/java/com/klarna/hiverunner/examples/junit4/SetHiveConfValuesTest.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner.examples.junit4;\n\nimport com.klarna.hiverunner.HiveShell;\nimport com.klarna.hiverunner.StandaloneHiveRunner;\nimport com.klarna.hiverunner.annotations.HiveSQL;\nimport org.junit.Before;\nimport org.junit.Test;\nimport org.junit.runner.RunWith;\n\nimport java.util.List;\n\n/*\n    This example is intended to show how to set HiveConf (or HiveVar) values in HIveRunner.\n    HiveConf can be very useful. For instance you might have a global cutoff value that could be set outside your code\n    and used in many places in your queries. Common example would be a threshold value or a cutoff timestamp.\n    To use the HiveConf values in HiveRunner, you must first make sure to switch off the autoStart flag. Then you can\n    set the HiveConf values and, before executing any queries, manually start the HIveRunner shell. Make sure this is\n    done first in your test setup. 
as shown in the example below.\n */\n@RunWith(StandaloneHiveRunner.class)\npublic class SetHiveConfValuesTest {\n\n    @HiveSQL(files = {}, autoStart = false)\n    private HiveShell shell;\n\n    @Before\n    public void setupDatabases() {\n        shell.setHiveConfValue(\"cutoff\", \"50\");\n        shell.start();\n\n        shell.execute(\"CREATE DATABASE source_db\");\n        shell.execute(new StringBuilder()\n                .append(\"CREATE TABLE source_db.table_a (\")\n                .append(\"message STRING, value INT\")\n                .append(\")\")\n                .toString());\n\n        shell.insertInto(\"source_db\", \"table_a\")\n                .withAllColumns()\n                .addRow(\"An ignored message\", 1)\n                .addRow(\"Hello\", 51)\n                .addRow(\"World\", 99)\n                .commit();\n    }\n\n    @Test\n    public void useHiveConfValues() {\n        List<Object[]> result = shell.executeStatement(\"select message from source_db.table_a where value > ${hiveconf:cutoff}\");\n\n        for (Object[] row : result) {\n            System.out.print(row[0] + \" \");\n        }\n    }\n}\n"
  },
  {
    "path": "src/test/java/com/klarna/hiverunner/io/IgnoreClosePrintStreamTest.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner.io;\n\nimport static org.mockito.Mockito.never;\nimport static org.mockito.Mockito.verify;\n\nimport java.io.PrintStream;\n\nimport org.junit.jupiter.api.Test;\nimport org.junit.jupiter.api.extension.ExtendWith;\nimport org.mockito.Mock;\nimport org.mockito.junit.jupiter.MockitoExtension;\n\n@ExtendWith(MockitoExtension.class)\npublic class IgnoreClosePrintStreamTest {\n\n    @Mock\n    private PrintStream printStream;\n\n    @Test\n    public void closeIgnored() {\n        IgnoreClosePrintStream ignoreClosePrintStream = new IgnoreClosePrintStream(printStream);\n        ignoreClosePrintStream.close();\n        verify(printStream, never()).close();\n    }\n\n}\n"
  },
  {
    "path": "src/test/java/com/klarna/hiverunner/sql/cli/AbstractImportPostProcessorTest.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner.sql.cli;\n\nimport static java.util.Collections.singletonList;\n\nimport static org.hamcrest.CoreMatchers.equalTo;\nimport static org.hamcrest.CoreMatchers.is;\nimport static org.hamcrest.MatcherAssert.assertThat;\nimport static org.mockito.Mockito.when;\n\nimport java.nio.file.Paths;\nimport java.util.List;\n\nimport org.junit.jupiter.api.BeforeEach;\nimport org.junit.jupiter.api.Test;\nimport org.junit.jupiter.api.extension.ExtendWith;\nimport org.mockito.Mock;\nimport org.mockito.junit.jupiter.MockitoExtension;\n\nimport com.klarna.hiverunner.sql.StatementLexer;\n\n@ExtendWith(MockitoExtension.class)\npublic class AbstractImportPostProcessorTest {\n\n    private static final String PATH = \"path\";\n    private static final String IMPORT_STATEMENT = \"importStatement\";\n    private static final String NON_IMPORT_STATEMENT = \"nonImportStatement\";\n\n    @Mock\n    private StatementLexer lexer;\n\n    private List<String> expected;\n\n    @BeforeEach\n    public void setup() {\n        expected = singletonList(NON_IMPORT_STATEMENT);\n    }\n\n    @Test\n    public void scriptImport() {\n        when(lexer.applyToPath(Paths.get(PATH))).thenReturn(expected);\n        PostProcessor processor = new TestAbstractImportPostProcessor(true, PATH, lexer);\n        
List<String> actual = processor.statement(IMPORT_STATEMENT);\n        assertThat(actual, is(equalTo(expected)));\n    }\n\n    @Test\n    public void nonScriptImport() {\n        PostProcessor processor = new TestAbstractImportPostProcessor(false, null, lexer);\n        List<String> actual = processor.statement(NON_IMPORT_STATEMENT);\n        assertThat(actual, is(equalTo(expected)));\n    }\n\n    private static class TestAbstractImportPostProcessor extends AbstractImportPostProcessor {\n\n        private final String path;\n        private final boolean isImport;\n\n        public TestAbstractImportPostProcessor(boolean isImport, String path, StatementLexer lexer) {\n            super(lexer);\n            this.isImport = isImport;\n            this.path = path;\n        }\n\n        @Override\n        public String getImportPath(String statement) {\n            return path;\n        }\n\n        @Override\n        public boolean isImport(String statement) {\n            return isImport;\n        }\n\n    }\n}\n"
  },
  {
    "path": "src/test/java/com/klarna/hiverunner/sql/cli/CommandShellEmulatorFactoryTest.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner.sql.cli;\n\nimport static org.hamcrest.CoreMatchers.equalTo;\nimport static org.hamcrest.CoreMatchers.is;\nimport static org.hamcrest.MatcherAssert.assertThat;\n\nimport static com.klarna.hiverunner.sql.cli.CommandShellEmulatorFactory.valueOf;\n\nimport org.junit.jupiter.api.Assertions;\nimport org.junit.jupiter.api.Test;\n\nimport com.klarna.hiverunner.sql.cli.beeline.BeelineEmulator;\nimport com.klarna.hiverunner.sql.cli.hive.HiveCliEmulator;\nimport com.klarna.hiverunner.sql.cli.hive.PreV200HiveCliEmulator;\n\npublic class CommandShellEmulatorFactoryTest {\n\n    @Test\n    public void beeline() {\n        assertThat(valueOf(\"beeline\"), is(equalTo((CommandShellEmulator) BeelineEmulator.INSTANCE)));\n        assertThat(valueOf(\"BEELINE\"), is(equalTo((CommandShellEmulator) BeelineEmulator.INSTANCE)));\n        assertThat(valueOf(\" bEeLiNe  \"), is(equalTo((CommandShellEmulator) BeelineEmulator.INSTANCE)));\n    }\n\n    @Test\n    public void hiveCli() {\n        assertThat(valueOf(\"hive_cli\"), is(equalTo((CommandShellEmulator) HiveCliEmulator.INSTANCE)));\n        assertThat(valueOf(\"HIVE_CLI\"), is(equalTo((CommandShellEmulator) HiveCliEmulator.INSTANCE)));\n        assertThat(valueOf(\" hIvE_cLi  \"), is(equalTo((CommandShellEmulator) 
HiveCliEmulator.INSTANCE)));\n    }\n\n    @Test\n    public void hiveCliPreV200() {\n        assertThat(valueOf(\"hive_cli_pre_v200\"), is(equalTo((CommandShellEmulator) PreV200HiveCliEmulator.INSTANCE)));\n        assertThat(valueOf(\"HIVE_CLI_PRE_V200\"), is(equalTo((CommandShellEmulator) PreV200HiveCliEmulator.INSTANCE)));\n        assertThat(valueOf(\" hIvE_cLi_PrE_v200  \"), is(equalTo((CommandShellEmulator) PreV200HiveCliEmulator.INSTANCE)));\n    }\n\n    @Test\n    public void unknown() {\n        Assertions.assertThrows(IllegalArgumentException.class, () -> {\n            valueOf(\"unknown\");\n        });\n    }\n}\n"
  },
  {
    "path": "src/test/java/com/klarna/hiverunner/sql/cli/CommentUtilTest.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner.sql.cli;\n\nimport static org.hamcrest.CoreMatchers.equalTo;\nimport static org.hamcrest.CoreMatchers.is;\nimport static org.hamcrest.MatcherAssert.assertThat;\n\nimport static com.klarna.hiverunner.sql.cli.CommentUtil.stripFullLineComments;\n\nimport org.junit.jupiter.api.Test;\n\npublic class CommentUtilTest {\n\n    @Test\n    public void nothingToStrip() {\n        assertThat(stripFullLineComments(\"a;\\nb;\\n\"), is(equalTo(\"a;\\nb;\")));\n    }\n\n    @Test\n    public void commentToStrip() {\n        assertThat(stripFullLineComments(\"a;\\n-- comment\\nb;\\n\"), is(equalTo(\"a;\\nb;\")));\n    }\n}\n"
  },
  {
    "path": "src/test/java/com/klarna/hiverunner/sql/cli/beeline/BeelineEmulatorTest.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner.sql.cli.beeline;\n\nimport static org.hamcrest.CoreMatchers.is;\nimport static org.hamcrest.MatcherAssert.assertThat;\n\nimport org.junit.jupiter.api.Test;\n\npublic class BeelineEmulatorTest {\n\n    @Test\n    public void testFullLineCommentAndSetStatementBeeLine() {\n        String hiveSql = \"-- hello\\nset x=1;\";\n        assertThat(BeelineEmulator.INSTANCE.preProcessor().statement(hiveSql), is(\"set x=1;\"));\n    }\n\n    @Test\n    public void testFullLineCommentStatementBeeLine() {\n        String hiveSql = \"-- hello\";\n        assertThat(BeelineEmulator.INSTANCE.preProcessor().statement(hiveSql), is(\"\"));\n    }\n\n    @Test\n    public void testFullLineCommentAndSetScriptBeeLine() {\n        String hiveSql = \"-- hello\\nset x=1;\";\n        assertThat(BeelineEmulator.INSTANCE.preProcessor().script(hiveSql), is(\"set x=1;\"));\n    }\n\n    @Test\n    public void testFullLineCommentScriptBeeLine() {\n        String hiveSql = \"-- hello\";\n        assertThat(BeelineEmulator.INSTANCE.preProcessor().script(hiveSql), is(\"\"));\n    }\n\n}\n"
  },
  {
    "path": "src/test/java/com/klarna/hiverunner/sql/cli/beeline/BeelineStatementSplitterTest.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner.sql.cli.beeline;\n\nimport static org.junit.jupiter.api.Assertions.assertEquals;\n\nimport java.util.ArrayList;\nimport java.util.List;\n\nimport org.junit.jupiter.api.Test;\n\nimport com.google.common.base.Joiner;\nimport com.klarna.hiverunner.builder.Statement;\nimport com.klarna.hiverunner.sql.HiveRunnerStatement;\nimport com.klarna.hiverunner.sql.split.StatementSplitter;\n\npublic class BeelineStatementSplitterTest {\n\n    private StatementSplitter splitter = new StatementSplitter(BeelineEmulator.INSTANCE);\n\n    private List<Statement> asStatementList(String... 
strings) {\n        List<Statement> statements = new ArrayList<>();\n        int index = 0;\n        for (String string : strings) {\n            statements.add(new HiveRunnerStatement(index++, string));\n        }\n        return statements;\n    }\n\n    @Test\n    public void testSplitBasic() {\n        String str = \"foo;bar;baz\";\n        List<Statement> expected = asStatementList(\"foo\", \"bar\", \"baz\");\n        assertEquals(expected, splitter.split(str));\n    }\n\n    @Test\n    public void testRemoveTrailingSemiColon() {\n        String str = \";foo;bar;baz;\";\n        List<Statement> expected = asStatementList(\"foo\", \"bar\", \"baz\");\n        assertEquals(expected, splitter.split(str));\n    }\n\n    @Test\n    public void testDiscardRedundantSemiColons() {\n        String str = \"a;b;;;c\";\n        List<Statement> expected = asStatementList(\"a\", \"b\", \"c\");\n        assertEquals(expected, splitter.split(str));\n    }\n\n    @Test\n    public void testDiscardTrailingSpace() {\n        String str = \"a;   b\\t\\n   ;  \\n\\tc   c;\";\n        List<Statement> expected = asStatementList(\"a\", \"   b\\t\", \"\\tc   c\");\n        assertEquals(expected, splitter.split(str));\n    }\n\n    @Test\n    public void testDiscardEmptyStatements() {\n        String str = \"a;b;     \\t\\n   ;c;\";\n        List<Statement> expected = asStatementList(\"a\", \"b\", \"c\");\n        assertEquals(expected, splitter.split(str));\n    }\n\n    @Test\n    public void testCommentPreserved() {\n        String str = \"foo -- bar\";\n        List<Statement> expected = asStatementList(\"foo -- bar\");\n        assertEquals(expected, splitter.split(str));\n    }\n\n    @Test\n    public void testCommentWithSingleQuote() {\n        String str = \"foo -- b'ar\";\n        List<Statement> expected = asStatementList(\"foo -- b'ar\");\n        assertEquals(expected, splitter.split(str));\n    }\n\n    @Test\n    public void testCommentWithDoubleQuote() {\n        String 
str = \"foo -- b\\\"ar\";\n        List<Statement> expected = asStatementList(\"foo -- b\\\"ar\");\n        assertEquals(expected, splitter.split(str));\n    }\n\n    @Test\n    public void testCommentWithSemiColon() {\n        String str = \"foo -- b;ar\";\n        List<Statement> expected = asStatementList(\"foo -- b;ar\");\n        assertEquals(expected, splitter.split(str));\n    }\n\n    @Test\n    public void testMultilineStatementWithComment() {\n        String str = \"foo -- b;ar\\nbaz\";\n        List<Statement> expected = asStatementList(\"foo -- b;ar\\nbaz\");\n        assertEquals(expected, splitter.split(str));\n    }\n\n    @Test\n    public void testRealLifeExample() {\n        String firstStatement = \"CREATE TABLE serde_test (\\n\" + \"  key STRING,\\n\" + \"  value STRING\\n\" + \")\\n\"\n                + \"ROW FORMAT SERDE 'org.apache.hadoop.hive.contrib.serde2.RegexSerDe'\\n\" + \"WITH SERDEPROPERTIES  (\\n\"\n                + \"\\\"input.regex\\\" = \\\"(.*);\\\"                                       \\n\" + \")\\n\"\n                + \"STORED AS TEXTFILE\\n\" + \"LOCATION '${hiveconf:hadoop.tmp.dir}/serde'\";\n\n        String secondStatamenet = \"select * from foobar\";\n\n        assertEquals(asStatementList(firstStatement, secondStatamenet),\n                splitter.split(firstStatement + \";\\n\" + secondStatamenet + \";\\n\"));\n    }\n\n    @Test\n    public void realLifeWithComments() {\n        String firstStatement = \"CREATE TABLE ${hiveconf:TARGET_SCHEMA_NAME}.pacc_pstatus (\\n\"\n                + \"  cid\\tstring, -- The cid of the transaction the balance change is connected to\\n\"\n                + \"  create_date string , -- the date of the pstatus change\\n\"\n                + \"  old_pstatus string, -- The pstatus before the change\\n\"\n                + \"  new_pstatus string, -- The pstatus after the change\\n\"\n                + \"  manual boolean -- true of the pstatus change is manual, currently false for all 
changes \"\n                + \"since we can't know about manual pstatus changes\\n\"\n                + \"  -- PRIMARY KEY() -- there no natural primary key for this table, should we add one, e.g. \"\n                + \"rowno?\\n\" + \"  )\";\n\n        assertEquals(asStatementList(firstStatement), splitter.split(firstStatement + \";\\n\"));\n    }\n\n    @Test\n    public void testPreserveQuoted() {\n        String[] source = new String[]{\"\\\"foo\\\"\", \"'bar'\", \"\\\"\\''\\\"\", \"'\\\"\\\\\\\"'\", \"';'\", \"\\\";\\\"\"};\n        List<Statement> expected = asStatementList(source);\n        String input = Joiner.on(\";\").join(source);\n        assertEquals(expected, splitter.split(input));\n    }\n\n    @Test\n    public void beelineSqlLineCommandsAreSupported() {\n        String statementA = \"!run script.sql\";\n        String statementB = \"select * from table where foo != bar\";\n        String statementC = \"!run another_script.sql\";\n\n        List<Statement> expected = asStatementList(statementA, statementB, \"   \" + statementC);\n        String expression = statementA + '\\n' + statementB + \";   \" + statementC;\n\n        assertEquals(expected, splitter.split(expression));\n    }\n\n    @Test\n    public void testReadUntilEndOfLine() {\n        assertEquals(asStatementList(\"foo\\nbar\\n\\n\\nbaz\"), splitter.split(\"foo\\nbar\\n\\n\\nbaz\"));\n    }\n\n    @Test\n    public void testReadQuoted() {\n        String firstQuote = \"\\\"foo;\\\\; b  a r\\\\\\\"\\\"\";\n        String secondQuote = \"'foo;\\\\; \\\\'b  a r\\\\\\\"'\";\n        String expectedTail = \"'\\'\\\"foxlov  e \\\"\";\n\n        String expression = firstQuote + secondQuote + expectedTail;\n\n        assertEquals(asStatementList(\"\\\"foo;\\\\; b  a r\\\\\\\"\\\"'foo;\\\\; \\\\'b  a r\\\\\\\"'''\\\"foxlov  e \\\"\"),\n                splitter.split(expression));\n    }\n\n}\n"
  },
  {
    "path": "src/test/java/com/klarna/hiverunner/sql/cli/beeline/RunCommandPostProcessorTest.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner.sql.cli.beeline;\n\nimport static java.util.Arrays.asList;\n\nimport static org.hamcrest.CoreMatchers.is;\nimport static org.hamcrest.MatcherAssert.assertThat;\nimport static org.mockito.Mockito.when;\n\nimport java.nio.file.Paths;\nimport java.util.List;\n\nimport org.junit.jupiter.api.Assertions;\nimport org.junit.jupiter.api.BeforeEach;\nimport org.junit.jupiter.api.Test;\nimport org.junit.jupiter.api.extension.ExtendWith;\nimport org.mockito.Mock;\nimport org.mockito.junit.jupiter.MockitoExtension;\n\nimport com.klarna.hiverunner.sql.StatementLexer;\nimport com.klarna.hiverunner.sql.cli.AbstractImportPostProcessor;\n\n@ExtendWith(MockitoExtension.class)\npublic class RunCommandPostProcessorTest {\n\n    @Mock\n    private StatementLexer lexer;\n\n    private AbstractImportPostProcessor processor;\n\n    @BeforeEach\n    public void setup() {\n        processor = new RunCommandPostProcessor(lexer);\n    }\n\n    @Test\n    public void isImport() {\n        assertThat(processor.isImport(\"!run x;\"), is(true));\n    }\n\n    @Test\n    public void isImportSpaces() {\n        assertThat(processor.isImport(\"   !run x   ;   \"), is(true));\n    }\n\n    @Test\n    public void isNotImport() {\n        assertThat(processor.isImport(\"SELECT * FROM x;\"), 
is(false));\n    }\n\n    @Test\n    public void importPathValid() {\n        assertThat(processor.getImportPath(\"!run x\"), is(\"x\"));\n    }\n\n    @Test\n    public void importPathInvalid() {\n        Assertions.assertThrows(IllegalArgumentException.class, () -> processor.getImportPath(\"!run;\"));\n    }\n\n    @Test\n    public void importStatement() {\n        List<String> expected = asList(\"statement x\");\n        when(lexer.applyToPath(Paths.get(\"x\"))).thenReturn(expected);\n\n        assertThat(processor.statement(\"!run x\"), is(expected));\n    }\n\n    @Test\n    public void importStatementSpaces() {\n        List<String> expected = asList(\"statement x\");\n        when(lexer.applyToPath(Paths.get(\"x\"))).thenReturn(expected);\n\n        assertThat(processor.statement(\"   !run x   \"), is(expected));\n    }\n\n    @Test\n    public void generalStatement() {\n        List<String> expected = asList(\"SELECT * FROM x\");\n        assertThat(processor.statement(\"SELECT * FROM x\"), is(expected));\n    }\n\n}\n"
  },
  {
    "path": "src/test/java/com/klarna/hiverunner/sql/cli/beeline/SqlLineCommandRuleTest.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner.sql.cli.beeline;\n\nimport static org.mockito.Matchers.any;\nimport static org.mockito.Mockito.never;\nimport static org.mockito.Mockito.verify;\nimport static org.mockito.Mockito.when;\n\nimport org.junit.jupiter.api.Test;\nimport org.junit.jupiter.api.extension.ExtendWith;\nimport org.mockito.Mock;\nimport org.mockito.junit.jupiter.MockitoExtension;\n\nimport com.klarna.hiverunner.sql.split.Consumer;\nimport com.klarna.hiverunner.sql.split.Context;\n\n@ExtendWith(MockitoExtension.class)\npublic class SqlLineCommandRuleTest {\n\n    @Mock\n    private Context context;\n\n    @Test\n    public void handleStart() {\n        when(context.statement()).thenReturn(\" \");\n        SqlLineCommandRule.INSTANCE.handle(\"token\", context);\n        verify(context).append(\"token\");\n        verify(context).appendWith(Consumer.UNTIL_EOL);\n        verify(context).flush();\n    }\n\n    @Test\n    public void handleOther() {\n        when(context.statement()).thenReturn(\"statement\");\n        SqlLineCommandRule.INSTANCE.handle(\"token\", context);\n        verify(context).append(\"token\");\n        verify(context, never()).appendWith(any(Consumer.class));\n        verify(context, never()).flush();\n    }\n\n}\n"
  },
  {
    "path": "src/test/java/com/klarna/hiverunner/sql/cli/hive/HiveCliEmulatorTest.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner.sql.cli.hive;\n\nimport static org.hamcrest.CoreMatchers.is;\nimport static org.hamcrest.MatcherAssert.assertThat;\n\nimport org.junit.jupiter.api.Test;\n\npublic class HiveCliEmulatorTest {\n    @Test\n    public void testFullLineCommentAndSetStatementHiveCli() {\n        String hiveSql = \"-- hello\\nset x=1;\";\n        assertThat(HiveCliEmulator.INSTANCE.preProcessor().statement(hiveSql), is(\"set x=1;\"));\n    }\n\n    @Test\n    public void testFullLineCommentStatementHiveCli() {\n        String hiveSql = \"-- hello\";\n        assertThat(HiveCliEmulator.INSTANCE.preProcessor().statement(hiveSql), is(\"\"));\n    }\n\n    @Test\n    public void testFullLineCommentAndSetScriptHiveCli() {\n        String hiveSql = \"-- hello\\nset x=1;\";\n        assertThat(HiveCliEmulator.INSTANCE.preProcessor().script(hiveSql), is(\"set x=1;\"));\n    }\n\n    @Test\n    public void testFullLineCommentScriptHiveCli() {\n        String hiveSql = \"-- hello\";\n        assertThat(HiveCliEmulator.INSTANCE.preProcessor().script(hiveSql), is(\"\"));\n    }\n\n}\n"
  },
  {
    "path": "src/test/java/com/klarna/hiverunner/sql/cli/hive/HiveCliStatementSplitterTest.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner.sql.cli.hive;\n\nimport static org.junit.jupiter.api.Assertions.assertEquals;\n\nimport java.util.ArrayList;\nimport java.util.List;\n\nimport org.junit.jupiter.api.Test;\n\nimport com.google.common.base.Joiner;\nimport com.klarna.hiverunner.builder.Statement;\nimport com.klarna.hiverunner.sql.HiveRunnerStatement;\nimport com.klarna.hiverunner.sql.split.StatementSplitter;\n\npublic class HiveCliStatementSplitterTest {\n\n    private StatementSplitter splitter = new StatementSplitter(HiveCliEmulator.INSTANCE);\n\n    private List<Statement> asStatementList(String... 
strings) {\n        List<Statement> statements = new ArrayList<>();\n        int index = 0;\n        for (String string : strings) {\n            statements.add(new HiveRunnerStatement(index++, string));\n        }\n        return statements;\n    }\n\n    @Test\n    public void testSplitBasic() {\n        String str = \"foo;bar;baz\";\n        List<Statement> expected = asStatementList(\"foo\", \"bar\", \"baz\");\n        assertEquals(expected, splitter.split(str));\n    }\n\n    @Test\n    public void testRemoveTrailingSemiColon() {\n        String str = \";foo;bar;baz;\";\n        List<Statement> expected = asStatementList(\"foo\", \"bar\", \"baz\");\n        assertEquals(expected, splitter.split(str));\n    }\n\n    @Test\n    public void testDiscardRedundantSemiColons() {\n        String str = \"a;b;;;c\";\n        List<Statement> expected = asStatementList(\"a\", \"b\", \"c\");\n        assertEquals(expected, splitter.split(str));\n    }\n\n    @Test\n    public void testDiscardTrailingSpace() {\n        String str = \"a;   b\\t\\n   ;  \\n\\tc   c;\";\n        List<Statement> expected = asStatementList(\"a\", \"   b\\t\", \"\\tc   c\");\n        assertEquals(expected, splitter.split(str));\n    }\n\n    @Test\n    public void testDiscardEmptyStatements() {\n        String str = \"a;b;     \\t\\n   ;c;\";\n        List<Statement> expected = asStatementList(\"a\", \"b\", \"c\");\n        assertEquals(expected, splitter.split(str));\n    }\n\n    @Test\n    public void testCommentPreserved() {\n        String str = \"foo -- bar\";\n        List<Statement> expected = asStatementList(\"foo -- bar\");\n        assertEquals(expected, splitter.split(str));\n    }\n\n    @Test\n    public void testCommentWithSingleQuote() {\n        String str = \"foo -- b'ar\";\n        List<Statement> expected = asStatementList(\"foo -- b'ar\");\n        assertEquals(expected, splitter.split(str));\n    }\n\n    @Test\n    public void testCommentWithDoubleQuote() {\n        String 
str = \"foo -- b\\\"ar\";\n        List<Statement> expected = asStatementList(\"foo -- b\\\"ar\");\n        assertEquals(expected, splitter.split(str));\n    }\n\n    @Test\n    public void testCommentWithSemiColon() {\n        String str = \"foo -- b;ar\";\n        List<Statement> expected = asStatementList(\"foo -- b;ar\");\n        assertEquals(expected, splitter.split(str));\n    }\n\n    @Test\n    public void testMultilineStatementWithComment() {\n        String str = \"foo -- b;ar\\nbaz\";\n        List<Statement> expected = asStatementList(\"foo -- b;ar\\nbaz\");\n        assertEquals(expected, splitter.split(str));\n    }\n\n    @Test\n    public void testRealLifeExample() {\n        String firstStatement = \"CREATE TABLE serde_test (\\n\" + \"  key STRING,\\n\" + \"  value STRING\\n\" + \")\\n\"\n                + \"ROW FORMAT SERDE 'org.apache.hadoop.hive.contrib.serde2.RegexSerDe'\\n\" + \"WITH SERDEPROPERTIES  (\\n\"\n                + \"\\\"input.regex\\\" = \\\"(.*);\\\"                                       \\n\" + \")\\n\"\n                + \"STORED AS TEXTFILE\\n\" + \"LOCATION '${hiveconf:hadoop.tmp.dir}/serde'\";\n\n        String secondStatement = \"select * from foobar\";\n\n        assertEquals(asStatementList(firstStatement, secondStatement),\n                splitter.split(firstStatement + \";\\n\" + secondStatement + \";\\n\"));\n    }\n\n    @Test\n    public void realLifeWithComments() {\n        String firstStatement = \"CREATE TABLE ${hiveconf:TARGET_SCHEMA_NAME}.pacc_pstatus (\\n\"\n                + \"  cid\\tstring, -- The cid of the transaction the balance change is connected to\\n\"\n                + \"  create_date string , -- the date of the pstatus change\\n\"\n                + \"  old_pstatus string, -- The pstatus before the change\\n\"\n                + \"  new_pstatus string, -- The pstatus after the change\\n\"\n                + \"  manual boolean -- true of the pstatus change is manual, currently false for all 
changes \"\n                + \"since we can't know about manual pstatus changes\\n\"\n                + \"  -- PRIMARY KEY() -- there no natural primary key for this table, should we add one, e.g. \"\n                + \"rowno?\\n\" + \"  )\";\n\n        assertEquals(asStatementList(firstStatement), splitter.split(firstStatement + \";\\n\"));\n    }\n\n    @Test\n    public void testPreserveQuoted() {\n        String[] source = new String[]{\"\\\"foo\\\"\", \"'bar'\", \"\\\"\\''\\\"\", \"'\\\"\\\\\\\"'\", \"';'\", \"\\\";\\\"\"};\n        List<Statement> expected = asStatementList(source);\n        String input = Joiner.on(\";\").join(source);\n        assertEquals(expected, splitter.split(input));\n    }\n\n    @Test\n    public void hiveCliSourceCommandsAreSupported() {\n        String statementA = \"source script.sql\";\n        String statementB = \"select * from table where foo != bar\";\n        String statementC = \"source another_script.sql\";\n\n        List<Statement> expected = asStatementList(statementA, statementB, \"   \" + statementC);\n        String expression = statementA + \";\\n\" + statementB + \";   \" + statementC;\n\n        assertEquals(expected, splitter.split(expression));\n    }\n\n    @Test\n    public void testReadQuoted() {\n        String firstQuote = \"\\\"foo;\\\\; b  a r\\\\\\\"\\\"\";\n        String secondQuote = \"'foo;\\\\; \\\\'b  a r\\\\\\\"'\";\n        String expectedTail = \"'\\'\\\"foxlov  e \\\"\";\n\n        String expression = firstQuote + secondQuote + expectedTail;\n\n        assertEquals(asStatementList(\"\\\"foo;\\\\; b  a r\\\\\\\"\\\"'foo;\\\\; \\\\'b  a r\\\\\\\"'''\\\"foxlov  e \\\"\"),\n                splitter.split(expression));\n    }\n\n}\n"
  },
  {
    "path": "src/test/java/com/klarna/hiverunner/sql/cli/hive/PreV200HiveCliEmulatorTest.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner.sql.cli.hive;\n\nimport static org.hamcrest.CoreMatchers.is;\nimport static org.hamcrest.MatcherAssert.assertThat;\n\nimport org.junit.jupiter.api.Test;\n\npublic class PreV200HiveCliEmulatorTest {\n    @Test\n    public void testFullLineCommentAndSetStatementHiveCli() {\n        String hiveSql = \"-- hello\\nset x=1;\";\n        assertThat(PreV200HiveCliEmulator.INSTANCE.preProcessor().statement(hiveSql), is(hiveSql));\n    }\n\n    @Test\n    public void testFullLineCommentStatementHiveCli() {\n        String hiveSql = \"-- hello\";\n        assertThat(PreV200HiveCliEmulator.INSTANCE.preProcessor().statement(hiveSql), is(hiveSql));\n    }\n\n    @Test\n    public void testFullLineCommentAndSetScriptHiveCli() {\n        String hiveSql = \"-- hello\\nset x=1;\";\n        assertThat(PreV200HiveCliEmulator.INSTANCE.preProcessor().script(hiveSql), is(\"set x=1;\"));\n    }\n\n    @Test\n    public void testFullLineCommentScriptHiveCli() {\n        String hiveSql = \"-- hello\";\n        assertThat(PreV200HiveCliEmulator.INSTANCE.preProcessor().script(hiveSql), is(\"\"));\n    }\n\n}\n"
  },
  {
    "path": "src/test/java/com/klarna/hiverunner/sql/cli/hive/SourceCommandPostProcessorTest.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner.sql.cli.hive;\n\nimport static java.util.Arrays.asList;\n\nimport static org.hamcrest.CoreMatchers.is;\nimport static org.hamcrest.MatcherAssert.assertThat;\nimport static org.mockito.Mockito.when;\n\nimport java.nio.file.Paths;\nimport java.util.List;\n\nimport org.junit.jupiter.api.BeforeEach;\nimport org.junit.jupiter.api.Test;\nimport org.junit.jupiter.api.extension.ExtendWith;\nimport org.mockito.Mock;\nimport org.mockito.junit.jupiter.MockitoExtension;\n\nimport com.klarna.hiverunner.sql.StatementLexer;\nimport com.klarna.hiverunner.sql.cli.AbstractImportPostProcessor;\n\n@ExtendWith(MockitoExtension.class)\npublic class SourceCommandPostProcessorTest {\n\n    @Mock\n    private StatementLexer lexer;\n\n    private AbstractImportPostProcessor processor;\n\n    @BeforeEach\n    public void setup() {\n        processor = new SourceCommandPostProcessor(lexer);\n    }\n\n    @Test\n    public void isImport() {\n        assertThat(processor.isImport(\"source x\"), is(true));\n    }\n\n    @Test\n    public void isImportSpaces() {\n        assertThat(processor.isImport(\"    source x    \"), is(true));\n    }\n\n    @Test\n    public void isImportCaseInsensitive() {\n        assertThat(processor.isImport(\"SoUrCe x\"), is(true));\n    }\n\n    @Test\n    public 
void isNotImport() {\n        assertThat(processor.isImport(\"SELECT * FROM x;\"), is(false));\n    }\n\n    @Test\n    public void importPathValid() {\n        assertThat(processor.getImportPath(\"source x y z\"), is(\"x y z\"));\n    }\n\n    @Test\n    public void importStatement() {\n        List<String> expected = asList(\"statement x\");\n        when(lexer.applyToPath(Paths.get(\"x\"))).thenReturn(expected);\n\n        assertThat(processor.statement(\"source x\"), is(expected));\n    }\n\n    @Test\n    public void importStatementSpaces() {\n        List<String> expected = asList(\"statement x\");\n        when(lexer.applyToPath(Paths.get(\"x\"))).thenReturn(expected);\n\n        assertThat(processor.statement(\"    source x   \"), is(expected));\n    }\n\n    @Test\n    public void generalStatement() {\n        List<String> expected = asList(\"SELECT * FROM x\");\n        assertThat(processor.statement(\"SELECT * FROM x\"), is(expected));\n    }\n\n}\n"
  },
  {
    "path": "src/test/java/com/klarna/hiverunner/sql/split/BaseContextTest.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner.sql.split;\n\nimport static java.util.Collections.singletonList;\n\nimport static org.hamcrest.CoreMatchers.is;\nimport static org.hamcrest.MatcherAssert.assertThat;\nimport static org.mockito.Mockito.when;\n\nimport java.util.Collections;\nimport java.util.StringTokenizer;\n\nimport org.junit.jupiter.api.Test;\nimport org.junit.jupiter.api.extension.ExtendWith;\nimport org.mockito.Mock;\nimport org.mockito.junit.jupiter.MockitoExtension;\n\n@ExtendWith(MockitoExtension.class)\npublic class BaseContextTest {\n\n    @Mock\n    private Consumer consumer;\n\n    private BaseContext context = new BaseContext(new StringTokenizer(\"\"));\n\n    @Test\n    public void appendAndFlush() {\n        context.append(\"abc\");\n        assertThat(context.getStatements(), is(Collections.<String>emptyList()));\n        context.append(\"def\");\n        context.flush();\n        assertThat(context.getStatements(), is(singletonList(\"abcdef\")));\n    }\n\n    @Test\n    public void statementAndFlush() {\n        context.append(\"abc\");\n        assertThat(context.statement(), is(\"abc\"));\n        context.flush();\n        assertThat(context.statement(), is(\"\"));\n    }\n\n    @Test\n    public void appendWith() {\n        
when(consumer.consume(context)).thenReturn(\"statement\");\n        context.appendWith(consumer);\n        assertThat(context.statement(), is(\"statement\"));\n        context.flush();\n        assertThat(context.getStatements(), is(singletonList(\"statement\")));\n    }\n}\n"
  },
  {
    "path": "src/test/java/com/klarna/hiverunner/sql/split/CloseStatementRuleTest.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner.sql.split;\n\nimport static org.mockito.Mockito.verify;\n\nimport org.junit.jupiter.api.Test;\nimport org.junit.jupiter.api.extension.ExtendWith;\nimport org.mockito.Mock;\nimport org.mockito.junit.jupiter.MockitoExtension;\n\n@ExtendWith(MockitoExtension.class)\npublic class CloseStatementRuleTest {\n\n    private static TokenRule rule = CloseStatementRule.INSTANCE;\n\n    @Mock\n    private Context context;\n\n    @Test\n    public void handle() {\n        rule.handle(null, context);\n        verify(context).flush();\n    }\n}\n"
  },
  {
    "path": "src/test/java/com/klarna/hiverunner/sql/split/ConsumerEolTest.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner.sql.split;\n\nimport static org.hamcrest.CoreMatchers.is;\nimport static org.hamcrest.MatcherAssert.assertThat;\nimport static org.mockito.Mockito.when;\n\nimport static com.klarna.hiverunner.sql.split.Consumer.UNTIL_EOL;\n\nimport java.util.StringTokenizer;\n\nimport org.junit.jupiter.api.BeforeEach;\nimport org.junit.jupiter.api.Test;\nimport org.junit.jupiter.api.extension.ExtendWith;\nimport org.mockito.Mock;\nimport org.mockito.junit.jupiter.MockitoExtension;\n\n@ExtendWith(MockitoExtension.class)\npublic class ConsumerEolTest {\n\n    @Mock\n    private Context context;\n    @Mock\n    private StringTokenizer tokenizer;\n\n    @BeforeEach\n    public void setup() {\n        when(context.tokenizer()).thenReturn(tokenizer);\n    }\n\n    @Test\n    public void consumeLine() {\n        when(tokenizer.nextElement()).thenReturn(\"a\", \" \", \"b\", \"\\n\");\n        when(tokenizer.hasMoreElements()).thenReturn(true, true, true, true, false);\n        assertThat(UNTIL_EOL.consume(context), is(\"a b\\n\"));\n    }\n\n    @Test\n    public void consumeNoCR() {\n        when(tokenizer.nextElement()).thenReturn(\"a\", \" \", \"b\");\n        when(tokenizer.hasMoreElements()).thenReturn(true, true, true, false);\n        assertThat(UNTIL_EOL.consume(context), is(\"a 
b\"));\n    }\n\n    @Test\n    public void consumeMultiLine() {\n        when(tokenizer.nextElement()).thenReturn(\"a\", \" \", \"b\", \"\\n\", \"c\");\n        when(tokenizer.hasMoreElements()).thenReturn(true, true, true, true, true, false);\n        assertThat(UNTIL_EOL.consume(context), is(\"a b\\n\"));\n    }\n\n}\n"
  },
  {
    "path": "src/test/java/com/klarna/hiverunner/sql/split/DefaultTokenRuleTest.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner.sql.split;\n\nimport static org.mockito.Mockito.verify;\n\nimport org.junit.jupiter.api.Test;\nimport org.junit.jupiter.api.extension.ExtendWith;\nimport org.mockito.Mock;\nimport org.mockito.junit.jupiter.MockitoExtension;\n\n@ExtendWith(MockitoExtension.class)\npublic class DefaultTokenRuleTest {\n\n    private static TokenRule rule = DefaultTokenRule.INSTANCE;\n\n    @Mock\n    private Context context;\n\n    @Test\n    public void handle() {\n        rule.handle(\"x\", context);\n        verify(context).append(\"x\");\n    }\n\n}\n"
  },
  {
    "path": "src/test/java/com/klarna/hiverunner/sql/split/NewLineUtilTest.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner.sql.split;\n\nimport static org.hamcrest.CoreMatchers.is;\nimport static org.hamcrest.MatcherAssert.assertThat;\n\nimport static com.klarna.hiverunner.sql.split.NewLineUtil.removeLeadingTrailingNewLines;\n\nimport org.junit.jupiter.api.Test;\n\npublic class NewLineUtilTest {\n\n    @Test\n    public void typical() {\n        assertThat(removeLeadingTrailingNewLines(\"\"), is(\"\"));\n        assertThat(removeLeadingTrailingNewLines(\" \"), is(\" \"));\n        assertThat(removeLeadingTrailingNewLines(\" a \"), is(\" a \"));\n        assertThat(removeLeadingTrailingNewLines(\"a\"), is(\"a\"));\n        assertThat(removeLeadingTrailingNewLines(\" a\"), is(\" a\"));\n        assertThat(removeLeadingTrailingNewLines(\"a \"), is(\"a \"));\n        assertThat(removeLeadingTrailingNewLines(\"\\n\"), is(\"\"));\n        assertThat(removeLeadingTrailingNewLines(\" \\n \"), is(\"\"));\n        assertThat(removeLeadingTrailingNewLines(\" \\n \\n \"), is(\"\"));\n        assertThat(removeLeadingTrailingNewLines(\"\\n a \\n\"), is(\" a \"));\n        assertThat(removeLeadingTrailingNewLines(\" \\n a b \\n \"), is(\" a b \"));\n        assertThat(removeLeadingTrailingNewLines(\" \\n \\n a b \\n \\n \"), is(\" a b \"));\n        assertThat(removeLeadingTrailingNewLines(\" \\n a 
b \\n \"), is(\" a b \"));\n        assertThat(removeLeadingTrailingNewLines(\" \\n a \\n b \\n \"), is(\" a \\n b \"));\n        assertThat(removeLeadingTrailingNewLines(\"\\r\"), is(\"\"));\n        assertThat(removeLeadingTrailingNewLines(\" \\r \"), is(\"\"));\n        assertThat(removeLeadingTrailingNewLines(\" \\r \\r \"), is(\"\"));\n        assertThat(removeLeadingTrailingNewLines(\"\\r a \\r\"), is(\" a \"));\n        assertThat(removeLeadingTrailingNewLines(\" \\r a b \\r \"), is(\" a b \"));\n        assertThat(removeLeadingTrailingNewLines(\" \\r \\r a b \\r \\r \"), is(\" a b \"));\n        assertThat(removeLeadingTrailingNewLines(\" \\r a b \\r \"), is(\" a b \"));\n        assertThat(removeLeadingTrailingNewLines(\" \\r a \\r b \\r \"), is(\" a \\r b \"));\n        assertThat(removeLeadingTrailingNewLines(\"\\f\"), is(\"\"));\n        assertThat(removeLeadingTrailingNewLines(\" \\f \"), is(\"\"));\n        assertThat(removeLeadingTrailingNewLines(\" \\f \\f \"), is(\"\"));\n        assertThat(removeLeadingTrailingNewLines(\"\\f a \\f\"), is(\" a \"));\n        assertThat(removeLeadingTrailingNewLines(\" \\f a b \\f \"), is(\" a b \"));\n        assertThat(removeLeadingTrailingNewLines(\" \\f \\f a b \\f \\f \"), is(\" a b \"));\n        assertThat(removeLeadingTrailingNewLines(\" \\f a b \\f \"), is(\" a b \"));\n        assertThat(removeLeadingTrailingNewLines(\"\\f\\r\"), is(\"\"));\n        assertThat(removeLeadingTrailingNewLines(\" \\f\\r \"), is(\"\"));\n        assertThat(removeLeadingTrailingNewLines(\" \\f\\r \\f\\r \"), is(\"\"));\n        assertThat(removeLeadingTrailingNewLines(\"\\f\\r a \\f\\r\"), is(\" a \"));\n        assertThat(removeLeadingTrailingNewLines(\" \\f\\r a b \\f\\r \"), is(\" a b \"));\n        assertThat(removeLeadingTrailingNewLines(\" \\f\\r \\f\\r a b \\f\\r \\f\\r \"), is(\" a b \"));\n        assertThat(removeLeadingTrailingNewLines(\" \\f\\r a b \\f\\r \"), is(\" a b \"));\n        
assertThat(removeLeadingTrailingNewLines(\" \\f\\r a \\f\\r b \\f\\r \"), is(\" a \\f\\r b \"));\n        assertThat(removeLeadingTrailingNewLines(\"\\n\\r\"), is(\"\"));\n        assertThat(removeLeadingTrailingNewLines(\" \\n\\r \"), is(\"\"));\n        assertThat(removeLeadingTrailingNewLines(\" \\n\\r \\n\\r \"), is(\"\"));\n        assertThat(removeLeadingTrailingNewLines(\"\\n\\r a \\n\\r\"), is(\" a \"));\n        assertThat(removeLeadingTrailingNewLines(\" \\n\\r a b \\n\\r \"), is(\" a b \"));\n        assertThat(removeLeadingTrailingNewLines(\" \\n\\r \\n\\r a b \\n\\r \\n\\r \"), is(\" a b \"));\n        assertThat(removeLeadingTrailingNewLines(\" \\n\\r a b \\n\\r \"), is(\" a b \"));\n        assertThat(removeLeadingTrailingNewLines(\" \\n\\r a \\n\\r b \\n\\r \"), is(\" a \\n\\r b \"));\n        assertThat(removeLeadingTrailingNewLines(\"\\t\"), is(\"\\t\"));\n        assertThat(removeLeadingTrailingNewLines(\"\\ta\\t\"), is(\"\\ta\\t\"));\n        assertThat(removeLeadingTrailingNewLines(\"a\"), is(\"a\"));\n        assertThat(removeLeadingTrailingNewLines(\"\\ta\"), is(\"\\ta\"));\n        assertThat(removeLeadingTrailingNewLines(\"a\\t\"), is(\"a\\t\"));\n        assertThat(removeLeadingTrailingNewLines(\"\\n\"), is(\"\"));\n        assertThat(removeLeadingTrailingNewLines(\"\\t\\n\\t\"), is(\"\"));\n        assertThat(removeLeadingTrailingNewLines(\"\\t\\n\\t\\n\\t\"), is(\"\"));\n        assertThat(removeLeadingTrailingNewLines(\"\\n\\ta\\t\\n\"), is(\"\\ta\\t\"));\n        assertThat(removeLeadingTrailingNewLines(\"\\t\\n\\ta\\tb\\t\\n\\t\"), is(\"\\ta\\tb\\t\"));\n        assertThat(removeLeadingTrailingNewLines(\"\\t\\n\\t\\n\\ta\\tb\\t\\n\\t\\n\\t\"), is(\"\\ta\\tb\\t\"));\n    }\n}\n"
  },
  {
    "path": "src/test/java/com/klarna/hiverunner/sql/split/PreserveCommentsRuleTest.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner.sql.split;\n\nimport static org.hamcrest.CoreMatchers.is;\nimport static org.hamcrest.MatcherAssert.assertThat;\n\nimport java.util.StringTokenizer;\n\nimport org.junit.jupiter.api.Test;\n\npublic class PreserveCommentsRuleTest {\n\n    private static TokenRule rule = PreserveCommentsRule.INSTANCE;\n\n    @Test\n    public void withInlineComment() {\n        StringTokenizer tokenizer = new StringTokenizer(\"x -- a b\\n\", \" \", true);\n        Context context = new BaseContext(tokenizer);\n        rule.handle(tokenizer.nextToken(), context); // \"x\"\n        rule.handle(tokenizer.nextToken(), context); // \" \"\n        rule.handle(tokenizer.nextToken(), context); // \"--\"\n        // Should find comment and read until EOL\n        assertThat(context.statement(), is(\"x -- a b\\n\"));\n    }\n\n    @Test\n    public void noComment() {\n        StringTokenizer tokenizer = new StringTokenizer(\"x a b\\n\", \" \", true);\n        Context context = new BaseContext(tokenizer);\n        rule.handle(tokenizer.nextToken(), context); // \"x\"\n        rule.handle(tokenizer.nextToken(), context); // \" \"\n        rule.handle(tokenizer.nextToken(), context); // \"a\"\n        rule.handle(tokenizer.nextToken(), context); // \" \"\n        
rule.handle(tokenizer.nextToken(), context); // \"b\\n\"\n        assertThat(context.statement(), is(\"x a b\\n\"));\n    }\n\n}\n"
  },
  {
    "path": "src/test/java/com/klarna/hiverunner/sql/split/PreserveQuotesRuleTest.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner.sql.split;\n\nimport static org.hamcrest.CoreMatchers.is;\nimport static org.hamcrest.MatcherAssert.assertThat;\n\nimport java.util.StringTokenizer;\n\nimport org.junit.jupiter.api.Test;\n\npublic class PreserveQuotesRuleTest {\n\n    private static TokenRule rule = PreserveQuotesRule.INSTANCE;\n\n    @Test\n    public void singleQuotes() {\n        StringTokenizer tokenizer = new StringTokenizer(\"'b c' d\\n\", \" '\\\"\", true);\n        Context context = new BaseContext(tokenizer);\n        rule.handle(tokenizer.nextToken(), context); // \"'\"\n        assertThat(context.statement(), is(\"'b c'\"));\n    }\n\n    @Test\n    public void singleQuotesCrossLine() {\n        StringTokenizer tokenizer = new StringTokenizer(\"'b \\n c' d\\n\", \" '\\\"\", true);\n        Context context = new BaseContext(tokenizer);\n        rule.handle(tokenizer.nextToken(), context); // \"'\"\n        assertThat(context.statement(), is(\"'b \\n c'\"));\n    }\n\n    @Test\n    public void singleEscapedQuotes() {\n        StringTokenizer tokenizer = new StringTokenizer(\"'b \\\\' c' d\\n\", \" '\\\"\", true);\n        Context context = new BaseContext(tokenizer);\n        rule.handle(tokenizer.nextToken(), context); // \"'\"\n        assertThat(context.statement(), is(\"'b \\\\' 
c'\"));\n    }\n\n    @Test\n    public void doubleQuotes() {\n        StringTokenizer tokenizer = new StringTokenizer(\"\\\"b c\\\" d\\n\", \" '\\\"\", true);\n        Context context = new BaseContext(tokenizer);\n        rule.handle(tokenizer.nextToken(), context);\n        assertThat(context.statement(), is(\"\\\"b c\\\"\"));\n    }\n\n    @Test\n    public void doubleQuotesCrossLine() {\n        StringTokenizer tokenizer = new StringTokenizer(\"\\\"b \\n c\\\" d\\n\", \" '\\\"\", true);\n        Context context = new BaseContext(tokenizer);\n        rule.handle(tokenizer.nextToken(), context);\n        assertThat(context.statement(), is(\"\\\"b \\n c\\\"\"));\n    }\n\n    @Test\n    public void doubleEscapedQuotes() {\n        StringTokenizer tokenizer = new StringTokenizer(\"\\\"b \\\\\\\" c\\\" d\\n\", \" '\\\"\", true);\n        Context context = new BaseContext(tokenizer);\n        rule.handle(tokenizer.nextToken(), context);\n        assertThat(context.statement(), is(\"\\\"b \\\\\\\" c\\\"\"));\n    }\n\n    @Test\n    public void doubleSingleQuotes() {\n        StringTokenizer tokenizer = new StringTokenizer(\"\\\"b ' c\\\" d\\n\", \" '\\\"\", true);\n        Context context = new BaseContext(tokenizer);\n        rule.handle(tokenizer.nextToken(), context);\n        assertThat(context.statement(), is(\"\\\"b ' c\\\"\"));\n    }\n\n    @Test\n    public void singleDoubleQuotes() {\n        StringTokenizer tokenizer = new StringTokenizer(\"'b \\\" c' d\\n\", \" '\\\"\", true);\n        Context context = new BaseContext(tokenizer);\n        rule.handle(tokenizer.nextToken(), context); // \"'\"\n        assertThat(context.statement(), is(\"'b \\\" c'\"));\n    }\n\n}\n"
  },
  {
    "path": "src/test/java/com/klarna/hiverunner/sql/split/StatementSplitterTest.java",
    "content": "/**\n * Copyright (C) 2013-2021 Klarna AB\n * Copyright (C) 2021 The HiveRunner Contributors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.klarna.hiverunner.sql.split;\n\nimport static org.hamcrest.CoreMatchers.is;\nimport static org.hamcrest.MatcherAssert.assertThat;\nimport static org.mockito.Mockito.when;\n\nimport static com.klarna.hiverunner.sql.split.StatementSplitter.SQL_SPECIAL_CHARS;\n\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.List;\n\nimport org.junit.jupiter.api.BeforeEach;\nimport org.junit.jupiter.api.Test;\nimport org.junit.jupiter.api.extension.ExtendWith;\nimport org.mockito.Mock;\nimport org.mockito.junit.jupiter.MockitoExtension;\n\nimport com.klarna.hiverunner.builder.Statement;\nimport com.klarna.hiverunner.sql.HiveRunnerStatement;\nimport com.klarna.hiverunner.sql.cli.CommandShellEmulator;\n\n// Checks the application of rules, not specific emulator implementations. See other tests for that.\n@ExtendWith(MockitoExtension.class)\npublic class StatementSplitterTest {\n\n    @Mock\n    private CommandShellEmulator emulator;\n\n    private StatementSplitter splitter;\n\n    private List<Statement> asStatementList(String... 
strings) {\n        List<Statement> statements = new ArrayList<>();\n        int index = 0;\n        for (String string : strings) {\n            statements.add(new HiveRunnerStatement(index++, string));\n        }\n        return statements;\n    }\n\n    @BeforeEach\n    public void setupEmulator() {\n        // Creates a simple emulator that understands ';' only\n        when(emulator.specialCharacters()).thenReturn(SQL_SPECIAL_CHARS);\n        when(emulator.splitterRules())\n                .thenReturn(Arrays.<TokenRule>asList(CloseStatementRule.INSTANCE, DefaultTokenRule.INSTANCE));\n        splitter = new StatementSplitter(emulator);\n    }\n\n    @Test\n    public void defaultRule() {\n        assertThat(splitter.split(\"foo\"), is(asStatementList(\"foo\")));\n    }\n\n    @Test\n    public void multipleRules() {\n        assertThat(splitter.split(\"foo;bar;baz\"), is(asStatementList(\"foo\", \"bar\", \"baz\")));\n    }\n\n}\n"
  },
  {
    "path": "src/test/resources/AggregateViewTest/create_table.sql",
    "content": "CREATE DATABASE db;\n\nCREATE EXTERNAL TABLE `db.mvtdescriptionchangeinfo`(\n  `timestamp` bigint COMMENT '',\n  `testid` string COMMENT '',\n  `type` string COMMENT '',\n  `contents` string COMMENT '',\n  `hostname` string COMMENT '')\nPARTITIONED BY (\n  `request_log_date` string,\n  `request_log_hour` string);\n\n  CREATE VIEW db.latestnodemvtchanges AS\n  SELECT testid, hostname, max(`timestamp`) AS mts\n  FROM db.mvtdescriptionchangeinfo\n  WHERE `timestamp` IS NOT NULL\n  GROUP BY testid, hostname;\n\n  CREATE VIEW db.latesttestchangepairs AS\n  SELECT a.testid, a.type\n  FROM db.mvtdescriptionchangeinfo a\n  INNER JOIN db.latestnodemvtchanges b ON a.testid = b.testid AND a.`timestamp` = b.mts\n  GROUP BY a.testid, a.type;\n"
  },
  {
    "path": "src/test/resources/CommentTest/comment.sql",
    "content": "-- hello\nset x=1;\n\nset y=\"\n-- goodbye\n\";\n"
  },
  {
    "path": "src/test/resources/CtasTest/ctas.sql",
    "content": "CREATE EXTERNAL TABLE foo (s1 string, s2 string)\n  ROW FORMAT DELIMITED FIELDS TERMINATED BY ','\n  STORED AS TEXTFILE\n  LOCATION '${hiveconf:hadoop.tmp.dir}/foo/';\n\n\nCREATE TABLE foo_prim as select * from foo;\n\n\n"
  },
  {
    "path": "src/test/resources/HelloHiveRunnerTest/calculate_max.sql",
    "content": "insert into my_schema.result\n  select year, max(value) from source_db.test_table group by year;"
  },
  {
    "path": "src/test/resources/HelloHiveRunnerTest/create_ctas.sql",
    "content": "USE ${hiveconf:my.schema};\n\nCREATE TABLE foo_prim as select i, s from foo;\n\n\n\n"
  },
  {
    "path": "src/test/resources/HelloHiveRunnerTest/create_max.sql",
    "content": "create database my_schema;\n\nCREATE EXTERNAL TABLE my_schema.result (year STRING, value INT)\n  stored as sequencefile\n;"
  },
  {
    "path": "src/test/resources/HelloHiveRunnerTest/create_table.sql",
    "content": "USE ${hiveconf:my.schema};\n\nCREATE EXTERNAL TABLE foo (i int, s string)\n  ROW FORMAT DELIMITED FIELDS TERMINATED BY ','\n  STORED AS TEXTFILE\n  LOCATION '${hiveconf:MY.HDFS.DIR}/foo/';\n"
  },
  {
    "path": "src/test/resources/HelloHiveRunnerTest/hello_hive_runner.csv",
    "content": "1,Hello\n,bar"
  },
  {
    "path": "src/test/resources/HiveRunnerAnnotationsTest/hql1.sql",
    "content": "CREATE EXTERNAL TABLE foo (s1 int, s2 string)\n  ROW FORMAT DELIMITED FIELDS TERMINATED BY ','\n  STORED AS TEXTFILE\n  LOCATION '${hiveconf:hadoop.tmp.dir}/foo/';"
  },
  {
    "path": "src/test/resources/HiveRunnerAnnotationsTest/setupFile.csv",
    "content": "create table fox(id string);"
  },
  {
    "path": "src/test/resources/HiveRunnerAnnotationsTest/setupPath.csv",
    "content": "create table love(id string);"
  },
  {
    "path": "src/test/resources/HiveRunnerAnnotationsTest/testData.csv",
    "content": "5,F\n7,W"
  },
  {
    "path": "src/test/resources/HiveRunnerAnnotationsTest/testData2.csv",
    "content": "8,T\n10,Q"
  },
  {
    "path": "src/test/resources/HiveRunnerExtensionTest/test_query.sql",
    "content": "CREATE DATABASE testdb;\n\nCREATE EXTERNAL TABLE testdb.test_table\n(\n  field1 string,\n  field2 string\n)\n"
  },
  {
    "path": "src/test/resources/InsertIntoTableIntegrationTest/data.tsv",
    "content": "a1\tb1\tc1\td1\te1\na2\tb2\tc2\td2\te2\n"
  },
  {
    "path": "src/test/resources/InsertIntoTableIntegrationTest/dataWithCustomNullValue.csv",
    "content": "a1,b1,c1,d1,NULL\na2,b2,NULL,d2,e2"
  },
  {
    "path": "src/test/resources/InsertTestDataTest/data1.tsv",
    "content": "textA\t42\ttrue\ntextB\t3\ttrue\ntextC\t99\tfalse"
  },
  {
    "path": "src/test/resources/InsertTestDataTest/data2.tsv",
    "content": "textA:42:A\n__NULL__:3:A\ntextC:99:B"
  },
  {
    "path": "src/test/resources/InsertTestDataTest/dataWithHeader1.tsv",
    "content": "col_b\tcol_a\tcol_c\n42\ttextA\ttrue\n3\ttextB\ttrue\n99\ttextC\tfalse\n"
  },
  {
    "path": "src/test/resources/InsertTestDataTest/dataWithHeader2.tsv",
    "content": "col_a\tcol_c\ntextA\ttrue\ntextB\ttrue\ntextC\tfalse"
  },
  {
    "path": "src/test/resources/MethodLevelResourceTest/MethodLevelResourceTest.txt",
    "content": "1,2,3"
  },
  {
    "path": "src/test/resources/OrcSnappyTest/ctas.sql",
    "content": "CREATE EXTERNAL TABLE foo (s1 string, s2 string)\n  ROW FORMAT DELIMITED FIELDS TERMINATED BY ','\n  STORED AS TEXTFILE\n  LOCATION '${hiveconf:hadoop.tmp.dir}/foo/';\n\nSET hive.default.fileformat.managed=ORC;\nSET hive.default.fileformat=ORC;\n\nCREATE TABLE foo_orc_nocomp as select * from foo;\n\nSET hive.exec.orc.default.compress=SNAPPY;\n\nSET hive.exec.compress.intermediate=true;\nSET hive.exec.compress.output=true;\n\nSET mapreduce.map.output.compress=true;\n\nSET mapred.map.output.compression.codec=org.apache.hadoop.io.compress.SnappyCodec;\nSET mapreduce.map.output.compress.codec=org.apache.hadoop.io.compress.SnappyCodec;\n\nSET mapreduce.output.fileoutputformat.compress.codec=org.apache.hadoop.io.compress.SnappyCodec;\nSET mapred.output.compression.codec=org.apache.hadoop.io.compress.SnappyCodec;\n\nCREATE TABLE foo_orc_snappy as select * from foo;\n"
  },
  {
    "path": "src/test/resources/PartitionSupportTest/hql_example.sql",
    "content": "CREATE EXTERNAL TABLE ${hiveconf:table.name} (s1 string, s2 string, s3 string)\n    PARTITIONED BY (\n        year int,\n        month int)\n    ROW FORMAT SERDE 'com.klarna.hiverunner.ToUpperCaseSerDe'\n        WITH SERDEPROPERTIES (\n            \"key\"=\"value\",\n            \"KEY\"= \"VALUE\"\n        )\n        STORED AS TEXTFILE\nLOCATION '${hiveconf:HDFS_ROOT_FOO}/foo/';\n\n\n\n\n"
  },
  {
    "path": "src/test/resources/SerdeTest/create_table.sql",
    "content": "CREATE TABLE serde_test (\n  key STRING,\n  value STRING\n)\nROW FORMAT SERDE 'org.apache.hadoop.hive.contrib.serde2.RegexSerDe'\nWITH SERDEPROPERTIES  (\n\"input.regex\" = \"([0-9]*)#([A-Z]*).*\"\n)\nSTORED AS TEXTFILE\nLOCATION '${hiveconf:hadoop.tmp.dir}/serde';"
  },
  {
    "path": "src/test/resources/SerdeTest/hql_custom_serde.sql",
    "content": "CREATE EXTERNAL TABLE customSerdeTable (s1 string, s2 string, s3 string)\n    ROW FORMAT SERDE 'com.klarna.hiverunner.ToUpperCaseSerDe'\n        WITH SERDEPROPERTIES (\n            \"key\"=\"value\",\n            \"KEY\"= \"VALUE\"\n        )\n        STORED AS TEXTFILE\nLOCATION '${hiveconf:hadoop.tmp.dir}/customSerde';\n\n\n\n\n"
  },
  {
    "path": "src/test/resources/SetTest/test_with_set.hql",
    "content": "CREATE DATABASE testdb;\n\nSET hive.exec.dynamic.partition.mode=nonstrict;\nSET hive.exec.dynamic.partition=true;\n\nCREATE EXTERNAL TABLE testdb.test \n(\n  field1 string, \n  field2 string\n)\nSTORED AS ORC;\n"
  },
  {
    "path": "src/test/resources/TsvFileParserTest/data.csv",
    "content": "a1,b1,c1,d1,\na2,b2,,d2,e2"
  },
  {
    "path": "src/test/resources/TsvFileParserTest/data.tsv",
    "content": "a1\tb1\tc1\td1\te1\na2\tb2\tc2\td2\te2"
  },
  {
    "path": "src/test/resources/TsvFileParserTest/dataWithCustomNullValue.csv",
    "content": "a1,b1,c1,d1,NULL\na2,b2,NULL,d2,e2\n"
  },
  {
    "path": "src/test/resources/TsvFileParserTest/dataWithHeader.csv",
    "content": "a,b,c,d,e\na1,b1,c1,d1,\na2,b2,,d2,e2"
  },
  {
    "path": "src/test/resources/TsvFileParserTest/dataWithHeader.tsv",
    "content": "a\tb\tc\td\te\na1\tb1\tc1\td1\te1\na2\tb2\tc2\td2\te2"
  },
  {
    "path": "src/test/resources/log4j2.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<Configuration status=\"WARN\">\n  <Appenders>\n    <Console name=\"Console\" target=\"SYSTEM_OUT\">\n      <PatternLayout\n        pattern=\"%d{ISO8601} %-5p %c:%L - %m%n\" />\n    </Console>\n    <Console name=\"Rate-Limited-Console\" target=\"SYSTEM_OUT\">\n      <PatternLayout pattern=\"%d{ISO8601} %-5p %c:%L - %m%n\" />\n      <BurstFilter level=\"WARN\" rate=\"1\" maxBurst=\"1\"/>\n    </Console>\n  </Appenders>\n  <Loggers>\n    <Logger name=\"DataNucleus\" level=\"warn\" additivity=\"false\">\n      <AppenderRef ref=\"Rate-Limited-Console\" />\n    </Logger>\n    <Logger name=\"org.apache.hadoop.hive\" level=\"warn\" />\n    <Logger name=\"org.apache.hive\" level=\"warn\" />\n    <Root level=\"info\">\n      <AppenderRef ref=\"Console\" />\n    </Root>\n  </Loggers>\n</Configuration>\n"
  }
]