[
  {
    "path": ".changes/0.0.0.md",
    "content": "## Previous Releases\n\nFor information on prior major and minor releases, see their changelogs:\n\n* [1.9](https://github.com/starburstdata/dbt-trino/blob/1.9.latest/CHANGELOG.md)\n* [1.8](https://github.com/starburstdata/dbt-trino/blob/1.8.latest/CHANGELOG.md)\n* [1.7](https://github.com/starburstdata/dbt-trino/blob/1.7.latest/CHANGELOG.md)\n* [1.6](https://github.com/starburstdata/dbt-trino/blob/1.6.latest/CHANGELOG.md)\n* [1.5](https://github.com/starburstdata/dbt-trino/blob/1.5.latest/CHANGELOG.md)\n* [1.4](https://github.com/starburstdata/dbt-trino/blob/1.4.latest/CHANGELOG.md)\n* [1.3](https://github.com/starburstdata/dbt-trino/blob/1.3.latest/CHANGELOG.md)\n* [1.2](https://github.com/starburstdata/dbt-trino/blob/1.2.latest/CHANGELOG.md)\n* [1.1](https://github.com/starburstdata/dbt-trino/blob/1.1.latest/CHANGELOG.md)\n* [1.0 and earlier](https://github.com/starburstdata/dbt-trino/blob/1.0.latest/CHANGELOG.md)\n"
  },
  {
    "path": ".changes/1.10.0/Features-20251210-194211.yaml",
    "content": "kind: Features\nbody: Add support for catalog integration\ntime: 2025-12-10T19:42:11.700646+01:00\ncustom:\n  Author: damian3031\n  Issue: \"\"\n  PR: \"502\"\n"
  },
  {
    "path": ".changes/1.10.0.md",
    "content": "## dbt-trino 1.10.0 - December 16, 2025\n### Features\n- Add support for catalog integration ([#502](https://github.com/starburstdata/dbt-trino/pull/502))\n\n### Contributors\n- [@damian3031](https://github.com/damian3031) ([#502](https://github.com/starburstdata/dbt-trino/pull/502))\n"
  },
  {
    "path": ".changes/1.10.1/Dependencies-20260115-092226.yaml",
    "content": "kind: Dependencies\nbody: Bump dbt-adapters>=1.16,<2.0\ntime: 2026-01-15T09:22:26.968512-08:00\ncustom:\n    Author: zqureshi\n    Issue: \"507\"\n    PR: \"507\"\n"
  },
  {
    "path": ".changes/1.10.1.md",
    "content": "## dbt-trino 1.10.1 - January 16, 2026\n### Dependencies\n- Bump dbt-adapters>=1.16,<2.0 ([#507](https://github.com/starburstdata/dbt-trino/issues/507), [#507](https://github.com/starburstdata/dbt-trino/pull/507))\n\n### Contributors\n- [@zqureshi](https://github.com/zqureshi) ([#507](https://github.com/starburstdata/dbt-trino/pull/507))\n"
  },
  {
    "path": ".changes/header.tpl.md",
    "content": "# dbt-trino Changelog\n\n- This file provides a full account of all changes to `dbt-trino`\n- Changes are listed under the (pre)release in which they first appear. Subsequent releases include changes from previous releases.\n- \"Breaking changes\" listed under a version may require action from end users or external maintainers when upgrading to that version.\n- Do not edit this file directly. This file is auto-generated using [changie](https://github.com/miniscruff/changie). For details on how to document a change, see [the contributing guide](https://github.com/starburstdata/dbt-trino/blob/master/CONTRIBUTING.md#adding-changelog-entry)\n"
  },
  {
    "path": ".changes/unreleased/.gitkeep",
    "content": ""
  },
  {
    "path": ".changie.yaml",
    "content": "changesDir: .changes\nunreleasedDir: unreleased\nheaderPath: header.tpl.md\nversionHeaderPath: \"\"\nchangelogPath: CHANGELOG.md\nversionExt: md\nversionFormat: '## dbt-trino {{.Version}} - {{.Time.Format \"January 02, 2006\"}}'\nkindFormat: '### {{.Kind}}'\nchangeFormat: '- {{.Body}} ({{if ne .Custom.Issue \"\"}}[#{{.Custom.Issue}}](https://github.com/starburstdata/dbt-trino/issues/{{.Custom.Issue}}), {{end}}[#{{.Custom.PR}}](https://github.com/starburstdata/dbt-trino/pull/{{.Custom.PR}}))'\n\nkinds:\n  - label: Breaking Changes\n  - label: Features\n  - label: Fixes\n  - label: Under the Hood\n  - label: Dependencies\n    changeFormat: '- {{.Body}} ({{if ne .Custom.Issue \"\"}}[#{{.Custom.Issue}}](https://github.com/starburstdata/dbt-trino/issues/{{.Custom.Issue}}), {{end}}[#{{.Custom.PR}}](https://github.com/starburstdata/dbt-trino/pull/{{.Custom.PR}}))'\n  - label: Security\n    changeFormat: '- {{.Body}} ({{if ne .Custom.Issue \"\"}}[#{{.Custom.Issue}}](https://github.com/starburstdata/dbt-trino/issues/{{.Custom.Issue}}), {{end}}[#{{.Custom.PR}}](https://github.com/starburstdata/dbt-trino/pull/{{.Custom.PR}}))'\n\nnewlines:\n  beforeChangelogHeader: 1\n\ncustom:\n  - key: Author\n    label: GitHub Username(s) (separated by a single space if multiple)\n    type: string\n    minLength: 3\n  - key: Issue\n    label: GitHub Issue Number\n    type: int\n    minInt: 1\n    optional: true\n  - key: PR\n    label: GitHub Pull Request Number\n    type: int\n    minInt: 1\n\nfooterFormat: |\n  {{- $contributorDict := dict }}\n  {{- range $change := .Changes }}\n    {{- $authorList := splitList \" \" $change.Custom.Author }}\n    {{- /* loop through all authors for a PR */}}\n    {{- range $author := $authorList }}\n      {{- $authorLower := lower $author }}\n      {{- $prLink := $change.Kind }}\n      {{- $prLink = \"[#pr](https://github.com/starburstdata/dbt-trino/pull/pr)\" | replace \"pr\" $change.Custom.PR }}\n      {{- /* check if this contributor has other PRs associated with them already */}}\n      {{- if hasKey $contributorDict $author }}\n        {{- $prList := get $contributorDict $author }}\n        {{- $prList = append $prList $prLink  }}\n        {{- $contributorDict := set $contributorDict $author $prList }}\n      {{- else }}\n        {{- $prList := list $prLink }}\n        {{- $contributorDict := set $contributorDict $author $prList }}\n      {{- end }}\n    {{- end}}\n  {{- end }}\n  {{- /* no indentation here for formatting so the final markdown doesn't have unneeded indentations */}}\n  {{- if $contributorDict}}\n  ### Contributors\n  {{- range $k,$v := $contributorDict }}\n  - [@{{$k}}](https://github.com/{{$k}}) ({{ range $index, $element := $v }}{{if $index}}, {{end}}{{$element}}{{end}})\n  {{- end }}\n  {{- end }}\n"
  },
  {
    "path": ".flake8",
    "content": "[flake8]\nselect =\n    E\n    W\n    F\nignore =\n    W503,\n    W504,\n    E203,\n    E741,\n    E501,\nexclude = test\n"
  },
  {
    "path": ".github/ISSUE_TEMPLATE/bug_report.yml",
    "content": "---\nname: Bug report\ndescription: Report a bug or an issue you've found with dbt-trino\nlabels: bug\nbody:\n  - type: textarea\n    attributes:\n      label: Expected behavior\n      description: What do you think should have happened\n      placeholder: >\n        A clear and concise description of what you expected to happen.\n    validations:\n      required: true\n  - type: textarea\n    attributes:\n      label: Actual behavior\n      description: Describe what actually happened\n      placeholder: >\n        A clear and concise description of what actually happened.\n    validations:\n      required: true\n  - type: textarea\n    attributes:\n      label: Steps To Reproduce\n      description: This will help us reproduce your issue\n      placeholder: >\n        In as much detail as possible, please provide steps to reproduce the issue.\n        Sample code that triggers the issue, relevant server settings, etc is all very helpful here.\n    validations:\n      required: true\n  - type: textarea\n    attributes:\n      label: Log output/Screenshots\n      description: What do you think went wrong?\n      placeholder: >\n        If applicable, add log output and/or screenshots to help explain your problem.\n  - type: input\n    attributes:\n      label: Operating System\n      description: What Operating System are you using?\n      placeholder: \"You can get it via `cat /etc/os-release` for example\"\n    validations:\n      required: true\n  - type: input\n    attributes:\n      label: dbt version\n      description: \"Execute `dbt --version`\"\n      placeholder: Which version of dbt are you using?\n    validations:\n      required: true\n  - type: input\n    attributes:\n      label: Trino Server version\n      description: \"Run `SELECT VERSION();` on your Trino server\"\n      placeholder: Which Trino server version are you using?\n    validations:\n      required: true\n  - type: input\n    attributes:\n      label: Python version\n      description: \"You can get it via executing `python --version`\"\n      placeholder: What Python version are you using?\n    validations:\n      required: true\n  - type: checkboxes\n    attributes:\n      label: Are you willing to submit PR?\n      description: >\n        This is absolutely not required, but we are happy to guide you in the contribution process\n        especially if you already have a good understanding of how to implement the feature.\n      options:\n        - label: Yes I am willing to submit a PR!\n  - type: markdown\n    attributes:\n      value: \"Thanks for completing our form!\"\n"
  },
  {
    "path": ".github/ISSUE_TEMPLATE/config.yml",
    "content": "---\ncontact_links:\n  - name: Ask a question or get help around `dbt-trino` on Slack\n    url: https://getdbt.slack.com/channels/db-presto-trino\n    about: Get help and share your experiences around `dbt-trino` with the `dbt` Slack community.\n"
  },
  {
    "path": ".github/ISSUE_TEMPLATE/feature_request.yml",
    "content": "---\nname: Feature request\ndescription: Suggest an idea for dbt-trino\nlabels: enhancement\nbody:\n  - type: textarea\n    attributes:\n      label: Describe the feature\n      description: What would you like to happen?\n      placeholder: >\n        A clear and concise description of what you want to happen\n        and what problem it would solve.\n    validations:\n      required: true\n  - type: textarea\n    attributes:\n      label: Describe alternatives you've considered\n      description: What did you try to make it happen?\n      placeholder: >\n        A clear and concise description of any alternative solutions or features you've considered.\n  - type: textarea\n    attributes:\n      label: Who will benefit?\n      placeholder: >\n        What kind of use case will this feature be useful for? Please be specific and provide examples, this will help us prioritize properly.\n  - type: checkboxes\n    attributes:\n      label: Are you willing to submit PR?\n      description: >\n        This is absolutely not required, but we are happy to guide you in the contribution process\n        especially if you already have a good understanding of how to implement the feature.\n      options:\n        - label: Yes I am willing to submit a PR!\n  - type: markdown\n    attributes:\n      value: \"Thanks for completing our form!\"\n"
  },
  {
    "path": ".github/dependabot.yml",
    "content": "version: 2\nupdates:\n  # python dependencies\n  - package-ecosystem: \"pip\"\n    directory: \"/\"\n    schedule:\n      interval: \"daily\"\n    rebase-strategy: \"disabled\"\n    labels:\n      - \"Skip Changelog\"\n      - \"dependencies\"\n  - package-ecosystem: \"github-actions\"\n    directory: \"/\"\n    schedule:\n      interval: \"weekly\"\n    rebase-strategy: \"disabled\"\n"
  },
  {
    "path": ".github/pull_request_template.md",
    "content": "## Overview\n<!---\n  Include the number of the issue addressed by this PR above if applicable.\n  PRs for code changes without an associated issue *will not be merged*.\n  See CONTRIBUTING.md for more information.\n\n  Example:\n    resolves #1234\n-->\n\n## Checklist\n\n- [ ] I have run this code in development and it appears to resolve the stated issue\n- [ ] This PR includes tests, or tests are not required/relevant for this PR\n- [ ] `README.md` updated and added information about my change\n- [ ] I have run `changie new` to [create a changelog entry](https://github.com/starburstdata/dbt-trino/blob/master/CONTRIBUTING.md#Adding-CHANGELOG-Entry)\n"
  },
  {
    "path": ".github/workflows/bot-changelog.yml",
    "content": "# **what?**\n# When bots create a PR, this action will add a corresponding changie yaml file to that\n# PR when a specific label is added.\n#\n# The file is created off a template:\n#\n# kind: <per action matrix>\n# body: <PR title>\n# time: <current timestamp>\n# custom:\n#   Author: <PR User Login (generally the bot)>\n#   Issue: 4904\n#   PR: <PR number>\n#\n# **why?**\n# Automate changelog generation for more visability with automated bot PRs.\n#\n# **when?**\n# Once a PR is created, label should be added to PR before or after creation. You can also\n#  manually trigger this by adding the appropriate label at any time.\n#\n# **how to add another bot?**\n# Add the label and changie kind to the include matrix.  That's it!\n#\n\nname: Bot Changelog\n\non:\n  pull_request:\n    # catch when the PR is opened with the label or when the label is added\n    types: [opened, labeled]\n\npermissions:\n  contents: write\n  pull-requests: read\n\njobs:\n  generate_changelog:\n    runs-on: ubuntu-latest\n\n    steps:\n      - name: Check out the repository\n        uses: actions/checkout@v4\n        with:\n          fetch-depth: 2\n\n      - name: Create and commit changelog on bot PR\n        id: bot_changelog\n        uses: emmyoop/changie_bot@v1.0\n        with:\n          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}\n          commit_author_name: \"starburstdata-automation\"\n          commit_author_email: \"automation@starburstdata.com\"\n          commit_message: ${{ github.event.pull_request.title }}\n          changie_kind: \"Dependencies\"\n          label: \"dependencies\"\n          custom_changelog_string: \"custom:\\n  Author: ${{ github.event.pull_request.user.login }}\\n  Issue: ''\\n  PR: ${{ github.event.pull_request.number }}\"\n"
  },
  {
    "path": ".github/workflows/changelog-existence.yml",
    "content": "\n\n# **what?**\n# Checks that a file has been committed under the /.changes directory\n# as a new CHANGELOG entry.  Cannot check for a specific filename as\n# it is dynamically generated by change type and timestamp.\n# This workflow should not require any secrets since it runs for PRs\n# from forked repos.\n# By default, secrets are not passed to workflows running from\n# a forked repo.\n\n# **why?**\n# Ensure code change gets reflected in the CHANGELOG.\n\n# **when?**\n# This will run for all PRs going into master.  It will\n# run when they are opened, reopened, when any label is added or removed\n# and when new code is pushed to the branch.  The action will then get\n# skipped if the 'Skip Changelog' label is present is any of the labels.\n\nname: Check Changelog Entry\n\non:\n  pull_request:\n    types: [opened, reopened, labeled, unlabeled, synchronize]\n  workflow_dispatch:\n\ndefaults:\n  run:\n    shell: bash\n\npermissions:\n  contents: read\n  pull-requests: write\n\njobs:\n  changelog:\n    uses: dbt-labs/actions/.github/workflows/changelog-existence.yml@main\n    with:\n      changelog_comment: 'Thank you for your pull request! We could not find a changelog entry for this change. For details on how to document a change, see [the contributing guide](https://github.com/starburstdata/dbt-trino/blob/master/CONTRIBUTING.md#adding-changelog-entry).'\n      skip_label: 'Skip Changelog'\n    secrets: inherit\n"
  },
  {
    "path": ".github/workflows/ci.yml",
    "content": "name: dbt-trino tests\non:\n  push:\n    branches:\n      - master\n      - \"*.*.latest\"\n    paths-ignore:\n      - \"**/*.md\"\n  pull_request:\n    branches:\n      - master\n      - \"*.*.latest\"\n    paths-ignore:\n      - \"**/*.md\"\n\njobs:\n  checks:\n    runs-on: ubuntu-latest\n    steps:\n      - name: \"Checkout the source code\"\n        uses: actions/checkout@v4\n\n      - name: \"Install Python\"\n        uses: actions/setup-python@v5\n\n      - name: \"Install dev requirements\"\n        run: pip install -r dev_requirements.txt\n\n      - name: \"Run pre-commit checks\"\n        run: pre-commit run --all-files\n  test:\n    runs-on: ubuntu-latest\n    strategy:\n      fail-fast: false\n      matrix:\n        engine:\n          - \"trino\"\n          - \"starburst\"\n          - \"starburst_galaxy\"\n        python:\n          - \"3.9\"\n          - \"3.10\"\n          - \"3.11\"\n          - \"3.12\"\n          - \"3.13\"\n        isStarburstBranch:\n          - ${{ (github.event_name == 'pull_request' && contains(github.event.pull_request.head.repo.full_name, 'starburstdata')) || github.event_name != 'pull_request' }}\n        exclude:\n          - engine: \"starburst_galaxy\"\n            python: \"3.13\"\n            isStarburstBranch: false\n          - engine: \"starburst_galaxy\"\n            python: \"3.12\"\n          - engine: \"starburst_galaxy\"\n            python: \"3.11\"\n          - engine: \"starburst_galaxy\"\n            python: \"3.10\"\n          - engine: \"starburst_galaxy\"\n            python: \"3.9\"\n\n    steps:\n      - name: Checkout\n        uses: actions/checkout@v4\n\n      - name: Setup Python\n        uses: actions/setup-python@v5\n        with:\n          python-version: ${{ matrix.python }}\n\n      - name: Run dbt-trino tests against ${{ matrix.engine }} on python ${{ matrix.python }}\n        env:\n          DBT_TESTS_STARBURST_GALAXY_HOST: ${{ secrets.DBT_TESTS_STARBURST_GALAXY_HOST }}\n          DBT_TESTS_STARBURST_GALAXY_USER: ${{ secrets.DBT_TESTS_STARBURST_GALAXY_USER }}\n          DBT_TESTS_STARBURST_GALAXY_PASSWORD: ${{ secrets.DBT_TESTS_STARBURST_GALAXY_PASSWORD }}\n        run: |\n          if [[ ${{ matrix.engine }} == \"trino\" || ${{ matrix.engine }} == \"starburst\" ]]; then\n            make dbt-${{ matrix.engine }}-tests\n          elif [[ ${{ matrix.engine }} == \"starburst_galaxy\" ]]; then\n            python -m pip install -e . -r dev_requirements.txt\n            python -m pytest tests/functional --profile starburst_galaxy\n          fi\n\n      - name: Remove container on failure\n        if: failure()\n        run: ./docker/remove_${{ matrix.engine }}.bash || true\n"
  },
  {
    "path": ".github/workflows/release.yml",
    "content": "name: dbt-trino release\n\non:\n  workflow_dispatch:\n\njobs:\n  test:\n    runs-on: ubuntu-latest\n    steps:\n      - name: Checkout\n        uses: actions/checkout@v4\n\n      - name: Setup Python\n        uses: actions/setup-python@v5\n        with:\n          python-version: \"3.13\"\n\n      - name: Test release\n        run: |\n          python3 -m venv env\n          source env/bin/activate\n          pip install -r dev_requirements.txt\n          pip install twine wheel setuptools\n          python setup.py sdist bdist_wheel\n          pip install dist/dbt_trino-*.tar.gz\n          pip install dist/dbt_trino-*-py3-none-any.whl\n          twine check dist/dbt_trino-*-py3-none-any.whl dist/dbt_trino-*.tar.gz\n\n  github-release:\n    name: GitHub release\n    runs-on: ubuntu-latest\n    needs: test\n    steps:\n      - name: Checkout\n        uses: actions/checkout@v4\n\n      - name: Setup Python\n        uses: actions/setup-python@v5\n        with:\n          python-version: \"3.13\"\n\n      - name: Get dbt-trino version\n        run: echo \"version_number=$(cat dbt/adapters/trino/__version__.py | sed -n 's/version = \"\\(.*\\)\\\"/\\1/p')\" >> $GITHUB_ENV\n\n      # Need to set an output variable because env variables can't be taken as input\n      # This is needed for the next step with releasing to GitHub\n      - name: Find release type\n        id: release_type\n        env:\n          IS_PRERELEASE: ${{ contains(env.version_number, 'rc') ||  contains(env.version_number, 'b') }}\n        run: |\n          echo \"isPrerelease=$IS_PRERELEASE\" >> $GITHUB_OUTPUT\n\n      - name: Create GitHub release\n        uses: actions/create-release@v1\n        env:\n          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} # This token is provided by Actions, you do not need to create your own token\n        with:\n          tag_name: v${{env.version_number}}\n          release_name: v${{env.version_number}}\n          prerelease: ${{ steps.release_type.outputs.isPrerelease }}\n          body: |\n            [Release notes](https://github.com/starburstdata/dbt-trino/blob/master/CHANGELOG.md)\n            ```sh\n            $ pip install dbt-trino==${{env.version_number}}\n            ```\n\n  pypi-release:\n    name: Pypi release\n    runs-on: ubuntu-latest\n    needs: github-release\n    environment: PypiProd\n    permissions:\n      id-token: write\n    steps:\n      - name: Checkout\n        uses: actions/checkout@v4\n\n      - name: Setup Python\n        uses: actions/setup-python@v5\n        with:\n          python-version: \"3.13\"\n\n      - name: Get dbt-trino version\n        run: echo \"version_number=$(cat dbt/adapters/trino/__version__.py | sed -n 's/version = \"\\(.*\\)\\\"/\\1/p')\" >> $GITHUB_ENV\n\n      - name: Release to pypi\n        run: |\n          python3 -m venv env\n          source env/bin/activate\n          pip install -r dev_requirements.txt\n          pip install twine wheel setuptools\n          python setup.py sdist bdist_wheel\n          twine upload --non-interactive dist/dbt_trino-${{env.version_number}}-py3-none-any.whl dist/dbt_trino-${{env.version_number}}.tar.gz\n"
  },
  {
    "path": ".github/workflows/security.yml",
    "content": "name: Veracode SCA\n\non:\n  workflow_dispatch:\n\njobs:\n  veracode-sca-task:\n    runs-on: ubuntu-latest\n    name: Scan repository for Issues\n\n    steps:\n      - name: Checkout\n        uses: actions/checkout@v4\n      - name: Run Veracode SCA\n        env:\n          SRCCLR_API_TOKEN: ${{ secrets.SRCCLR_API_TOKEN }}\n        uses: veracode/veracode-sca@v1.09\n\n        with:\n          github_token: ${{ secrets.GITHUB_TOKEN }}\n          create-issues: true\n          min-cvss-for-issue: 1\n          fail-on-cvss: 11\n"
  },
  {
    "path": ".github/workflows/version-bump.yml",
    "content": "# **what?**\n# This workflow will take the new version number to bump to. With that\n# it will run versionbump to update the version number everywhere in the\n# code base and then run changie to create the corresponding changelog.\n# A PR will be created with the changes that can be reviewed before committing.\n\n# **why?**\n# This is to aid in releasing dbt-trino and making sure we have updated\n# the version in all places and generated the changelog.\n\n# **when?**\n# This is triggered manually\n\nname: Version Bump\n\non:\n  workflow_dispatch:\n    inputs:\n      version_number:\n        description: 'The version number to bump to (ex. 1.2.0, 1.3.0b1)'\n        required: true\n\njobs:\n  bump:\n    runs-on: ubuntu-latest\n    steps:\n      - name: \"[DEBUG] Print Variables\"\n        run: |\n          echo \"all variables defined as inputs\"\n          echo The version_number: ${{ github.event.inputs.version_number }}\n\n      - name: Check out the repository\n        uses: actions/checkout@v4\n\n      - uses: actions/setup-python@v5\n        with:\n          python-version: \"3.8\"\n\n      - name: Install brew\n        run: |\n          echo \"/home/linuxbrew/.linuxbrew/bin:/home/linuxbrew/.linuxbrew/sbin\" >> $GITHUB_PATH\n\n      - name: Install python dependencies\n        run: |\n          python3 -m venv env\n          source env/bin/activate\n          pip install --upgrade pip\n\n      - name: Audit Version and Parse Into Parts\n        id: semver\n        uses: dbt-labs/actions/parse-semver@v1\n        with:\n          version: ${{ github.event.inputs.version_number }}\n\n      - name: Set branch value\n        id: variables\n        run: |\n          echo \"BRANCH_NAME=prep-release/${{ github.event.inputs.version_number }}_$GITHUB_RUN_ID\" >> $GITHUB_OUTPUT\n\n      - name: Create PR branch\n        run: |\n          git checkout -b ${{ steps.variables.outputs.BRANCH_NAME }}\n          git push origin ${{ steps.variables.outputs.BRANCH_NAME }}\n          git branch --set-upstream-to=origin/${{ steps.variables.outputs.BRANCH_NAME }} ${{ steps.variables.outputs.BRANCH_NAME }}\n\n      - name: Bump version\n        run: |\n          echo -en \"version = \\\"${{ github.event.inputs.version_number }}\\\"\\n\"  > dbt/adapters/trino/__version__.py\n          git status\n\n      - name: Run changie\n        run: |\n          brew tap miniscruff/changie https://github.com/miniscruff/changie\n          brew install changie\n          if [[ ${{ steps.semver.outputs.is-pre-release }} -eq 1 ]]\n          then\n            changie batch ${{ steps.semver.outputs.base-version }}  --move-dir '${{ steps.semver.outputs.base-version }}' --prerelease '${{ steps.semver.outputs.pre-release }}'\n          else\n            if [[ -d \".changes/${{ steps.semver.outputs.base-version }}\" ]]\n            then\n              changie batch ${{ steps.semver.outputs.base-version }}  --include '${{ steps.semver.outputs.base-version }}' --remove-prereleases\n            else\n              changie batch ${{ steps.semver.outputs.base-version }}  --move-dir '${{ steps.semver.outputs.base-version }}'\n            fi\n          fi\n          changie merge\n          git status\n\n      - name: Commit version bump to branch\n        uses: EndBug/add-and-commit@v9\n        with:\n          author_name: 'Github Build Bot'\n          author_email: 'automation@starburstdata.com'\n          message: 'Bumping version to ${{ github.event.inputs.version_number }} and generate CHANGELOG'\n          branch: 
'${{ steps.variables.outputs.BRANCH_NAME }}'\n          push: 'origin origin/${{ steps.variables.outputs.BRANCH_NAME }}'\n\n      - name: Create Pull Request\n        uses: peter-evans/create-pull-request@v7\n        with:\n          author: 'Github Build Bot <automation@starburstdata.com>'\n          base: ${{github.ref}}\n          title: 'Bumping version to ${{ github.event.inputs.version_number }} and generating changelog'\n          branch: '${{ steps.variables.outputs.BRANCH_NAME }}'\n          labels: |\n            Skip Changelog\n"
  },
  {
    "path": ".gitignore",
    "content": "*.egg-info\nenv/\n__pycache__/\n.tox/\n.idea/\nbuild/\ndist/\ndbt-integration-tests\ndocker/dbt/.user.yml\n.DS_Store\n.vscode/\nlogs/\n.venv/\n"
  },
  {
    "path": ".pre-commit-config.yaml",
    "content": "# Configuration for pre-commit hooks (see https://pre-commit.com/).\n# Eventually the hooks described here will be run as tests before merging each PR.\n\n# TODO: remove global exclusion of tests when testing overhaul is complete\nexclude: ^test/\n\nrepos:\n  - repo: https://github.com/pre-commit/pre-commit-hooks\n    rev: v4.4.0\n    hooks:\n      - id: check-yaml\n        args: [--unsafe]\n      - id: check-json\n      - id: end-of-file-fixer\n      - id: trailing-whitespace\n        exclude_types:\n          - \"markdown\"\n      - id: check-case-conflict\n  - repo: https://github.com/dbt-labs/pre-commit-hooks\n    rev: v0.1.0a1\n    hooks:\n      - id: dbt-core-in-adapters-check\n  - repo: https://github.com/psf/black\n    rev: 23.3.0\n    hooks:\n      - id: black\n        args:\n          - \"--line-length=99\"\n          - \"--target-version=py38\"\n      - id: black\n        alias: black-check\n        stages: [manual]\n        args:\n          - \"--line-length=99\"\n          - \"--target-version=py38\"\n          - \"--check\"\n          - \"--diff\"\n  - repo: https://github.com/pycqa/isort\n    rev: 5.12.0\n    hooks:\n      - id: isort\n        args: [ \"--profile\", \"black\", \"--filter-files\" ]\n  - repo: https://github.com/pycqa/flake8\n    rev: 7.1.2\n    hooks:\n      - id: flake8\n      - id: flake8\n        alias: flake8-check\n        stages: [manual]\n  - repo: https://github.com/pre-commit/mirrors-mypy\n    rev: v1.2.0\n    hooks:\n      - id: mypy\n        # N.B.: Mypy is... a bit fragile.\n        #\n        # By using `language: system` we run this hook in the local\n        # environment instead of a pre-commit isolated one.  This is needed\n        # to ensure mypy correctly parses the project.\n\n        # It may cause trouble in that it adds environmental variables out\n        # of our control to the mix.  Unfortunately, there's nothing we can\n        # do about per pre-commit's author.\n        # See https://github.com/pre-commit/pre-commit/issues/730 for details.\n        args: [ --show-error-codes, --ignore-missing-imports ]\n        files: ^dbt/adapters/.*\n        language: system\n      - id: mypy\n        alias: mypy-check\n        stages: [ manual ]\n        args: [ --show-error-codes, --pretty, --ignore-missing-imports ]\n        files: ^dbt/adapters\n        language: system\n"
  },
  {
    "path": "CHANGELOG.md",
    "content": "# dbt-trino Changelog\n\n- This file provides a full account of all changes to `dbt-trino`\n- Changes are listed under the (pre)release in which they first appear. Subsequent releases include changes from previous releases.\n- \"Breaking changes\" listed under a version may require action from end users or external maintainers when upgrading to that version.\n- Do not edit this file directly. This file is auto-generated using [changie](https://github.com/miniscruff/changie). For details on how to document a change, see [the contributing guide](https://github.com/starburstdata/dbt-trino/blob/master/CONTRIBUTING.md#adding-changelog-entry)\n## dbt-trino 1.10.1 - January 16, 2026\n### Dependencies\n- Bump dbt-adapters>=1.16,<2.0 ([#507](https://github.com/starburstdata/dbt-trino/issues/507), [#507](https://github.com/starburstdata/dbt-trino/pull/507))\n\n### Contributors\n- [@zqureshi](https://github.com/zqureshi) ([#507](https://github.com/starburstdata/dbt-trino/pull/507))\n## dbt-trino 1.10.0 - December 16, 2025\n### Features\n- Add support for catalog integration ([#502](https://github.com/starburstdata/dbt-trino/pull/502))\n\n### Contributors\n- [@damian3031](https://github.com/damian3031) ([#502](https://github.com/starburstdata/dbt-trino/pull/502))\n## Previous Releases\n\nFor information on prior major and minor releases, see their changelogs:\n\n* [1.9](https://github.com/starburstdata/dbt-trino/blob/1.9.latest/CHANGELOG.md)\n* [1.8](https://github.com/starburstdata/dbt-trino/blob/1.8.latest/CHANGELOG.md)\n* [1.7](https://github.com/starburstdata/dbt-trino/blob/1.7.latest/CHANGELOG.md)\n* [1.6](https://github.com/starburstdata/dbt-trino/blob/1.6.latest/CHANGELOG.md)\n* [1.5](https://github.com/starburstdata/dbt-trino/blob/1.5.latest/CHANGELOG.md)\n* [1.4](https://github.com/starburstdata/dbt-trino/blob/1.4.latest/CHANGELOG.md)\n* [1.3](https://github.com/starburstdata/dbt-trino/blob/1.3.latest/CHANGELOG.md)\n* [1.2](https://github.com/starburstdata/dbt-trino/blob/1.2.latest/CHANGELOG.md)\n* [1.1](https://github.com/starburstdata/dbt-trino/blob/1.1.latest/CHANGELOG.md)\n* [1.0 and earlier](https://github.com/starburstdata/dbt-trino/blob/1.0.latest/CHANGELOG.md)\n"
  },
  {
    "path": "CONTRIBUTING.md",
    "content": "# Contributing to `dbt-trino`\n\n## Getting the code\n\n### How to contribute?\n\nYou can contribute to `dbt-trino` by forking the `dbt-trino` repository. For a detailed overview on forking, check out the [GitHub docs on forking](https://help.github.com/en/articles/fork-a-repo). In short, you will need to:\n\n1. Fork the `dbt-trino` repository\n2. Clone your fork locally\n3. Check out a new branch for your proposed changes\n4. Push changes to your fork\n5. Open a pull request against `starburstdata/dbt-trino` from your forked repository\n\n## Setting up an environment\n\nThere are some tools that will be helpful to you in developing locally. While this is the list relevant for `dbt-trino` development, many of these tools are used commonly across open-source python projects.\n\n### Tools\n\nThese are the tools used in `dbt-trino` development and testing:\n\n- [`tox`](https://tox.readthedocs.io/en/latest/) to manage virtualenvs across python versions. We currently target the latest patch releases for Python 3.9, 3.10, 3.11, 3.12, and 3.13\n- [`pytest`](https://docs.pytest.org/en/latest/) to define, discover, and run tests\n- [`flake8`](https://flake8.pycqa.org/en/latest/) for code linting\n- [`black`](https://github.com/psf/black) for code formatting\n- [`isort`](https://pycqa.github.io/isort/) for sorting imports\n- [`mypy`](https://mypy.readthedocs.io/en/stable/) for static type checking\n- [`pre-commit`](https://pre-commit.com) to easily run those checks\n- [`changie`](https://changie.dev/) to create changelog entries, without merge conflicts\n- [`make`](https://users.cs.duke.edu/~ola/courses/programming/Makefiles/Makefiles.html) to run multiple setup or test steps in combination. Don't worry too much, nobody _really_ understands how `make` works, and our Makefile aims to be super simple.\n- [GitHub Actions](https://github.com/features/actions) for automating tests and checks, once a PR is pushed to the `dbt-trino` repository\n\nA deep understanding of these tools in not required to effectively contribute to `dbt-trino`, but we recommend checking out the attached documentation if you're interested in learning more about each one.\n\n#### Virtual environments\n\nWe strongly recommend using virtual environments when developing code in `dbt-trino`. We recommend creating this virtualenv\nin the root of the `dbt-trino` repository. To create a new virtualenv, run:\n```sh\npython3 -m venv env\nsource env/bin/activate\n```\n\nThis will create and activate a new Python virtual environment.\n\n#### Docker and `docker compose`\n\nDocker and `docker compose` are both used in testing. Specific instructions for you OS can be found [here](https://docs.docker.com/get-docker/).\n\n## Running `dbt-trino` in development\n\n### Installation\n\nFirst make sure that you set up your `virtualenv` as described in [Setting up an environment](#setting-up-an-environment).  Also ensure you have the latest version of pip installed with `pip install --upgrade pip`. Next, install `dbt-trino` (and its dependencies) with:\n\n```sh\npip install -e . -r dev_requirements.txt\n```\n\nWhen installed in this way, any changes you make to your local copy of the source code will be reflected immediately in your next `dbt` run.\n\n### Running `dbt-trino`\n\nWith your virtualenv activated, the `dbt` script should point back to the source code you've cloned on your machine. You can verify this by running `which dbt`. 
## Testing\n\nOnce you're able to manually test that your code change is working as expected, it's important to run existing automated tests, as well as adding some new ones. These tests will ensure that:\n- Your code changes do not unexpectedly break other established functionality\n- Your code changes can handle all known edge cases\n- The functionality you're adding will _keep_ working in the future\n\n### Initial setup\n\nTo be able to run the tests locally you will need a Trino or Starburst instance.\n\n```sh\n# to start Trino\nmake start-trino\n# to start Starburst\nmake start-starburst\n```\n\n### Test commands\n\nThere are a few methods for running tests locally.\n\n#### Makefile\n\nThere are multiple targets in the Makefile to run common test suites and code\nchecks, most notably:\n\n```sh\n# Runs integration tests on Trino\nmake dbt-trino-tests\n# Runs integration tests on Starburst\nmake dbt-starburst-tests\n```\n> These make targets assume you have a local installation of a recent version of [`tox`](https://tox.readthedocs.io/en/latest/) for unit/integration testing and pre-commit for code quality checks,\n> unless you choose a Docker container to run tests. Run `make help` for more info.\n\n#### `pre-commit`\n[`pre-commit`](https://pre-commit.com) takes care of running all code checks for formatting and linting. Run `make dev` to install `pre-commit` in your local environment. Once this is done you can use any of the linter-based make targets as well as a git pre-commit hook that will ensure proper formatting and linting.\n\n#### `tox`\n\n[`tox`](https://tox.readthedocs.io/en/latest/) takes care of managing virtualenvs and installing dependencies in order to run tests. You can also run tests in parallel; for example, you can run unit tests for Python 3.9, 3.10, 3.11, 3.12, and 3.13 in parallel with `tox -p`. You can also run unit tests for specific Python versions with `tox -e py39`. The configuration for these tests is located in `tox.ini`.\n\n#### `pytest`\n\nFinally, you can also run a specific test or group of tests using [`pytest`](https://docs.pytest.org/en/latest/) directly. With a virtualenv active and dev dependencies installed you can do things like:\n\n```sh\n# run all unit tests in a file\npython3 -m pytest tests/unit/utils.py\n# run a specific unit test\npython3 -m pytest tests/unit/test_adapter.py::TestTrinoAdapter::test_acquire_connection\n# run integration tests\npython3 -m pytest tests/functional\n```\n\n> See [pytest usage docs](https://docs.pytest.org/en/6.2.x/usage.html) for an overview of useful command-line options.\n\nThe catalog in the dbt profile can be set up through [pytest markers](https://docs.pytest.org/en/7.1.x/example/markers.html#registering-markers); if no marker is specified, the memory catalog is used.\n\nFor example, if you want the dbt profile to connect to the Delta Lake catalog, annotate your test with `@pytest.mark.delta` (supported markers are `postgresql`, `delta`, and `iceberg`).\n\n```python\n@pytest.mark.delta\ndef test_run_seed_test(self, project):\n    ...\n```\n\n## Adding CHANGELOG Entry\n\nWe use [changie](https://changie.dev) to generate `CHANGELOG` entries. **Note:** Do not edit the `CHANGELOG.md` directly. Your modifications will be lost.\n\nFollow the steps to [install `changie`](https://changie.dev/guide/installation/) for your system.\n\nOnce changie is installed and your PR is created, simply run `changie new` and changie will walk you through the process of creating a changelog entry. Commit the file that's created and your changelog entry is complete!\n\n
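For illustration, the generated entry is a small YAML file under `.changes/unreleased/`, shaped like the existing entries in this repository (the values below are made up):\n\n```yaml\nkind: Fixes\nbody: Example description of the change\ntime: 2026-01-01T00:00:00.000000+00:00\ncustom:\n  Author: your-github-username\n  Issue: \"\"\n  PR: \"123\"\n```\n\n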
You don't need to worry about which `dbt-trino` version your change will go into. Just create the changelog entry with `changie`, and open your PR against the `master` branch.\n\n## Submitting a Pull Request\n\nA `dbt-trino` maintainer will review your PR. They may suggest code revisions for style or clarity, or request that you add unit or integration test(s). These are good things! We believe that, with a little bit of help, anyone can contribute high-quality code.\n\nAutomated tests run via GitHub Actions. If you're a first-time contributor, all tests (including code checks and unit tests) will require a maintainer's approval to run. Changes in the `dbt-trino` repository trigger integration tests against Trino and Starburst.\n\nOnce all tests are passing and your PR has been approved, a `dbt-trino` maintainer will merge your changes into the master branch. And that's it! Happy developing :tada:\n"
  },
  {
    "path": "LICENSE.txt",
    "content": "                                 Apache License\n                           Version 2.0, January 2004\n                        http://www.apache.org/licenses/\n\n   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION\n\n   1. Definitions.\n\n      \"License\" shall mean the terms and conditions for use, reproduction,\n      and distribution as defined by Sections 1 through 9 of this document.\n\n      \"Licensor\" shall mean the copyright owner or entity authorized by\n      the copyright owner that is granting the License.\n\n      \"Legal Entity\" shall mean the union of the acting entity and all\n      other entities that control, are controlled by, or are under common\n      control with that entity. For the purposes of this definition,\n      \"control\" means (i) the power, direct or indirect, to cause the\n      direction or management of such entity, whether by contract or\n      otherwise, or (ii) ownership of fifty percent (50%) or more of the\n      outstanding shares, or (iii) beneficial ownership of such entity.\n\n      \"You\" (or \"Your\") shall mean an individual or Legal Entity\n      exercising permissions granted by this License.\n\n      \"Source\" form shall mean the preferred form for making modifications,\n      including but not limited to software source code, documentation\n      source, and configuration files.\n\n      \"Object\" form shall mean any form resulting from mechanical\n      transformation or translation of a Source form, including but\n      not limited to compiled object code, generated documentation,\n      and conversions to other media types.\n\n      \"Work\" shall mean the work of authorship, whether in Source or\n      Object form, made available under the License, as indicated by a\n      copyright notice that is included in or attached to the work\n      (an example is provided in the Appendix below).\n\n      \"Derivative Works\" shall mean any work, whether in Source or Object\n      form, that is based on (or derived from) the Work and for which the\n      editorial revisions, annotations, elaborations, or other modifications\n      represent, as a whole, an original work of authorship. For the purposes\n      of this License, Derivative Works shall not include works that remain\n      separable from, or merely link (or bind by name) to the interfaces of,\n      the Work and Derivative Works thereof.\n\n      \"Contribution\" shall mean any work of authorship, including\n      the original version of the Work and any modifications or additions\n      to that Work or Derivative Works thereof, that is intentionally\n      submitted to Licensor for inclusion in the Work by the copyright owner\n      or by an individual or Legal Entity authorized to submit on behalf of\n      the copyright owner. 
For the purposes of this definition, \"submitted\"\n      means any form of electronic, verbal, or written communication sent\n      to the Licensor or its representatives, including but not limited to\n      communication on electronic mailing lists, source code control systems,\n      and issue tracking systems that are managed by, or on behalf of, the\n      Licensor for the purpose of discussing and improving the Work, but\n      excluding communication that is conspicuously marked or otherwise\n      designated in writing by the copyright owner as \"Not a Contribution.\"\n\n      \"Contributor\" shall mean Licensor and any individual or Legal Entity\n      on behalf of whom a Contribution has been received by Licensor and\n      subsequently incorporated within the Work.\n\n   2. Grant of Copyright License. Subject to the terms and conditions of\n      this License, each Contributor hereby grants to You a perpetual,\n      worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n      copyright license to reproduce, prepare Derivative Works of,\n      publicly display, publicly perform, sublicense, and distribute the\n      Work and such Derivative Works in Source or Object form.\n\n   3. Grant of Patent License. Subject to the terms and conditions of\n      this License, each Contributor hereby grants to You a perpetual,\n      worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n      (except as stated in this section) patent license to make, have made,\n      use, offer to sell, sell, import, and otherwise transfer the Work,\n      where such license applies only to those patent claims licensable\n      by such Contributor that are necessarily infringed by their\n      Contribution(s) alone or by combination of their Contribution(s)\n      with the Work to which such Contribution(s) was submitted. If You\n      institute patent litigation against any entity (including a\n      cross-claim or counterclaim in a lawsuit) alleging that the Work\n      or a Contribution incorporated within the Work constitutes direct\n      or contributory patent infringement, then any patent licenses\n      granted to You under this License for that Work shall terminate\n      as of the date such litigation is filed.\n\n   4. Redistribution. 
You may reproduce and distribute copies of the\n      Work or Derivative Works thereof in any medium, with or without\n      modifications, and in Source or Object form, provided that You\n      meet the following conditions:\n\n      (a) You must give any other recipients of the Work or\n          Derivative Works a copy of this License; and\n\n      (b) You must cause any modified files to carry prominent notices\n          stating that You changed the files; and\n\n      (c) You must retain, in the Source form of any Derivative Works\n          that You distribute, all copyright, patent, trademark, and\n          attribution notices from the Source form of the Work,\n          excluding those notices that do not pertain to any part of\n          the Derivative Works; and\n\n      (d) If the Work includes a \"NOTICE\" text file as part of its\n          distribution, then any Derivative Works that You distribute must\n          include a readable copy of the attribution notices contained\n          within such NOTICE file, excluding those notices that do not\n          pertain to any part of the Derivative Works, in at least one\n          of the following places: within a NOTICE text file distributed\n          as part of the Derivative Works; within the Source form or\n          documentation, if provided along with the Derivative Works; or,\n          within a display generated by the Derivative Works, if and\n          wherever such third-party notices normally appear. The contents\n          of the NOTICE file are for informational purposes only and\n          do not modify the License. You may add Your own attribution\n          notices within Derivative Works that You distribute, alongside\n          or as an addendum to the NOTICE text from the Work, provided\n          that such additional attribution notices cannot be construed\n          as modifying the License.\n\n      You may add Your own copyright statement to Your modifications and\n      may provide additional or different license terms and conditions\n      for use, reproduction, or distribution of Your modifications, or\n      for any such Derivative Works as a whole, provided Your use,\n      reproduction, and distribution of the Work otherwise complies with\n      the conditions stated in this License.\n\n   5. Submission of Contributions. Unless You explicitly state otherwise,\n      any Contribution intentionally submitted for inclusion in the Work\n      by You to the Licensor shall be under the terms and conditions of\n      this License, without any additional terms or conditions.\n      Notwithstanding the above, nothing herein shall supersede or modify\n      the terms of any separate license agreement you may have executed\n      with Licensor regarding such Contributions.\n\n   6. Trademarks. This License does not grant permission to use the trade\n      names, trademarks, service marks, or product names of the Licensor,\n      except as required for reasonable and customary use in describing the\n      origin of the Work and reproducing the content of the NOTICE file.\n\n   7. Disclaimer of Warranty. 
Unless required by applicable law or\n      agreed to in writing, Licensor provides the Work (and each\n      Contributor provides its Contributions) on an \"AS IS\" BASIS,\n      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\n      implied, including, without limitation, any warranties or conditions\n      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A\n      PARTICULAR PURPOSE. You are solely responsible for determining the\n      appropriateness of using or redistributing the Work and assume any\n      risks associated with Your exercise of permissions under this License.\n\n   8. Limitation of Liability. In no event and under no legal theory,\n      whether in tort (including negligence), contract, or otherwise,\n      unless required by applicable law (such as deliberate and grossly\n      negligent acts) or agreed to in writing, shall any Contributor be\n      liable to You for damages, including any direct, indirect, special,\n      incidental, or consequential damages of any character arising as a\n      result of this License or out of the use or inability to use the\n      Work (including but not limited to damages for loss of goodwill,\n      work stoppage, computer failure or malfunction, or any and all\n      other commercial damages or losses), even if such Contributor\n      has been advised of the possibility of such damages.\n\n   9. Accepting Warranty or Additional Liability. While redistributing\n      the Work or Derivative Works thereof, You may choose to offer,\n      and charge a fee for, acceptance of support, warranty, indemnity,\n      or other liability obligations and/or rights consistent with this\n      License. However, in accepting such obligations, You may act only\n      on Your own behalf and on Your sole responsibility, not on behalf\n      of any other Contributor, and only if You agree to indemnify,\n      defend, and hold each Contributor harmless for any liability\n      incurred by, or claims asserted against, such Contributor by reason\n      of your accepting any such warranty or additional liability.\n\n   END OF TERMS AND CONDITIONS\n\n   APPENDIX: How to apply the Apache License to your work.\n\n      To apply the Apache License to your work, attach the following\n      boilerplate notice, with the fields enclosed by brackets \"{}\"\n      replaced with your own identifying information. (Don't include\n      the brackets!)  The text should be enclosed in the appropriate\n      comment syntax for the file format. We also recommend that a\n      file or class name and description of purpose be included on the\n      same \"printed page\" as the copyright notice for easier\n      identification within third-party archives.\n\n   Copyright 2021 Starburst Data, Inc.\n\n   Licensed under the Apache License, Version 2.0 (the \"License\");\n   you may not use this file except in compliance with the License.\n   You may obtain a copy of the License at\n\n       http://www.apache.org/licenses/LICENSE-2.0\n\n   Unless required by applicable law or agreed to in writing, software\n   distributed under the License is distributed on an \"AS IS\" BASIS,\n   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n   See the License for the specific language governing permissions and\n   limitations under the License.\n"
  },
  {
    "path": "Makefile",
    "content": ".EXPORT_ALL_VARIABLES:\n\nDBT_TEST_USER_1=user1\nDBT_TEST_USER_2=user2\nDBT_TEST_USER_3=user3\n\nstart-trino:\n\tdocker network create dbt-net || true\n\t./docker/init_trino.bash\n\ndbt-trino-tests: start-trino\n\tpip install -e . -r dev_requirements.txt\n\ttox -r\n\nstart-starburst:\n\tdocker network create dbt-net || true\n\t./docker/init_starburst.bash\n\ndbt-starburst-tests: start-starburst\n\tpip install -e . -r dev_requirements.txt\n\ttox -r\n\ndev:\n\tpre-commit install\n"
  },
  {
    "path": "README.md",
    "content": "# dbt-trino\n\n<picture>\n  <source media=\"(prefers-color-scheme: dark)\" srcset=\"https://raw.githubusercontent.com/starburstdata/dbt-trino/master/assets/images/Starburst_Logo_White%2BBlue.svg\" width=\"98%\">\n  <source media=\"(prefers-color-scheme: light)\" srcset=\"https://raw.githubusercontent.com/starburstdata/dbt-trino/master/assets/images/Starburst_Logo_Black%2BBlue.svg\" width=\"98%\">\n  <img alt=\"Starburst\" src=\"https://raw.githubusercontent.com/starburstdata/dbt-trino/master/assets/images/Starburst_Logo_Black%2BBlue.svg\">\n</picture>\n<picture>\n  <source media=\"(prefers-color-scheme: dark)\" srcset=\"https://raw.githubusercontent.com/starburstdata/dbt-trino/master/assets/images/dbt-signature_tm_light.svg\" width=\"45%\">\n  <source media=\"(prefers-color-scheme: light)\" srcset=\"https://raw.githubusercontent.com/starburstdata/dbt-trino/master/assets/images/dbt-signature_tm.svg\" width=\"45%\">\n  <img alt=\"dbt\" src=\"https://raw.githubusercontent.com/starburstdata/dbt-trino/master/assets/images/dbt-signature_tm.svg\">\n</picture>\n&nbsp&nbsp&nbsp&nbsp&nbsp&nbsp&nbsp\n<picture>\n  <source media=\"(prefers-color-scheme: dark)\" srcset=\"https://raw.githubusercontent.com/starburstdata/dbt-trino/master/assets/images/trino-logo-dk-bg.svg\" width=\"50%\">\n  <source media=\"(prefers-color-scheme: light)\" srcset=\"https://raw.githubusercontent.com/starburstdata/dbt-trino/master/assets/images/trino-logo-w-bk.svg\" width=\"50%\">\n  <img alt=\"trino\" src=\"https://raw.githubusercontent.com/starburstdata/dbt-trino/master/assets/images/trino-logo-w-bk.svg\">\n</picture>\n\n[![Build Status](https://github.com/starburstdata/dbt-trino/actions/workflows/ci.yml/badge.svg)](https://github.com/starburstdata/dbt-trino/actions/workflows/ci.yml?query=workflow%3A%22dbt-trino+tests%22+branch%3Amaster+event%3Apush) [![db-starburst-and-trino Slack](https://img.shields.io/static/v1?logo=slack&logoColor=959DA5&label=Slack&labelColor=333a41&message=join%20conversation&color=3AC358)](https://getdbt.slack.com/channels/db-starburst-and-trino)\n\n## Introduction\n\n[dbt](https://docs.getdbt.com/docs/introduction) is a data transformation workflow tool that lets teams quickly and collaboratively deploy analytics code, following software engineering best practices like modularity, CI/CD, testing, and documentation. It enables anyone who knows SQL to build production-grade data pipelines.\n\nOne frequently asked question in the context of using `dbt` tool is:\n\n> Can I connect my dbt project to two databases?\n\n(see the answered [question](https://docs.getdbt.com/faqs/connecting-to-two-dbs-not-allowed) on the dbt website).\n\n**TL;DR** `dbt` stands for transformation as in `T` within `ELT` pipelines, it doesn't move data from source to a warehouse.\n\n`dbt-trino` adapter uses [Trino](https://trino.io/) as a underlying query engine to perform query federation across disperse data sources. Trino connects to multiple and diverse data sources ([available connectors](https://trino.io/docs/current/connector.html)) via one dbt connection and process SQL queries at scale. 
This repository is a fork of [dbt-presto](https://github.com/dbt-labs/dbt-presto), with adaptations to make it work with Trino.\n\n## Compatibility\n\nThis dbt plugin has been tested against `Trino` version `478`, `Starburst Enterprise` version `477-e.1`, and `Starburst Galaxy`.\n\n## Setup & Configuration\n\nFor information on installing and configuring your profile to authenticate to Trino or Starburst, please refer to [Starburst and Trino Setup](https://docs.getdbt.com/reference/warehouse-setups/trino-setup) in the dbt docs.\n\n### Trino- and Starburst-specific configuration\n\nFor Trino- and Starburst-specific configuration, you can refer to [Starburst (Trino) configurations](https://docs.getdbt.com/reference/resource-configs/trino-configs) on the dbt docs site.\n\n## Contributing\n\n- Want to report a bug or request a feature? Let us know on [Slack](http://community.getdbt.com/) in the [#db-starburst-and-trino](https://getdbt.slack.com/channels/db-starburst-and-trino) channel, or on [Trino slack](https://trino.io/slack.html) in the [#python](https://trinodb.slack.com/channels/python) channel, or open [an issue](https://github.com/starburstdata/dbt-trino/issues/new)\n- Want to help us build dbt-trino? Check out the [Contributing Guide](https://github.com/starburstdata/dbt-trino/blob/HEAD/CONTRIBUTING.md)\n\n### Release process\nThe first 5 steps are ONLY relevant when bumping the __minor__ version:\n1. Create a `1.x.latest` branch from the latest tag corresponding to the current minor version, e.g. `git checkout -b 1.6.latest v1.6.2` (when bumping to 1.7). Push the branch to the remote. This branch will be used for potential backports.\n2. Create a new branch (do not push the commits below to `1.x.latest`). Add a new entry in `.changes/0.0.0.md` that points to the newly created latest branch.\n3. Run `changie merge` to update `CHANGELOG.md`. After that, remove the changie files and folders related to the current minor version. Commit.\n4. Bump the version of `dbt-tests-adapter`. Commit.\n5. Merge these 2 commits into the master branch. Add a `Skip Changelog` label to the PR.\n\nContinue with the next steps for a __minor__ version bump. Start from this point for a __patch__ version bump:\n1. Run the `Version Bump` workflow. The major and minor parts of the dbt version are used to associate dbt-trino's version with the dbt version.\n2. Merge the bump PR. Make sure that the test suite passes.\n3. Run the `dbt-trino release` workflow to release `dbt-trino` to PyPI and GitHub.\n\n### Backport process\n\nSometimes it is necessary to backport changes to older versions. In that case, create a branch from the `x.x.latest` branch. There is an `x.x.latest` branch for each minor version, e.g. `1.3.latest`. Make the fix and open a PR back to `x.x.latest`. Create the changelog entry with `changie new` as usual; a separate changelog for each minor version is kept on every `x.x.latest` branch.\nAfter merging, to make a release of that version, follow the instructions from the **Release process** section, but run every workflow on the `x.x.latest` branch.\n\n## Code of Conduct\n\nEveryone interacting in the dbt project's codebases, issue trackers, chat rooms, and mailing lists is expected\nto follow the [PyPA Code of Conduct](https://www.pypa.io/en/latest/code-of-conduct/).\n"
  },
  {
    "path": "dbt/adapters/trino/__init__.py",
    "content": "from dbt.adapters.base import AdapterPlugin\n\nfrom dbt.adapters.trino.column import TrinoColumn  # noqa\nfrom dbt.adapters.trino.connections import TrinoConnectionManager  # noqa\nfrom dbt.adapters.trino.connections import TrinoCredentialsFactory\nfrom dbt.adapters.trino.relation import TrinoRelation  # noqa\n\nfrom dbt.adapters.trino.impl import TrinoAdapter  # isort: split\nfrom dbt.include import trino\n\nPlugin = AdapterPlugin(\n    adapter=TrinoAdapter,  # type: ignore\n    credentials=TrinoCredentialsFactory,  # type: ignore\n    include_path=trino.PACKAGE_PATH,\n)\n"
  },
  {
    "path": "dbt/adapters/trino/__version__.py",
    "content": "version = \"1.10.1\"\n"
  },
  {
    "path": "dbt/adapters/trino/catalogs/__init__.py",
    "content": "from dbt.adapters.trino.catalogs._relation import TrinoCatalogRelation\nfrom dbt.adapters.trino.catalogs._trino_catalog_metastore import TrinoCatalogIntegration\n\n__all__ = [\n    \"TrinoCatalogIntegration\",\n    \"TrinoCatalogRelation\",\n]\n"
  },
  {
    "path": "dbt/adapters/trino/catalogs/_relation.py",
    "content": "from dataclasses import dataclass\nfrom typing import Optional\n\nfrom dbt.adapters.catalogs import CatalogRelation\n\nfrom dbt.adapters.trino import constants\n\n\n@dataclass\nclass TrinoCatalogRelation(CatalogRelation):\n    catalog_type: str = constants.DEFAULT_TRINO_CATALOG.catalog_type\n    catalog_name: Optional[str] = constants.DEFAULT_TRINO_CATALOG.name\n    table_format: Optional[str] = None\n    file_format: Optional[str] = None\n    external_volume: Optional[str] = None\n    storage_uri: Optional[str] = None\n"
  },
  {
    "path": "dbt/adapters/trino/catalogs/_trino_catalog_metastore.py",
    "content": "from typing import Optional\n\nfrom dbt.adapters.catalogs import CatalogIntegration, CatalogIntegrationConfig\nfrom dbt.adapters.contracts.relation import RelationConfig\n\nfrom dbt.adapters.trino import constants\nfrom dbt.adapters.trino.catalogs._relation import TrinoCatalogRelation\n\n\nclass TrinoCatalogIntegration(CatalogIntegration):\n    \"\"\"\n    Catalog type:\n        In Trino, the metastore for a catalog is set when configuring the connector.\n        This cannot be configured using dbt's generated SQL.\n\n        Documentation:\n            https://trino.io/docs/current/overview/concepts.html#catalog\n            https://trino.io/docs/current/object-storage/metastores.html\n\n    Table format:\n        For Trino and Starburst SEP, the table format is specified by the connector configuration.\n        Setting table_format here will result in error, as 'type' property is unavailable in Trino and Starburst SEP.\n        If you are using Starburst Galaxy, you can set the default table format to use for this catalog.\n        It will set `type` property to specified table format.\n\n        Documentation:\n            https://docs.starburst.io/starburst-galaxy/data-engineering/working-with-data-lakes/table-formats/index.html\n    \"\"\"\n\n    catalog_type = constants.TRINO_CATALOG_TYPE\n    allows_writes = True\n\n    def __init__(self, config: CatalogIntegrationConfig) -> None:\n        super().__init__(config)\n        self.storage_uri = config.adapter_properties.get(\"storage_uri\")\n\n    def build_relation(self, model: RelationConfig) -> TrinoCatalogRelation:\n        return TrinoCatalogRelation(\n            catalog_type=self.catalog_type,\n            catalog_name=self.catalog_name,\n            table_format=self.table_format,\n            file_format=self.file_format,\n            external_volume=self.external_volume,\n            storage_uri=self._calculate_storage_uri(model),\n        )\n\n    def _calculate_storage_uri(self, model: RelationConfig) -> Optional[str]:\n        if not model.config:\n            return None\n\n        if model_storage_uri := model.config.get(\"storage_uri\"):\n            return model_storage_uri\n\n        if not self.external_volume:\n            return None\n\n        # Default dbt behavior is that if base_location_root is not specified, `_dbt` prefix is added.\n        # Even if base_location_root is explicitly set to None, `_dbt` prefix is still added.\n        # Allow omitting the prefix by setting omit_base_location_root to True.\n        omit_base_location_root = model.config.get(\"omit_base_location_root\")\n        if omit_base_location_root:\n            storage_uri = f\"{self.external_volume}/{model.schema}/{model.name}\"\n        else:\n            prefix = model.config.get(\"base_location_root\") or \"_dbt\"\n            storage_uri = f\"{self.external_volume}/{prefix}/{model.schema}/{model.name}\"\n        if suffix := model.config.get(\"base_location_subpath\"):\n            storage_uri = f\"{storage_uri}/{suffix}\"\n        return storage_uri\n"
  },
  {
    "path": "dbt/adapters/trino/column.py",
    "content": "import re\nfrom dataclasses import dataclass\nfrom typing import ClassVar, Dict\n\nfrom dbt.adapters.base.column import Column\nfrom dbt_common.exceptions import DbtRuntimeError\n\n# Taken from the MAX_LENGTH variable in\n# https://github.com/trinodb/trino/blob/master/core/trino-spi/src/main/java/io/trino/spi/type/VarcharType.java\nTRINO_VARCHAR_MAX_LENGTH = 2147483646\n\n\n@dataclass\nclass TrinoColumn(Column):\n    TYPE_LABELS: ClassVar[Dict[str, str]] = {\n        \"STRING\": \"VARCHAR\",\n        \"FLOAT\": \"DOUBLE\",\n    }\n\n    @property\n    def data_type(self):\n        # when varchar has no defined size, default to unbound varchar\n        # the super().data_type defaults to varchar(256)\n        if self.dtype.lower() == \"varchar\" and self.char_size is None:\n            return self.dtype\n\n        return super().data_type\n\n    def is_string(self) -> bool:\n        return self.dtype.lower() in [\"varchar\", \"char\"]\n\n    def is_float(self) -> bool:\n        return self.dtype.lower() in [\n            \"real\",\n            \"double precision\",\n            \"double\",\n        ]\n\n    def is_integer(self) -> bool:\n        return self.dtype.lower() in [\n            \"tinyint\",\n            \"smallint\",\n            \"integer\",\n            \"int\",\n            \"bigint\",\n        ]\n\n    def is_numeric(self) -> bool:\n        return self.dtype.lower() == \"decimal\"\n\n    @classmethod\n    def string_type(cls, size: int) -> str:\n        return \"varchar({})\".format(size)\n\n    def string_size(self) -> int:\n        # override the string_size function to handle the unbound varchar case\n        if self.dtype.lower() == \"varchar\" and self.char_size is None:\n            return TRINO_VARCHAR_MAX_LENGTH\n\n        return super().string_size()\n\n    @classmethod\n    def from_description(cls, name: str, raw_data_type: str) -> \"Column\":\n        # Most of the Trino data types specify a type and not a precision/scale/charsize\n        if not raw_data_type.lower().startswith((\"varchar\", \"char\", \"decimal\")):\n            return cls(name, raw_data_type)\n        # Trino data types that do specify a precision/scale/charsize:\n        match = re.match(\n            r\"(?P<type>[^(]+)(?P<size>\\([^)]+\\))?(?P<type_suffix>[\\w ]+)?\", raw_data_type\n        )\n        if match is None:\n            raise DbtRuntimeError(f'Could not interpret data type \"{raw_data_type}\"')\n        data_type = match.group(\"type\")\n        size_info = match.group(\"size\")\n        data_type_suffix = match.group(\"type_suffix\")\n        if data_type_suffix:\n            data_type += data_type_suffix\n        char_size = None\n        numeric_precision = None\n        numeric_scale = None\n        if size_info is not None:\n            # strip out the parentheses\n            size_info = size_info[1:-1]\n            parts = size_info.split(\",\")\n            if len(parts) == 1:\n                try:\n                    char_size = int(parts[0])\n                except ValueError:\n                    raise DbtRuntimeError(\n                        f'Could not interpret data_type \"{raw_data_type}\": '\n                        f'could not convert \"{parts[0]}\" to an integer'\n                    )\n            elif len(parts) == 2:\n                try:\n                    numeric_precision = int(parts[0])\n                except ValueError:\n                    raise DbtRuntimeError(\n                        f'Could not interpret data_type 
\"{raw_data_type}\": '\n                        f'could not convert \"{parts[0]}\" to an integer'\n                    )\n                try:\n                    numeric_scale = int(parts[1])\n                except ValueError:\n                    raise DbtRuntimeError(\n                        f'Could not interpret data_type \"{raw_data_type}\": '\n                        f'could not convert \"{parts[1]}\" to an integer'\n                    )\n\n        return cls(name, data_type, char_size, numeric_precision, numeric_scale)\n"
  },
  {
    "path": "dbt/adapters/trino/connections.py",
    "content": "import decimal\nimport os\nimport re\nfrom abc import ABCMeta, abstractmethod\nfrom contextlib import contextmanager\nfrom dataclasses import dataclass, field\nfrom datetime import date, datetime\nfrom enum import Enum\nfrom typing import Any, Dict, List, Optional, Union\n\nimport sqlparse\nimport trino\nfrom dbt.adapters.contracts.connection import AdapterResponse, Credentials\nfrom dbt.adapters.events.logging import AdapterLogger\nfrom dbt.adapters.exceptions.connection import FailedToConnectError\nfrom dbt.adapters.sql import SQLConnectionManager\nfrom dbt_common.exceptions import DbtDatabaseError, DbtRuntimeError\nfrom dbt_common.helper_types import Port\nfrom trino.transaction import IsolationLevel\n\nfrom dbt.adapters.trino.__version__ import version\n\nlogger = AdapterLogger(\"Trino\")\nPREPARED_STATEMENTS_ENABLED_DEFAULT = True\n\n\nclass HttpScheme(Enum):\n    HTTP = \"http\"\n    HTTPS = \"https\"\n\n\nclass TrinoCredentialsFactory:\n    @classmethod\n    def _create_trino_profile(cls, profile):\n        if \"method\" in profile:\n            method = profile[\"method\"]\n            if method == \"ldap\":\n                return TrinoLdapCredentials\n            elif method == \"certificate\":\n                return TrinoCertificateCredentials\n            elif method == \"kerberos\":\n                return TrinoKerberosCredentials\n            elif method == \"jwt\":\n                return TrinoJwtCredentials\n            elif method == \"oauth\":\n                return TrinoOauthCredentials\n            elif method == \"oauth_console\":\n                return TrinoOauthConsoleCredentials\n        return TrinoNoneCredentials\n\n    @classmethod\n    def translate_aliases(cls, kwargs: Dict[str, Any], recurse: bool = False) -> Dict[str, Any]:\n        klazz = cls._create_trino_profile(kwargs)\n        return klazz.translate_aliases(kwargs, recurse)\n\n    @classmethod\n    def validate(cls, data: Any):\n        klazz = cls._create_trino_profile(data)\n        return klazz.validate(data)\n\n    @classmethod\n    def from_dict(cls, data: Any):\n        klazz = cls._create_trino_profile(data)\n        return klazz.from_dict(data)\n\n\nclass TrinoCredentials(Credentials, metaclass=ABCMeta):\n    _ALIASES = {\"catalog\": \"database\"}\n\n    @property\n    def type(self):\n        return \"trino\"\n\n    @property\n    def unique_field(self):\n        return self.host\n\n    def _connection_keys(self):\n        return (\n            \"method\",\n            \"host\",\n            \"port\",\n            \"user\",\n            \"database\",\n            \"schema\",\n            \"cert\",\n            \"prepared_statements_enabled\",\n        )\n\n    @abstractmethod\n    def trino_auth(self) -> Optional[trino.auth.Authentication]:\n        pass\n\n\n@dataclass\nclass TrinoNoneCredentials(TrinoCredentials):\n    host: str\n    port: Port\n    user: str\n    client_tags: Optional[List[str]] = None\n    roles: Optional[Dict[str, str]] = None\n    cert: Optional[Union[str, bool]] = None\n    http_scheme: HttpScheme = HttpScheme.HTTP\n    http_headers: Optional[Dict[str, str]] = None\n    session_properties: Dict[str, Any] = field(default_factory=dict)\n    prepared_statements_enabled: bool = PREPARED_STATEMENTS_ENABLED_DEFAULT\n    retries: Optional[int] = trino.constants.DEFAULT_MAX_ATTEMPTS\n    timezone: Optional[str] = None\n    suppress_cert_warning: Optional[bool] = None\n\n    @property\n    def method(self):\n        return \"none\"\n\n    def 
trino_auth(self):\n        return trino.constants.DEFAULT_AUTH\n\n\n@dataclass\nclass TrinoCertificateCredentials(TrinoCredentials):\n    host: str\n    port: Port\n    client_certificate: str\n    client_private_key: str\n    user: Optional[str] = None\n    client_tags: Optional[List[str]] = None\n    roles: Optional[Dict[str, str]] = None\n    cert: Optional[Union[str, bool]] = None\n    http_headers: Optional[Dict[str, str]] = None\n    session_properties: Dict[str, Any] = field(default_factory=dict)\n    prepared_statements_enabled: bool = PREPARED_STATEMENTS_ENABLED_DEFAULT\n    retries: Optional[int] = trino.constants.DEFAULT_MAX_ATTEMPTS\n    timezone: Optional[str] = None\n    suppress_cert_warning: Optional[bool] = None\n\n    @property\n    def http_scheme(self):\n        return HttpScheme.HTTPS\n\n    @property\n    def method(self):\n        return \"certificate\"\n\n    def trino_auth(self):\n        return trino.auth.CertificateAuthentication(\n            self.client_certificate, self.client_private_key\n        )\n\n\n@dataclass\nclass TrinoLdapCredentials(TrinoCredentials):\n    host: str\n    port: Port\n    user: str\n    password: str\n    impersonation_user: Optional[str] = None\n    client_tags: Optional[List[str]] = None\n    roles: Optional[Dict[str, str]] = None\n    cert: Optional[Union[str, bool]] = None\n    http_headers: Optional[Dict[str, str]] = None\n    session_properties: Dict[str, Any] = field(default_factory=dict)\n    prepared_statements_enabled: bool = PREPARED_STATEMENTS_ENABLED_DEFAULT\n    retries: Optional[int] = trino.constants.DEFAULT_MAX_ATTEMPTS\n    timezone: Optional[str] = None\n    suppress_cert_warning: Optional[bool] = None\n\n    @property\n    def http_scheme(self):\n        return HttpScheme.HTTPS\n\n    @property\n    def method(self):\n        return \"ldap\"\n\n    def trino_auth(self):\n        return trino.auth.BasicAuthentication(username=self.user, password=self.password)\n\n\n@dataclass\nclass TrinoKerberosCredentials(TrinoCredentials):\n    host: str\n    port: Port\n    user: str\n    client_tags: Optional[List[str]] = None\n    roles: Optional[Dict[str, str]] = None\n    keytab: Optional[str] = None\n    principal: Optional[str] = None\n    krb5_config: Optional[str] = None\n    service_name: Optional[str] = \"trino\"\n    mutual_authentication: Optional[bool] = False\n    cert: Optional[Union[str, bool]] = None\n    http_headers: Optional[Dict[str, str]] = None\n    force_preemptive: Optional[bool] = False\n    hostname_override: Optional[str] = None\n    sanitize_mutual_error_response: Optional[bool] = True\n    delegate: Optional[bool] = False\n    session_properties: Dict[str, Any] = field(default_factory=dict)\n    prepared_statements_enabled: bool = PREPARED_STATEMENTS_ENABLED_DEFAULT\n    retries: Optional[int] = trino.constants.DEFAULT_MAX_ATTEMPTS\n    timezone: Optional[str] = None\n    suppress_cert_warning: Optional[bool] = None\n\n    @property\n    def http_scheme(self):\n        return HttpScheme.HTTPS\n\n    @property\n    def method(self):\n        return \"kerberos\"\n\n    def trino_auth(self):\n        os.environ[\"KRB5_CLIENT_KTNAME\"] = self.keytab\n        return trino.auth.KerberosAuthentication(\n            config=self.krb5_config,\n            service_name=self.service_name,\n            principal=self.principal,\n            mutual_authentication=self.mutual_authentication,\n            ca_bundle=self.cert,\n            force_preemptive=self.force_preemptive,\n            
hostname_override=self.hostname_override,\n            sanitize_mutual_error_response=self.sanitize_mutual_error_response,\n            delegate=self.delegate,\n        )\n\n\n@dataclass\nclass TrinoJwtCredentials(TrinoCredentials):\n    host: str\n    port: Port\n    jwt_token: str\n    user: Optional[str] = None\n    client_tags: Optional[List[str]] = None\n    roles: Optional[Dict[str, str]] = None\n    cert: Optional[Union[str, bool]] = None\n    http_headers: Optional[Dict[str, str]] = None\n    session_properties: Dict[str, Any] = field(default_factory=dict)\n    prepared_statements_enabled: bool = PREPARED_STATEMENTS_ENABLED_DEFAULT\n    retries: Optional[int] = trino.constants.DEFAULT_MAX_ATTEMPTS\n    timezone: Optional[str] = None\n    suppress_cert_warning: Optional[bool] = None\n\n    @property\n    def http_scheme(self):\n        return HttpScheme.HTTPS\n\n    @property\n    def method(self):\n        return \"jwt\"\n\n    def trino_auth(self):\n        return trino.auth.JWTAuthentication(self.jwt_token)\n\n\n@dataclass\nclass TrinoOauthCredentials(TrinoCredentials):\n    host: str\n    port: Port\n    user: Optional[str] = None\n    client_tags: Optional[List[str]] = None\n    roles: Optional[Dict[str, str]] = None\n    cert: Optional[Union[str, bool]] = None\n    http_headers: Optional[Dict[str, str]] = None\n    session_properties: Dict[str, Any] = field(default_factory=dict)\n    prepared_statements_enabled: bool = PREPARED_STATEMENTS_ENABLED_DEFAULT\n    retries: Optional[int] = trino.constants.DEFAULT_MAX_ATTEMPTS\n    timezone: Optional[str] = None\n    OAUTH = trino.auth.OAuth2Authentication(\n        redirect_auth_url_handler=trino.auth.WebBrowserRedirectHandler()\n    )\n    suppress_cert_warning: Optional[bool] = None\n\n    @property\n    def http_scheme(self):\n        return HttpScheme.HTTPS\n\n    @property\n    def method(self):\n        return \"oauth\"\n\n    def trino_auth(self):\n        return self.OAUTH\n\n\n@dataclass\nclass TrinoOauthConsoleCredentials(TrinoCredentials):\n    host: str\n    port: Port\n    user: Optional[str] = None\n    client_tags: Optional[List[str]] = None\n    roles: Optional[Dict[str, str]] = None\n    cert: Optional[Union[str, bool]] = None\n    http_headers: Optional[Dict[str, str]] = None\n    session_properties: Dict[str, Any] = field(default_factory=dict)\n    prepared_statements_enabled: bool = PREPARED_STATEMENTS_ENABLED_DEFAULT\n    retries: Optional[int] = trino.constants.DEFAULT_MAX_ATTEMPTS\n    timezone: Optional[str] = None\n    OAUTH = trino.auth.OAuth2Authentication(\n        redirect_auth_url_handler=trino.auth.ConsoleRedirectHandler()\n    )\n    suppress_cert_warning: Optional[bool] = None\n\n    @property\n    def http_scheme(self):\n        return HttpScheme.HTTPS\n\n    @property\n    def method(self):\n        return \"oauth_console\"\n\n    def trino_auth(self):\n        return self.OAUTH\n\n\nclass ConnectionWrapper(object):\n    \"\"\"Wrap a Trino connection in a way that accomplishes two tasks:\n\n    - prefetch results from execute() calls so that trino calls actually\n        persist to the db but then present the usual cursor interface\n    - provide `cancel()` on the same object as `commit()`/`rollback()`/...\n\n    \"\"\"\n\n    def __init__(self, handle, prepared_statements_enabled):\n        self.handle = handle\n        self._cursor = None\n        self._fetch_result = None\n        self._prepared_statements_enabled = prepared_statements_enabled\n\n    def cursor(self):\n        self._cursor = 
self.handle.cursor()\n        return self\n\n    def cancel(self):\n        if self._cursor is not None:\n            self._cursor.cancel()\n\n    def close(self):\n        # this is a noop on trino, but pass it through anyway\n        self.handle.close()\n\n    def commit(self):\n        pass\n\n    def rollback(self):\n        pass\n\n    def start_transaction(self):\n        pass\n\n    def fetchall(self):\n        if self._cursor is None:\n            return None\n\n        if self._fetch_result is not None:\n            ret = self._fetch_result\n            self._fetch_result = None\n            return ret\n\n        return None\n\n    def fetchone(self):\n        if self._cursor is None:\n            return None\n\n        if self._fetch_result is not None:\n            ret = self._fetch_result[0]\n            self._fetch_result = None\n            return ret\n\n        return None\n\n    def fetchmany(self, size):\n        if self._cursor is None:\n            return None\n\n        if self._fetch_result is not None:\n            ret = self._fetch_result[:size]\n            self._fetch_result = None\n            return ret\n\n        return None\n\n    def execute(self, sql, bindings=None):\n        if not self._prepared_statements_enabled and bindings is not None:\n            # DEPRECATED: by default prepared statements are used.\n            # Code is left as an escape hatch if prepared statements\n            # are failing.\n            bindings = tuple(self._escape_value(b) for b in bindings)\n            sql = sql % bindings\n\n            result = self._cursor.execute(sql)\n        else:\n            result = self._cursor.execute(sql, params=bindings)\n\n        self._fetch_result = self._cursor.fetchall()\n        return result\n\n    @property\n    def description(self):\n        return self._cursor.description\n\n    @classmethod\n    def _escape_value(cls, value):\n        \"\"\"A not very comprehensive system for escaping bindings.\n\n        I think \"'\" (a single quote) is the only character that matters.\n        \"\"\"\n        numbers = (decimal.Decimal, int, float)\n        if value is None:\n            return \"NULL\"\n        elif isinstance(value, str):\n            return \"'{}'\".format(value.replace(\"'\", \"''\"))\n        elif isinstance(value, numbers):\n            return value\n        elif isinstance(value, datetime):\n            time_formatted = value.strftime(\"%Y-%m-%d %H:%M:%S.%f\")[:-3]\n            return \"TIMESTAMP '{}'\".format(time_formatted)\n        elif isinstance(value, date):\n            date_formatted = value.strftime(\"%Y-%m-%d\")\n            return \"DATE '{}'\".format(date_formatted)\n        else:\n            raise ValueError(\"Cannot escape {}\".format(type(value)))\n\n\n@dataclass\nclass TrinoAdapterResponse(AdapterResponse):\n    query: str = \"\"\n    query_id: str = \"\"\n\n\nclass TrinoConnectionManager(SQLConnectionManager):\n    TYPE = \"trino\"\n    behavior_flags = None\n\n    def __init__(self, profile, mp_context, behavior_flags=None) -> None:\n        super().__init__(profile, mp_context)\n\n        TrinoConnectionManager.behavior_flags = behavior_flags\n\n    @contextmanager\n    def exception_handler(self, sql):\n        try:\n            yield\n        except trino.exceptions.Error as e:\n            msg = str(e)\n\n            if \"Failed to establish a new connection\" in msg:\n                raise FailedToConnectError(msg) from e\n\n            if isinstance(e, trino.exceptions.TrinoQueryError):\n            
    logger.debug(\"Trino query id: {}\".format(e.query_id))\n            logger.debug(\"Trino error: {}\".format(msg))\n\n            raise DbtDatabaseError(msg)\n        except Exception as e:\n            msg = str(e)\n            if isinstance(e, DbtRuntimeError):\n                # during a sql query, an internal to dbt exception was raised.\n                # this sounds a lot like a signal handler and probably has\n                # useful information, so raise it without modification.\n                raise\n            raise DbtRuntimeError(msg) from e\n\n    # For connection in auto-commit mode there is no need to start\n    # separate transaction. If using auto-commit, the client will\n    # create a new transaction and commit/rollback for each query\n    def add_begin_query(self):\n        pass\n\n    def add_commit_query(self):\n        pass\n\n    @classmethod\n    def open(cls, connection):\n        if connection.state == \"open\":\n            logger.debug(\"Connection is already open, skipping open.\")\n            return connection\n\n        credentials = connection.credentials\n\n        # set default `cert` value, according to\n        # require_certificate_validation behavior flag\n        if credentials.cert is None:\n            req_cert_val_flag = cls.behavior_flags.require_certificate_validation.setting\n            if req_cert_val_flag:\n                credentials.cert = True\n\n        if credentials.suppress_cert_warning:\n            import urllib3\n\n            urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)\n\n        # it's impossible for trino to fail here as 'connections' are actually\n        # just cursor factories.\n        trino_conn = trino.dbapi.connect(\n            host=credentials.host,\n            port=credentials.port,\n            user=credentials.impersonation_user\n            if getattr(credentials, \"impersonation_user\", None)\n            else credentials.user,\n            client_tags=credentials.client_tags,\n            roles=credentials.roles,\n            catalog=credentials.database,\n            schema=credentials.schema,\n            http_scheme=credentials.http_scheme.value,\n            http_headers=credentials.http_headers,\n            session_properties=credentials.session_properties,\n            auth=credentials.trino_auth(),\n            max_attempts=credentials.retries,\n            isolation_level=IsolationLevel.AUTOCOMMIT,\n            source=f\"dbt-trino-{version}\",\n            verify=credentials.cert,\n            timezone=credentials.timezone,\n        )\n        connection.state = \"open\"\n        connection.handle = ConnectionWrapper(trino_conn, credentials.prepared_statements_enabled)\n        return connection\n\n    @classmethod\n    def get_response(cls, cursor) -> TrinoAdapterResponse:\n        code = cursor._cursor.update_type\n        if code is None:\n            code = \"SUCCESS\"\n\n        rows_affected = cursor._cursor.rowcount\n        if rows_affected == -1:\n            message = f\"{code}\"\n        else:\n            message = f\"{code} ({rows_affected:_} rows)\"\n        return TrinoAdapterResponse(\n            _message=message,\n            query=cursor._cursor.query,\n            query_id=cursor._cursor.query_id,\n            rows_affected=rows_affected,\n        )  # type: ignore\n\n    def cancel(self, connection):\n        connection.handle.cancel()\n\n    def add_query(self, sql, auto_begin=True, bindings=None, abridge_sql_log=False):\n        connection = None\n   
     cursor = None\n\n        # TODO: is this sufficient? Largely copy+pasted from snowflake, so\n        # there's some common behavior here we can maybe factor out into the\n        # SQLAdapter?\n        queries = [q.rstrip(\";\") for q in sqlparse.split(sql)]\n\n        for individual_query in queries:\n            # hack -- after the last ';', remove comments and don't run\n            # empty queries. this avoids using exceptions as flow control,\n            # and also allows us to return the status of the last cursor\n            without_comments = re.sub(\n                re.compile(\"^.*(--.*)$\", re.MULTILINE), \"\", individual_query\n            ).strip()\n\n            if without_comments == \"\":\n                continue\n\n            parent = super(TrinoConnectionManager, self)\n            connection, cursor = parent.add_query(\n                individual_query, auto_begin, bindings, abridge_sql_log\n            )\n\n        if cursor is None:\n            conn = self.get_thread_connection()\n            if conn is None or conn.name is None:\n                conn_name = \"<None>\"\n            else:\n                conn_name = conn.name\n\n            raise DbtRuntimeError(\n                \"Tried to run an empty query on model '{}'. If you are \"\n                \"conditionally running\\nsql, eg. in a model hook, make \"\n                \"sure your `else` clause contains valid sql!\\n\\n\"\n                \"Provided SQL:\\n{}\".format(conn_name, sql)\n            )\n\n        return connection, cursor\n\n    @classmethod\n    def data_type_code_to_name(cls, type_code) -> str:\n        return type_code.split(\"(\")[0].upper()\n"
  },
  {
    "path": "dbt/adapters/trino/constants.py",
    "content": "from types import SimpleNamespace\n\nADAPTER_TYPE = \"trino\"\n\nTRINO_CATALOG_TYPE = \"trino\"\n\nDEFAULT_TRINO_CATALOG = SimpleNamespace(\n    name=\"trino_default\",\n    catalog_name=\"trino_default\",\n    catalog_type=\"trino\",\n    table_format=None,\n    file_format=None,\n    external_volume=None,\n    adapter_properties={},\n)\n"
  },
  {
    "path": "dbt/adapters/trino/impl.py",
    "content": "from dataclasses import dataclass\nfrom typing import Dict, List, Optional\n\nimport agate\nfrom dbt.adapters.base.impl import AdapterConfig, ConstraintSupport\nfrom dbt.adapters.base.meta import available\nfrom dbt.adapters.capability import (\n    Capability,\n    CapabilityDict,\n    CapabilitySupport,\n    Support,\n)\nfrom dbt.adapters.catalogs import CatalogRelation\nfrom dbt.adapters.contracts.relation import RelationConfig\nfrom dbt.adapters.sql import SQLAdapter\nfrom dbt_common.behavior_flags import BehaviorFlag\nfrom dbt_common.contracts.constraints import ConstraintType\nfrom dbt_common.exceptions import DbtDatabaseError\n\nfrom dbt.adapters.trino import (\n    TrinoColumn,\n    TrinoConnectionManager,\n    TrinoRelation,\n    constants,\n    parse_model,\n)\nfrom dbt.adapters.trino.catalogs import TrinoCatalogIntegration\n\n\n@dataclass\nclass TrinoConfig(AdapterConfig):\n    properties: Optional[Dict[str, str]] = None\n    view_security: Optional[str] = \"definer\"\n\n\nclass TrinoAdapter(SQLAdapter):\n    Relation = TrinoRelation\n    Column = TrinoColumn\n    ConnectionManager = TrinoConnectionManager\n    AdapterSpecificConfigs = TrinoConfig\n\n    CATALOG_INTEGRATIONS = [\n        TrinoCatalogIntegration,\n    ]\n\n    CONSTRAINT_SUPPORT = {\n        ConstraintType.check: ConstraintSupport.NOT_SUPPORTED,\n        ConstraintType.not_null: ConstraintSupport.ENFORCED,\n        ConstraintType.unique: ConstraintSupport.NOT_SUPPORTED,\n        ConstraintType.primary_key: ConstraintSupport.NOT_SUPPORTED,\n        ConstraintType.foreign_key: ConstraintSupport.NOT_SUPPORTED,\n    }\n\n    _capabilities: CapabilityDict = CapabilityDict(\n        {\n            Capability.SchemaMetadataByRelations: CapabilitySupport(support=Support.Full),\n            # No information about last table modification in information_schema.tables\n            Capability.TableLastModifiedMetadata: CapabilitySupport(support=Support.Unsupported),\n            Capability.TableLastModifiedMetadataBatch: CapabilitySupport(\n                support=Support.Unsupported\n            ),\n        }\n    )\n\n    def __init__(self, config, mp_context) -> None:\n        super().__init__(config, mp_context)\n        self.connections = self.ConnectionManager(config, mp_context, self.behavior)\n        self.add_catalog_integration(constants.DEFAULT_TRINO_CATALOG)\n\n    @property\n    def _behavior_flags(self) -> List[BehaviorFlag]:\n        return [\n            {  # type: ignore\n                \"name\": \"require_certificate_validation\",\n                \"default\": False,\n                \"description\": (\n                    \"SSL certificate validation is disabled by default. \"\n                    \"It is legacy behavior which will be changed in future releases. \"\n                    \"It is strongly advised to enable `require_certificate_validation` flag \"\n                    \"or explicitly set `cert` configuration to `True` for security reasons. 
\"\n                    \"You may receive an error after that if your SSL setup is incorrect.\"\n                ),\n            }\n        ]\n\n    @classmethod\n    def date_function(cls):\n        return \"datenow()\"\n\n    @classmethod\n    def convert_text_type(cls, agate_table, col_idx):\n        return \"VARCHAR\"\n\n    @classmethod\n    def convert_number_type(cls, agate_table, col_idx):\n        decimals = agate_table.aggregate(agate.MaxPrecision(col_idx))\n        return \"DOUBLE\" if decimals else \"INTEGER\"\n\n    @classmethod\n    def convert_datetime_type(cls, agate_table, col_idx):\n        return \"TIMESTAMP\"\n\n    @classmethod\n    def convert_date_type(cls, agate_table: agate.Table, col_idx: int) -> str:\n        return \"DATE\"\n\n    def timestamp_add_sql(self, add_to: str, number: int = 1, interval: str = \"hour\") -> str:\n        return f\"{add_to} + interval '{number}' {interval}\"\n\n    def get_columns_in_relation(self, relation):\n        try:\n            return super().get_columns_in_relation(relation)\n        except DbtDatabaseError as exc:\n            if \"does not exist\" in str(exc):\n                return []\n            else:\n                raise\n\n    def valid_incremental_strategies(self):\n        return [\"append\", \"merge\", \"delete+insert\", \"microbatch\"]\n\n    @available\n    def build_catalog_relation(self, model: RelationConfig) -> Optional[CatalogRelation]:\n        \"\"\"\n        Builds a relation for a given configuration.\n\n        This method uses the provided configuration to determine the appropriate catalog\n        integration and config parser for building the relation. It defaults to the trino\n        catalog if none is provided in the configuration for backward compatibility.\n\n        Args:\n            model (RelationConfig): `config.model` (not `model`) from the jinja context\n\n        Returns:\n            Any: The constructed relation object generated through the catalog integration and parser\n        \"\"\"\n        if catalog := parse_model.catalog_name(model):\n            catalog_integration = self.get_catalog_integration(catalog)\n            return catalog_integration.build_relation(model)\n        return None\n"
  },
  {
    "path": "dbt/adapters/trino/parse_model.py",
    "content": "from typing import Optional\n\nfrom dbt.adapters.catalogs import CATALOG_INTEGRATION_MODEL_CONFIG_NAME  # type: ignore\nfrom dbt.adapters.contracts.relation import RelationConfig\n\nfrom dbt.adapters.trino import constants\n\n\ndef catalog_name(model: RelationConfig) -> Optional[str]:\n    \"\"\"Extract catalog name from model configuration\"\"\"\n    if not hasattr(model, \"config\") or not model.config:\n        return None\n\n    if catalog := model.config.get(CATALOG_INTEGRATION_MODEL_CONFIG_NAME):\n        return catalog\n\n    return constants.DEFAULT_TRINO_CATALOG.name\n"
  },
  {
    "path": "dbt/adapters/trino/relation.py",
    "content": "from dataclasses import dataclass, field\n\nfrom dbt.adapters.base.relation import BaseRelation, EventTimeFilter, Policy\nfrom dbt.adapters.contracts.relation import ComponentName\n\n\n@dataclass(frozen=True, eq=False, repr=False)\nclass TrinoRelation(BaseRelation):\n    quote_policy: Policy = field(default_factory=lambda: Policy())\n    require_alias: bool = False\n\n    # Overridden as Trino converts relation identifiers to lowercase\n    def _is_exactish_match(self, field: ComponentName, value: str) -> bool:\n        return self.path.get_lowered_part(field) == value.lower()\n\n    # Overridden because Trino cannot compare a TIMESTAMP column with a VARCHAR literal.\n    def _render_event_time_filtered(self, event_time_filter: EventTimeFilter) -> str:\n        \"\"\"\n        Returns \"\" if start and end are both None\n        \"\"\"\n        filter = \"\"\n        if event_time_filter.start and event_time_filter.end:\n            filter = f\"{event_time_filter.field_name} >= TIMESTAMP '{event_time_filter.start}' and {event_time_filter.field_name} < TIMESTAMP '{event_time_filter.end}'\"\n        elif event_time_filter.start:\n            filter = f\"{event_time_filter.field_name} >= TIMESTAMP '{event_time_filter.start}'\"\n        elif event_time_filter.end:\n            filter = f\"{event_time_filter.field_name} < TIMESTAMP '{event_time_filter.end}'\"\n\n        return filter\n"
  },
  {
    "path": "dbt/include/trino/__init__.py",
    "content": "import os\n\nPACKAGE_PATH = os.path.dirname(__file__)\n"
  },
  {
    "path": "dbt/include/trino/dbt_project.yml",
    "content": "name: dbt_trino\nversion: 1.0\nconfig-version: 2\n\nmacro-paths: [\"macros\"]\n"
  },
  {
    "path": "dbt/include/trino/macros/adapters.sql",
    "content": "\n-- - get_catalog\n-- - list_relations_without_caching\n-- - get_columns_in_relation\n\n{% macro trino__get_columns_in_relation(relation) -%}\n  {%- set sql -%}\n    select column_name, data_type\n    from {{ relation.information_schema() }}.columns\n    where\n      table_catalog = '{{ relation.database | lower }}'\n      and table_schema = '{{ relation.schema | lower }}'\n      and table_name = '{{ relation.identifier  | lower}}'\n  {%- endset -%}\n  {%- set result = run_query(sql) -%}\n\n  {% set maximum = 10000 %}\n  {% if (result | length) >= maximum %}\n    {% set msg %}\n      Too many columns in relation {{ relation }}! dbt can only get\n      information about relations with fewer than {{ maximum }} columns.\n    {% endset %}\n    {% do exceptions.raise_compiler_error(msg) %}\n  {% endif %}\n\n  {% set columns = [] %}\n  {% for row in result %}\n    {% do columns.append(api.Column.from_description(row['column_name'].lower(), row['data_type'])) %}\n  {% endfor %}\n  {% do return(columns) %}\n{% endmacro %}\n\n\n{% macro trino__list_relations_without_caching(relation) %}\n  {% call statement('list_relations_without_caching', fetch_result=True) -%}\n    select\n      t.table_catalog as database,\n      t.table_name as name,\n      t.table_schema as schema,\n      case when mv.name is not null then 'materialized_view'\n           when t.table_type = 'BASE TABLE' then 'table'\n           when t.table_type = 'VIEW' then 'view'\n           else t.table_type\n      end as table_type\n    from {{ relation.information_schema() }}.tables t\n    left join (\n            select * from system.metadata.materialized_views\n            where catalog_name = '{{ relation.database | lower }}'\n              and schema_name = '{{ relation.schema | lower }}') mv\n          on mv.catalog_name = t.table_catalog and mv.schema_name = t.table_schema and mv.name = t.table_name\n    where t.table_schema = '{{ relation.schema | lower }}'\n  {% endcall %}\n  {{ return(load_result('list_relations_without_caching').table) }}\n{% endmacro %}\n\n\n{% macro trino__reset_csv_table(model, full_refresh, old_relation, agate_table) %}\n    {{ adapter.drop_relation(old_relation) }}\n    {{ return(create_csv_table(model, agate_table)) }}\n{% endmacro %}\n\n\n{% macro trino__create_csv_table(model, agate_table) %}\n  {%- set column_override = model['config'].get('column_types', {}) -%}\n  {%- set quote_seed_column = model['config'].get('quote_columns', None) -%}\n\n  {% set sql %}\n    create table {{ this.render() }} (\n        {%- for col_name in agate_table.column_names -%}\n            {%- set inferred_type = adapter.convert_type(agate_table, loop.index0) -%}\n            {%- set type = column_override.get(col_name, inferred_type) -%}\n            {%- set column_name = (col_name | string) -%}\n            {{ adapter.quote_seed_column(column_name, quote_seed_column) }} {{ type }} {%- if not loop.last -%}, {%- endif -%}\n        {%- endfor -%}\n    ) {{ properties() }}\n  {% endset %}\n\n  {% call statement('_') -%}\n    {{ sql }}\n  {%- endcall %}\n\n  {{ return(sql) }}\n{% endmacro %}\n\n{% macro properties(temporary=False) %}\n  {%- set _properties = config.get('properties') -%}\n  {%- set table_format = config.get('table_format') -%}\n  {%- set file_format = config.get('file_format') -%}\n  {%- set catalog_relation = adapter.build_catalog_relation(config.model) -%}\n  {%- set catalog_table_format = catalog_relation.table_format -%}\n  {%- set catalog_file_format = catalog_relation.file_format -%}\n  
{%- set catalog_storage_uri = catalog_relation.storage_uri -%}\n\n  {%- if file_format -%}\n    {%- if _properties -%}\n      {%- if _properties.format -%}\n        {% set msg %}\n          You can specify either 'file_format' or 'properties.format' configurations, but not both.\n        {% endset %}\n        {% do exceptions.raise_compiler_error(msg) %}\n      {%- else -%}\n        {%- do _properties.update({'format': \"'\" ~ file_format ~ \"'\"}) -%}\n      {%- endif -%}\n    {%- else -%}\n      {%- set _properties = {'format': \"'\" ~ file_format ~ \"'\"} -%}\n    {%- endif -%}\n  {%- elif (not _properties.format) and catalog_file_format -%}\n    {%- if _properties -%}\n      {%- do _properties.update({'format': \"'\" ~ catalog_file_format ~ \"'\"}) -%}\n    {%- else -%}\n      {%- set _properties = {'format': \"'\" ~ catalog_file_format ~ \"'\"} -%}\n    {%- endif -%}\n  {%- endif -%}\n\n  {%- if table_format -%}\n    {%- if _properties -%}\n      {%- if _properties.type -%}\n        {% set msg %}\n          You can specify either 'table_format' or 'properties.type' configurations, but not both.\n        {% endset %}\n        {% do exceptions.raise_compiler_error(msg) %}\n      {%- else -%}\n        {%- do _properties.update({'type': \"'\" ~ table_format ~ \"'\"}) -%}\n      {%- endif -%}\n    {%- else -%}\n      {%- set _properties = {'type': \"'\" ~ table_format ~ \"'\"} -%}\n    {%- endif -%}\n  {%- elif (not _properties.type) and (catalog_table_format is not none) -%}\n    {%- if _properties -%}\n      {%- do _properties.update({'type': \"'\" ~ catalog_table_format ~ \"'\"}) -%}\n    {%- else -%}\n      {%- set _properties = {'type': \"'\" ~ catalog_table_format ~ \"'\"} -%}\n    {%- endif -%}\n  {%- endif -%}\n\n\n  {%- if not _properties.location and catalog_storage_uri -%}\n    {%- if _properties -%}\n      {%- do _properties.update({'location': \"'\" ~ catalog_storage_uri ~ \"'\"}) -%}\n    {%- else -%}\n      {%- set _properties = {'location': \"'\" ~ catalog_storage_uri ~ \"'\"} -%}\n    {%- endif -%}\n  {%- endif -%}\n\n  {%- if temporary -%}\n    {%- if _properties -%}\n      {%- if _properties.location -%}\n          {%- do _properties.update({'location': _properties.location[:-1] ~ \"__dbt_tmp'\"}) -%}\n      {%- endif -%}\n    {%- endif -%}\n  {%- endif -%}\n\n  {%- if _properties is not none -%}\n      WITH (\n          {%- for key, value in _properties.items() -%}\n            {{ key }} = {{ value }}\n            {%- if not loop.last -%}{{ ',\\n  ' }}{%- endif -%}\n          {%- endfor -%}\n      )\n  {%- endif -%}\n{%- endmacro -%}\n\n{% macro comment(comment) %}\n  {%- set persist_docs = model['config'].get('persist_docs') -%}\n  {%- if persist_docs -%}\n    {%- set persist_relation = persist_docs.get('relation') -%}\n    {%- if persist_relation and comment is not none and comment|length > 0 -%}\n        comment '{{ comment | replace(\"'\", \"''\") }}'\n    {%- endif -%}\n  {%- endif -%}\n{%- endmacro -%}\n\n{% macro trino__create_table_as(temporary, relation, sql, on_exists=None) -%}\n\n  {%- set or_replace = ' or replace' if on_exists == 'replace' else '' -%}\n  {%- set if_not_exists = ' if not exists' if on_exists == 'skip' else '' -%}\n\n  {%- set contract_config = config.get('contract') -%}\n  {%- if contract_config.enforced -%}\n\n  create{{ or_replace }} table{{ if_not_exists }}\n    {{ relation }}\n    {{ get_table_columns_and_constraints() }}\n    {{ get_assert_columns_equivalent(sql) }}\n    {%- set sql = get_select_subquery(sql) %}\n    {{ 
comment(model.get('description')) }}\n    {{ properties(temporary) }}\n  ;\n\n  insert into {{ relation }}\n    (\n      {{ sql }}\n    )\n  ;\n\n  {%- else %}\n\n    create{{ or_replace }} table{{ if_not_exists }} {{ relation }}\n      {{ comment(model.get('description')) }}\n      {{ properties(temporary) }}\n    as (\n      {{ sql }}\n    );\n\n  {%- endif %}\n{% endmacro %}\n\n\n{% macro trino__create_view_as(relation, sql) -%}\n  {%- set view_security = config.get('view_security', 'definer') -%}\n  {%- if view_security not in ['definer', 'invoker'] -%}\n      {%- set log_message = 'Invalid value for view_security (%s) specified. Setting default value (%s).' % (view_security, 'definer') -%}\n      {% do log(log_message) %}\n      {%- set view_security = 'definer' -%}\n  {% endif %}\n  create or replace view\n    {{ relation }}\n  {%- set contract_config = config.get('contract') -%}\n  {%- if contract_config.enforced -%}\n    {{ get_assert_columns_equivalent(sql) }}\n  {%- endif %}\n  security {{ view_security }}\n  as\n    {{ sql }}\n  ;\n{% endmacro %}\n\n\n{%- macro trino__get_drop_sql(relation) -%}\n  {% set relation_type = relation.type|replace(\"_\", \" \") %}\n    drop {{ relation_type }} if exists {{ relation }}\n{% endmacro %}\n\n\n{# see this issue: https://github.com/dbt-labs/dbt/issues/2267 #}\n{% macro trino__information_schema_name(database) -%}\n  {%- if database -%}\n    {{ database }}.INFORMATION_SCHEMA\n  {%- else -%}\n    INFORMATION_SCHEMA\n  {%- endif -%}\n{%- endmacro %}\n\n\n{# On Trino, 'cascade' is not supported so we have to cascade manually. #}\n{% macro trino__drop_schema(relation) -%}\n  {% for row in list_relations_without_caching(relation) %}\n    {% set rel_db = row[0] %}\n    {% set rel_identifier = row[1] %}\n    {% set rel_schema = row[2] %}\n    {% set rel_type = api.Relation.get_relation_type(row[3]) %}\n    {% set existing = api.Relation.create(database=rel_db, schema=rel_schema, identifier=rel_identifier, type=rel_type) %}\n    {% do drop_relation(existing) %}\n  {% endfor %}\n  {%- call statement('drop_schema') -%}\n    drop schema if exists {{ relation }}\n  {% endcall %}\n{% endmacro %}\n\n\n{% macro trino__rename_relation(from_relation, to_relation) -%}\n  {% set from_relation_type = from_relation.type|replace(\"_\", \" \") %}\n  {% call statement('rename_relation') -%}\n    alter {{ from_relation_type }} {{ from_relation }} rename to {{ to_relation }}\n  {%- endcall %}\n{% endmacro %}\n\n\n{% macro trino__alter_relation_comment(relation, relation_comment) -%}\n  comment on {{ relation.type }} {{ relation }} is '{{ relation_comment | replace(\"'\", \"''\") }}';\n{% endmacro %}\n\n\n{% macro trino__alter_column_comment(relation, column_dict) %}\n  {% set existing_columns = adapter.get_columns_in_relation(relation) | map(attribute=\"name\") | list %}\n  {% for column_name in column_dict if (column_name in existing_columns) %}\n    {% set comment = column_dict[column_name]['description'] %}\n    {%- if comment|length -%}\n      comment on column {{ relation }}.{{ adapter.quote(column_name) if column_dict[column_name]['quote'] else column_name }} is '{{ comment | replace(\"'\", \"''\") }}';\n    {%- else -%}\n      comment on column {{ relation }}.{{ adapter.quote(column_name) if column_dict[column_name]['quote'] else column_name }} is null;\n    {%- endif -%}\n  {% endfor %}\n{% endmacro %}\n\n\n{% macro trino__list_schemas(database) -%}\n  {% call statement('list_schemas', fetch_result=True, auto_begin=False) %}\n    select schema_name\n
    from {{ information_schema_name(database) }}.schemata\n  {% endcall %}\n  {{ return(load_result('list_schemas').table) }}\n{% endmacro %}\n\n\n{% macro trino__check_schema_exists(information_schema, schema) -%}\n  {% call statement('check_schema_exists', fetch_result=True, auto_begin=False) -%}\n        select count(*)\n        from {{ information_schema }}.schemata\n        where catalog_name = '{{ information_schema.database }}'\n          and schema_name = '{{ schema | lower }}'\n  {%- endcall %}\n  {{ return(load_result('check_schema_exists').table) }}\n{% endmacro %}\n\n{% macro trino__get_binding_char() %}\n  {%- if target.prepared_statements_enabled|as_bool -%}\n    {{ return('?') }}\n  {%- else -%}\n    {{ return('%s') }}\n  {%- endif -%}\n{% endmacro %}\n\n\n{% macro trino__alter_relation_add_remove_columns(relation, add_columns, remove_columns) %}\n  {% if add_columns is none %}\n    {% set add_columns = [] %}\n  {% endif %}\n  {% if remove_columns is none %}\n    {% set remove_columns = [] %}\n  {% endif %}\n\n  {% for column in add_columns %}\n    {% set sql -%}\n      alter {{ relation.type }} {{ relation }} add column {{ adapter.quote(column.name) }} {{ column.data_type }}\n    {%- endset -%}\n    {% do run_query(sql) %}\n  {% endfor %}\n\n  {% for column in remove_columns %}\n    {% set sql -%}\n      alter {{ relation.type }} {{ relation }} drop column {{ adapter.quote(column.name) }}\n    {%- endset -%}\n    {% do run_query(sql) %}\n  {% endfor %}\n{% endmacro %}\n\n\n{% macro create_or_replace_view() %}\n  {%- set identifier = model['alias'] -%}\n\n  {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n  {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n  {%- set target_relation = api.Relation.create(\n      identifier=identifier, schema=schema, database=database,\n      type='view') -%}\n  {% set grant_config = config.get('grants') %}\n\n  {{ run_hooks(pre_hooks) }}\n\n  -- If there is another object, delete it\n  {%- if old_relation is not none and not old_relation.is_view -%}\n    {{ handle_existing_table(should_full_refresh(), old_relation) }}\n  {%- endif -%}\n\n  -- build model\n  {% call statement('main') -%}\n    {{ get_create_view_as_sql(target_relation, sql) }}\n  {%- endcall %}\n\n  {% set should_revoke = should_revoke(exists_as_view, full_refresh_mode=True) %}\n  {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n  {{ run_hooks(post_hooks) }}\n\n  {{ return({'relations': [target_relation]}) }}\n{% endmacro %}\n\n{% macro trino__alter_column_type(relation, column_name, new_column_type) %}\n  {#\n    1. Create a new column (w/ temp name and correct type)\n    2. Copy data over to it\n    3. Drop the existing column\n    4. Rename the new column to the existing column\n  #}\n  {%- set tmp_column = column_name + \"__dbt_alter\" -%}\n\n  {% call statement('alter_column_type') %}\n    alter table {{ relation }} add column {{ adapter.quote(tmp_column) }} {{ new_column_type }};\n    update {{ relation }} set {{ adapter.quote(tmp_column) }} = CAST({{ adapter.quote(column_name) }} AS {{ new_column_type }});\n    alter table {{ relation }} drop column {{ adapter.quote(column_name) }};\n    alter table {{ relation }} rename column {{ adapter.quote(tmp_column) }} to {{ adapter.quote(column_name) }}\n  {% endcall %}\n{% endmacro %}\n"
  },
  {
    "path": "dbt/include/trino/macros/apply_grants.sql",
    "content": "{% macro trino__get_show_grant_sql(relation) -%}\n    select\n        grantee,\n        lower(privilege_type) as privilege_type\n    from information_schema.table_privileges\n    where table_catalog = '{{ relation.database }}'\n    and table_schema = '{{ relation.schema }}'\n    and table_name = '{{ relation.identifier }}'\n{%- endmacro %}\n\n{% macro trino__copy_grants() %}\n    {#\n        -- This macro should return true or false depending on the answer to\n        -- following question:\n        -- when an object is fully replaced on your database, do grants copy over?\n        -- e.g. on Postgres this is never true,\n        -- on Spark this is different for views vs. non-Delta tables vs. Delta tables,\n        -- on Snowflake it depends on the user-supplied copy_grants configuration.\n        -- true by default, which means “play it safe”: grants MIGHT have copied over,\n        -- so dbt will run an extra query to check them + calculate diffs.\n    #}\n    {{ return(False) }}\n{% endmacro %}\n\n{%- macro trino__get_grant_sql(relation, privilege, grantees) -%}\n    grant {{ privilege }} on {{ relation }} to {{ adapter.quote(grantees[0]) }}\n{%- endmacro %}\n\n{%- macro trino__support_multiple_grantees_per_dcl_statement() -%}\n    {#\n        -- This macro should return true or false depending on the answer to\n        -- following question:\n        -- does this database support grant {privilege} to user_a, user_b, ...?\n        -- or do user_a + user_b need their own separate grant statements?\n    #}\n    {{ return(False) }}\n{%- endmacro -%}\n\n{% macro trino__call_dcl_statements(dcl_statement_list) %}\n    {% for dcl_statement in dcl_statement_list %}\n        {% call statement('grant_or_revoke') %}\n            {{ dcl_statement }}\n        {% endcall %}\n    {% endfor %}\n{% endmacro %}\n"
  },
  {
    "path": "dbt/include/trino/macros/catalog.sql",
    "content": "{% macro trino__get_catalog(information_schema, schemas) -%}\n\n    {% set query %}\n        with tables as (\n            {{ trino__get_catalog_tables_sql(information_schema) }}\n            {{ trino__get_catalog_schemas_where_clause_sql(schemas) }}\n        ),\n        columns as (\n            {{ trino__get_catalog_columns_sql(information_schema) }}\n            {{ trino__get_catalog_schemas_where_clause_sql(schemas) }}\n        ),\n        table_comment as (\n            {{ trino__get_catalog_table_comment_schemas_sql(information_schema, schemas) }}\n        )\n        {{ trino__get_catalog_results_sql() }}\n    {%- endset -%}\n\n    {{ return(run_query(query)) }}\n\n{%- endmacro %}\n\n\n{% macro trino__get_catalog_relations(information_schema, relations) -%}\n\n    {% set query %}\n        with tables as (\n            {{ trino__get_catalog_tables_sql(information_schema) }}\n            {{ trino__get_catalog_relations_where_clause_sql(relations) }}\n        ),\n        columns as (\n            {{ trino__get_catalog_columns_sql(information_schema) }}\n            {{ trino__get_catalog_relations_where_clause_sql(relations) }}\n        ),\n        table_comment as (\n            {{ trino__get_catalog_table_comment_relations_sql(information_schema, relations) }}\n        )\n        {{ trino__get_catalog_results_sql() }}\n    {%- endset -%}\n\n    {{ return(run_query(query)) }}\n\n{%- endmacro %}\n\n\n{% macro trino__get_catalog_tables_sql(information_schema) -%}\n    select\n        table_catalog as \"table_database\",\n        table_schema as \"table_schema\",\n        table_name as \"table_name\",\n        table_type as \"table_type\",\n        null as \"table_owner\"\n    from {{ information_schema }}.tables\n{%- endmacro %}\n\n\n{% macro trino__get_catalog_columns_sql(information_schema) -%}\n    select\n        table_catalog as \"table_database\",\n        table_schema as \"table_schema\",\n        table_name as \"table_name\",\n        column_name as \"column_name\",\n        ordinal_position as \"column_index\",\n        data_type as \"column_type\",\n        comment as \"column_comment\"\n    from {{ information_schema }}.columns\n{%- endmacro %}\n\n\n{% macro trino__get_catalog_table_comment_schemas_sql(information_schema, schemas) -%}\n    select\n        catalog_name as \"table_database\",\n        schema_name as \"table_schema\",\n        table_name as \"table_name\",\n        comment as \"table_comment\"\n    from system.metadata.table_comments\n    where\n        catalog_name = '{{ information_schema.database }}'\n        and\n        schema_name != 'information_schema'\n        and\n        schema_name in ('{{ schemas | join(\"','\") | lower }}')\n{%- endmacro %}\n\n\n{% macro trino__get_catalog_table_comment_relations_sql(information_schema, relations) -%}\n    {%- for relation in relations %}\n    select\n        catalog_name as \"table_database\",\n        schema_name as \"table_schema\",\n        table_name as \"table_name\",\n        comment as \"table_comment\"\n    from system.metadata.table_comments\n    where\n        catalog_name = '{{ information_schema.database }}'\n        and\n        schema_name != 'information_schema'\n        and\n        {% if relation.schema and relation.identifier %}\n                (\n                    schema_name = '{{ relation.schema | lower }}'\n                    and table_name = '{{ relation.identifier | lower }}'\n                )\n            {% elif relation.schema %}\n                (\n                    
schema_name = '{{ relation.schema | lower }}'\n                )\n            {% else %}\n                {% do exceptions.raise_compiler_error(\n                    '`get_catalog_relations` requires a list of relations, each with a schema'\n                ) %}\n        {% endif %}\n    {%- if not loop.last %}\n    union all\n    {% endif -%}\n    {%- endfor -%}\n{%- endmacro %}\n\n\n{% macro trino__get_catalog_results_sql() -%}\n        select\n            table_database,\n            table_schema,\n            table_name,\n            table_type,\n            table_owner,\n            column_name,\n            column_index,\n            column_type,\n            column_comment,\n            table_comment\n        from tables\n        join columns using (\"table_database\", \"table_schema\", \"table_name\")\n        join table_comment using (\"table_database\", \"table_schema\", \"table_name\")\n        order by \"column_index\"\n{%- endmacro %}\n\n\n{% macro trino__get_catalog_schemas_where_clause_sql(schemas) -%}\n    where\n        table_schema != 'information_schema'\n        and\n        table_schema in ('{{ schemas | join(\"','\") | lower }}')\n{%- endmacro %}\n\n\n{% macro trino__get_catalog_relations_where_clause_sql(relations) -%}\n    where\n        table_schema != 'information_schema'\n        and\n        (\n            {%- for relation in relations -%}\n                {% if relation.schema and relation.identifier %}\n                    (\n                        table_schema = '{{ relation.schema | lower }}'\n                        and table_name = '{{ relation.identifier | lower }}'\n                    )\n                {% elif relation.schema %}\n                    (\n                        table_schema = '{{ relation.schema | lower }}'\n                    )\n                {% else %}\n                    {% do exceptions.raise_compiler_error(\n                        '`get_catalog_relations` requires a list of relations, each with a schema'\n                    ) %}\n                {% endif %}\n\n                {%- if not loop.last %} or {% endif -%}\n            {%- endfor -%}\n        )\n{%- endmacro %}\n"
  },
  {
    "path": "dbt/include/trino/macros/materializations/incremental.sql",
    "content": "{% macro get_incremental_tmp_relation_type(strategy, unique_key, language) %}\n\n  /* {#\n       If we are running multiple statements (DELETE + INSERT),\n       we must first save the model query results as a temporary table\n       in order to guarantee consistent inputs to both statements.\n\n       If we are running a single statement (MERGE or INSERT alone),\n       we can save the model query definition as a view instead,\n       for faster overall incremental processing.\n  #} */\n  {%- set views_enabled = config.get('views_enabled', true) -%}\n\n  {% if language == 'sql' and (views_enabled and (strategy in ('default', 'append', 'merge') or (unique_key is none))) %}\n    {{ return('view') }}\n  {% else %}  {#--  play it safe -- #}\n    {{ return('table') }}\n  {% endif %}\n{% endmacro %}\n\n{% materialization incremental, adapter='trino', supported_languages=['sql'] -%}\n\n  {#-- configs --#}\n  {%- set unique_key = config.get('unique_key') -%}\n  {%- set full_refresh_mode = (should_full_refresh()) -%}\n  {%- set on_schema_change = incremental_validate_on_schema_change(config.get('on_schema_change'), default='ignore') -%}\n  {%- set language = model['language'] -%}\n  {%- set on_table_exists = config.get('on_table_exists', 'rename') -%}\n  {% if on_table_exists not in ['rename', 'drop', 'replace'] %}\n      {%- set log_message = 'Invalid value for on_table_exists (%s) specified. Setting default value (%s).' % (on_table_exists, 'rename') -%}\n      {% do log(log_message) %}\n      {%- set on_table_exists = 'rename' -%}\n  {% endif %}\n  {#-- Get the incremental_strategy and the macro to use for the strategy --#}\n  {% set incremental_strategy = config.get('incremental_strategy') or 'default' %}\n  {% set incremental_predicates = config.get('predicates', none) or config.get('incremental_predicates', none) %}\n  {% set strategy_sql_macro_func = adapter.get_incremental_strategy_macro(context, incremental_strategy) %}\n\n  {#-- relations --#}\n  {%- set existing_relation = load_cached_relation(this) -%}\n  {%- set target_relation = this.incorporate(type='table') -%}\n  {#-- The temp relation will be a view (faster) or temp table, depending on upsert/merge strategy --#}\n  {%- set tmp_relation_type = get_incremental_tmp_relation_type(incremental_strategy, unique_key, language) -%}\n  {%- set tmp_relation = make_temp_relation(this).incorporate(type=tmp_relation_type) -%}\n  {%- set intermediate_relation = make_intermediate_relation(target_relation) -%}\n  {%- set backup_relation_type = 'table' if existing_relation is none else existing_relation.type -%}\n  {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n\n  {#-- the temp_ and backup_ relation should not already exist in the database; get_relation\n  -- will return None in that case. 
Otherwise, we get a relation that we can drop\n  -- later, before we try to use this name for the current operation.#}\n  {%- set preexisting_tmp_relation = load_cached_relation(tmp_relation) -%}\n  {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) -%}\n  {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n\n  {#--- grab the current table's grants config for comparison later on #}\n  {% set grant_config = config.get('grants') %}\n\n  -- drop the temp relations if they exist already in the database\n  {{ drop_relation_if_exists(preexisting_tmp_relation) }}\n  {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n  {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n  {{ run_hooks(pre_hooks) }}\n\n  {% if existing_relation is none %}\n    {%- call statement('main', language=language) -%}\n      {{ create_table_as(False, target_relation, compiled_code, language) }}\n    {%- endcall -%}\n\n  {% elif existing_relation.is_view %}\n    {#-- Can't overwrite a view with a table - we must drop --#}\n    {{ log(\"Dropping relation \" ~ target_relation ~ \" because it is a view and this model is a table.\") }}\n    {% do adapter.drop_relation(existing_relation) %}\n    {%- call statement('main', language=language) -%}\n      {{ create_table_as(False, target_relation, compiled_code, language) }}\n    {%- endcall -%}\n  {% elif full_refresh_mode %}\n    {#-- Create table with given `on_table_exists` mode #}\n    {% do on_table_exists_logic(on_table_exists, existing_relation, intermediate_relation, backup_relation, target_relation) %}\n\n  {% else %}\n    {#-- Create the temp relation, either as a view or as a temp table --#}\n    {% if tmp_relation_type == 'view' %}\n        {%- call statement('create_tmp_relation') -%}\n          {{ create_view_as(tmp_relation, compiled_code) }}\n        {%- endcall -%}\n    {% else %}\n        {%- call statement('create_tmp_relation', language=language) -%}\n          {{ create_table_as(True, tmp_relation, compiled_code, language) }}\n        {%- endcall -%}\n    {% endif %}\n\n    {% do adapter.expand_target_column_types(\n           from_relation=tmp_relation,\n           to_relation=target_relation) %}\n    {#-- Process schema changes. Returns dict of changes if successful. 
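If nothing is returned, the code below falls back to the existing relation's columns. 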
Use source columns for upserting/merging --#}\n    {% set dest_columns = process_schema_changes(on_schema_change, tmp_relation, existing_relation) %}\n    {% if not dest_columns %}\n      {% set dest_columns = adapter.get_columns_in_relation(existing_relation) %}\n    {% endif %}\n\n    {#-- Build the sql --#}\n    {% set strategy_arg_dict = ({'target_relation': target_relation, 'temp_relation': tmp_relation, 'unique_key': unique_key, 'dest_columns': dest_columns, 'incremental_predicates': incremental_predicates }) %}\n    {%- call statement('main') -%}\n      {{ strategy_sql_macro_func(strategy_arg_dict) }}\n    {%- endcall -%}\n  {% endif %}\n    {% do drop_relation_if_exists(tmp_relation) %}\n  {{ run_hooks(post_hooks) }}\n\n  {% set should_revoke =\n   should_revoke(existing_relation.is_table, full_refresh_mode) %}\n  {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n  {% do persist_docs(target_relation, model) %}\n\n  {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization %}\n\n{% macro trino__get_delete_insert_merge_sql(target, source, unique_key, dest_columns, incremental_predicates) -%}\n    {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n\n    {% if unique_key %}\n        {% if unique_key is sequence and unique_key is not string %}\n            delete from {{ target }}\n            where exists (\n                select 1\n                from {{ source }}\n                where\n                {% for key in unique_key %}\n                    {{ target }}.{{ key }} = {{ source }}.{{ key }}\n                    {{ \"and \" if not loop.last }}\n                {% endfor %}\n                )\n                {% if incremental_predicates %}\n                    {% for predicate in incremental_predicates %}\n                        and {{ predicate }}\n                    {% endfor %}\n                {% endif %}\n            ;\n        {% else %}\n            delete from {{ target }}\n            where (\n                {{ unique_key }}) in (\n                select {{ unique_key }}\n                from {{ source }}\n            )\n            {%- if incremental_predicates %}\n                {% for predicate in incremental_predicates %}\n                    and {{ predicate }}\n                {% endfor %}\n            {%- endif -%};\n\n        {% endif %}\n    {% endif %}\n\n    insert into {{ target }} ({{ dest_cols_csv }})\n    (\n        select {{ dest_cols_csv }}\n        from {{ source }}\n    )\n{%- endmacro %}\n\n{% macro trino__get_merge_sql(target, source, unique_key, dest_columns, incremental_predicates) -%}\n    {%- set predicates = [] if incremental_predicates is none else [] + incremental_predicates -%}\n    {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n    {%- set dest_cols_csv_source = dest_cols_csv.split(', ') -%}\n    {%- set merge_update_columns = config.get('merge_update_columns') -%}\n    {%- set merge_exclude_columns = config.get('merge_exclude_columns') -%}\n    {%- set update_columns = get_merge_update_columns(merge_update_columns, merge_exclude_columns, dest_columns) -%}\n    {%- set sql_header = config.get('sql_header', none) -%}\n\n    {% if unique_key %}\n        {% if unique_key is sequence and unique_key is not mapping and unique_key is not string %}\n            {% for key in unique_key %}\n                {% set this_key_match %}\n                    DBT_INTERNAL_SOURCE.{{ key }} = DBT_INTERNAL_DEST.{{ key }}\n              
  {% endset %}\n                {% do predicates.append(this_key_match) %}\n            {% endfor %}\n        {% else %}\n            {% set unique_key_match %}\n                DBT_INTERNAL_SOURCE.{{ unique_key }} = DBT_INTERNAL_DEST.{{ unique_key }}\n            {% endset %}\n            {% do predicates.append(unique_key_match) %}\n        {% endif %}\n\n        {{ sql_header if sql_header is not none }}\n\n        merge into {{ target }} as DBT_INTERNAL_DEST\n            using {{ source }} as DBT_INTERNAL_SOURCE\n            on {{\"(\" ~ predicates | join(\") and (\") ~ \")\"}}\n\n        {% if unique_key %}\n        when matched then update set\n            {% for column_name in update_columns -%}\n                {{ column_name }} = DBT_INTERNAL_SOURCE.{{ column_name }}\n                {%- if not loop.last %}, {%- endif %}\n            {%- endfor %}\n        {% endif %}\n\n        when not matched then insert\n            ({{ dest_cols_csv }})\n        values\n            ({% for dest_cols in dest_cols_csv_source -%}\n                DBT_INTERNAL_SOURCE.{{ dest_cols }}\n                {%- if not loop.last %}, {% endif %}\n            {%- endfor %})\n\n    {% else %}\n        insert into {{ target }} ({{ dest_cols_csv }})\n        (\n            select {{ dest_cols_csv }}\n            from {{ source }}\n        )\n    {% endif %}\n{% endmacro %}\n\n\n{% macro trino__get_incremental_microbatch_sql(arg_dict) %}\n    {%- set target = arg_dict[\"target_relation\"] -%}\n    {%- set source = arg_dict[\"temp_relation\"] -%}\n    {%- set dest_columns = arg_dict[\"dest_columns\"] -%}\n    {%- set incremental_predicates = [] if arg_dict.get('incremental_predicates') is none else arg_dict.get('incremental_predicates') -%}\n\n    {#-- Add additional incremental_predicates to filter for batch --#}\n    {% if model.config.get(\"__dbt_internal_microbatch_event_time_start\") -%}\n        {% do incremental_predicates.append(model.config.event_time ~ \" >= TIMESTAMP '\" ~ model.config.__dbt_internal_microbatch_event_time_start ~ \"'\") %}\n    {% endif %}\n    {% if model.config.get(\"__dbt_internal_microbatch_event_time_end\") -%}\n        {% do incremental_predicates.append(model.config.event_time ~ \" < TIMESTAMP '\" ~ model.config.__dbt_internal_microbatch_event_time_end ~ \"'\") %}\n    {% endif %}\n    {% do arg_dict.update({'incremental_predicates': incremental_predicates}) %}\n\n    delete from {{ target }}\n    where (\n    {% for predicate in incremental_predicates %}\n        {%- if not loop.first %}and {% endif -%} {{ predicate }}\n    {% endfor %}\n    );\n\n    {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n    insert into {{ target }} ({{ dest_cols_csv }})\n    (\n        select {{ dest_cols_csv }}\n        from {{ source }}\n    )\n{% endmacro %}\n"
  },
  {
    "path": "dbt/include/trino/macros/materializations/materialized_view.sql",
    "content": "{%- macro trino__get_create_materialized_view_as_sql(target_relation, sql) -%}\n  create materialized view {{ target_relation }}\n  {%- set grace_period = config.get('grace_period') %}\n  {%- if grace_period is not none %}\n    grace period {{ grace_period }}\n  {%- endif %}\n    {{ properties() }}\n  as\n  {{ sql }}\n  ;\n{%- endmacro -%}\n\n\n{% macro trino__get_replace_materialized_view_as_sql(relation, sql, existing_relation, backup_relation, intermediate_relation) %}\n    {{- trino__get_create_materialized_view_as_sql(intermediate_relation, sql) }}\n\n    {% if existing_relation is not none %}\n        {{ log(\"Found a \" ~ existing_relation.type ~ \" with same name. Will drop it\", info=true) }}\n        alter {{ existing_relation.type|replace(\"_\", \" \") }} {{ existing_relation }} rename to {{ backup_relation }};\n    {% endif %}\n\n    alter materialized view {{ intermediate_relation }} rename to {{ relation }};\n\n{% endmacro %}\n\n\n{#-- Applying materialized view configuration changes via alter is not supported. --#}\n{#-- Return None, so `refresh_materialized_view` macro is invoked even --#}\n{#-- if materialized view configuration changes are made. --#}\n{#-- After configuration change, full refresh needs to be performed on mv. --#}\n{% macro trino__get_materialized_view_configuration_changes(existing_relation, new_config) %}\n    {% do return(None) %}\n{% endmacro %}\n\n\n{%- macro trino__refresh_materialized_view(relation) -%}\n    refresh materialized view {{ relation }}\n{%- endmacro -%}\n"
  },
  {
    "path": "dbt/include/trino/macros/materializations/seeds/helpers.sql",
    "content": "{% macro trino__get_batch_size() %}\n  {{ return(1000) }}\n{% endmacro %}\n\n\n{% macro create_bindings(row, types) %}\n  {% set values = [] %}\n  {% set re = modules.re %}\n\n  {%- for item in row -%}\n      {%- set type = types[loop.index0] -%}\n      {%- set match_type = re.match(\"(\\w+)(\\(.*\\))?\", type) -%}\n      {%- if item is not none and item is string and 'interval' in match_type.group(1) -%}\n        {%- do values.append((none, match_type.group(1).upper() ~ \" \" ~ item)) -%}\n      {%- elif item is not none and item is string and 'varchar' not in type.lower() -%}\n        {%- do values.append((none, match_type.group(1).upper() ~ \" '\" ~ item ~ \"'\")) -%}\n      {%- elif item is not none and 'varchar' in type.lower() -%}\n        {%- do values.append((get_binding_char(), item|string())) -%}\n      {%- else -%}\n        {%- do values.append((get_binding_char(), item)) -%}\n      {% endif -%}\n  {%- endfor -%}\n  {{ return(values) }}\n{% endmacro %}\n\n\n{#\n  We need to override the default__load_csv_rows macro as Trino requires values to be typed according to the column type\n  as in following example:\n\n  create table \"memory\".\"default\".\"string_type\" (\"varchar_example\" varchar,\"varchar_n_example\" varchar(10),\"char_example\" char,\"char_n_example\" char(10),\"varbinary_example\" varbinary,\"json_example\" json)\n\n  insert into \"memory\".\"default\".\"string_type\" (\"varchar_example\", \"varchar_n_example\", \"char_example\", \"char_n_example\", \"varbinary_example\", \"json_example\") values\n          ('test','abc',CHAR 'd',CHAR 'ghi',VARBINARY '65683F',JSON '{\"k1\":1,\"k2\":23,\"k3\":456}'),(NULL,NULL,NULL,NULL,NULL,NULL)\n\n  Usually seed row's values through agate_table's data type detection and come through as python types, in this case typing is\n  handled by using bindings in `ConnectionWrapper.execute`. 
However, dbt also allows you to override the data types of the created table\n  by setting `column_types`; that case is handled here, where we have the type information of the seed table.\n#}\n\n{% macro trino__load_csv_rows(model, agate_table) %}\n  {% set column_override = model['config'].get('column_types', {}) %}\n  {% set types = [] %}\n\n  {%- for col_name in agate_table.column_names -%}\n      {%- set inferred_type = adapter.convert_type(agate_table, loop.index0) -%}\n      {%- set type = column_override.get(col_name, inferred_type) -%}\n      {%- do types.append(type) -%}\n  {%- endfor -%}\n\n  {% set batch_size = get_batch_size() %}\n\n  {% set cols_sql = get_seed_column_quoted_csv(model, agate_table.column_names) %}\n  {% set bindings = [] %}\n\n  {% set statements = [] %}\n\n  {% for chunk in agate_table.rows | batch(batch_size) %}\n      {% set bindings = [] %}\n\n      {% set sql %}\n          insert into {{ this.render() }} ({{ cols_sql }}) values\n          {% for row in chunk -%}\n              ({%- for tuple in create_bindings(row, types) -%}\n                  {%- if tuple.0 is not none -%}\n                  {{ tuple.0 }}\n                  {%- do bindings.append(tuple.1) -%}\n                  {%- else -%}\n                  {{ tuple.1 }}\n                  {%- endif -%}\n                  {%- if not loop.last%},{%- endif %}\n              {%- endfor -%})\n              {%- if not loop.last%},{%- endif %}\n          {%- endfor %}\n      {% endset %}\n\n      {% do adapter.add_query(sql, bindings=bindings, abridge_sql_log=True) %}\n\n      {% if loop.index0 == 0 %}\n          {% do statements.append(sql) %}\n      {% endif %}\n  {% endfor %}\n\n  {# Return SQL so we can render it out into the compiled files #}\n  {{ return(statements[0]) }}\n{% endmacro %}\n"
  },
  {
    "path": "dbt/include/trino/macros/materializations/snapshot.sql",
    "content": "{% materialization snapshot, adapter='trino' %}\n    {% if config.get('properties') %}\n        {% if config.get('properties').get('location') %}\n            {%- do exceptions.raise_compiler_error(\"Specifying 'location' property in snapshots is not supported.\") -%}\n        {% endif %}\n    {% endif %}\n    {{ return(materialization_snapshot_default()) }}\n{% endmaterialization %}\n\n{% macro trino__snapshot_hash_arguments(args) -%}\n  lower(to_hex(md5(to_utf8(concat({%- for arg in args -%}\n    coalesce(cast({{ arg }} as varchar), ''){% if not loop.last %}, '|',{% endif -%}\n  {%- endfor -%}\n  )))))\n{%- endmacro %}\n\n{% macro trino__post_snapshot(staging_relation) %}\n  -- Clean up the snapshot temp table\n  {% do drop_relation(staging_relation) %}\n{% endmacro %}\n\n{% macro trino__snapshot_merge_sql(target, source, insert_cols) -%}\n    {%- set insert_cols_csv = insert_cols | join(', ') -%}\n\n    {%- set columns = config.get(\"snapshot_table_column_names\") or get_snapshot_table_column_names() -%}\n\n    merge into {{ target.render() }} as DBT_INTERNAL_DEST\n    using {{ source }} as DBT_INTERNAL_SOURCE\n    on DBT_INTERNAL_SOURCE.{{ columns.dbt_scd_id }} = DBT_INTERNAL_DEST.{{ columns.dbt_scd_id }}\n\n    when matched\n     {% if config.get(\"dbt_valid_to_current\") %}\n       and (DBT_INTERNAL_DEST.{{ columns.dbt_valid_to }} = {{ config.get('dbt_valid_to_current') }} or\n            DBT_INTERNAL_DEST.{{ columns.dbt_valid_to }} is null)\n     {% else %}\n       and DBT_INTERNAL_DEST.{{ columns.dbt_valid_to }} is null\n     {% endif %}\n     and DBT_INTERNAL_SOURCE.dbt_change_type in ('update', 'delete')\n        then update\n        set {{ columns.dbt_valid_to }} = DBT_INTERNAL_SOURCE.{{ columns.dbt_valid_to }}\n\n    when not matched\n     and DBT_INTERNAL_SOURCE.dbt_change_type = 'insert'\n        then insert ({{ insert_cols_csv }})\n        values ({% for insert_col in insert_cols -%}\n            DBT_INTERNAL_SOURCE.{{ insert_col }}\n            {%- if not loop.last %}, {% endif %}\n            {%- endfor %})\n\n{% endmacro %}\n"
  },
  {
    "path": "dbt/include/trino/macros/materializations/table.sql",
    "content": "{% materialization table, adapter = 'trino' %}\n  {%- set on_table_exists = config.get('on_table_exists', 'rename') -%}\n  {% if on_table_exists not in ['rename', 'drop', 'replace', 'skip'] %}\n      {%- set log_message = 'Invalid value for on_table_exists (%s) specified. Setting default value (%s).' % (on_table_exists, 'rename') -%}\n      {% do log(log_message) %}\n      {%- set on_table_exists = 'rename' -%}\n  {% endif %}\n\n  {%- set existing_relation = load_cached_relation(this) -%}\n  {%- set target_relation = this.incorporate(type='table') %}\n\n  {% if on_table_exists == 'rename' %}\n      {%- set intermediate_relation =  make_intermediate_relation(target_relation) -%}\n      -- the intermediate_relation should not already exist in the database; get_relation\n      -- will return None in that case. Otherwise, we get a relation that we can drop\n      -- later, before we try to use this name for the current operation\n      {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) -%}\n\n      {%- set backup_relation_type = 'table' if existing_relation is none else existing_relation.type -%}\n      {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n      -- as above, the backup_relation should not already exist\n      {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n\n      -- drop the temp relations if they exist already in the database\n      {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n      {{ drop_relation_if_exists(preexisting_backup_relation) }}\n  {% endif %}\n\n  {{ run_hooks(pre_hooks) }}\n\n  -- grab current tables grants config for comparision later on\n  {% set grant_config = config.get('grants') %}\n\n  {#-- Create table with given `on_table_exists` mode #}\n  {% do on_table_exists_logic(on_table_exists, existing_relation, intermediate_relation, backup_relation, target_relation) %}\n\n  {% do persist_docs(target_relation, model) %}\n\n  {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n  {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n  {{ run_hooks(post_hooks) }}\n\n  {{ return({'relations': [target_relation]}) }}\n{% endmaterialization %}\n\n\n{% macro on_table_exists_logic(on_table_exists, existing_relation, intermediate_relation, backup_relation, target_relation) -%}\n  {#-- Create table with given `on_table_exists` mode #}\n  {% if on_table_exists == 'rename' %}\n\n      {#-- table does not exists #}\n      {% if existing_relation is none %}\n          {% call statement('main') -%}\n              {{ create_table_as(False, target_relation, sql) }}\n          {%- endcall %}\n\n      {#-- table does exists #}\n      {% else %}\n          {#-- build modeldock #}\n          {% call statement('main') -%}\n              {{ create_table_as(False, intermediate_relation, sql) }}\n          {%- endcall %}\n\n          {#-- cleanup #}\n          {{ adapter.rename_relation(existing_relation, backup_relation) }}\n          {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n          {#-- finally, drop the existing/backup relation after the commit #}\n          {{ drop_relation_if_exists(backup_relation) }}\n      {% endif %}\n\n  {% elif on_table_exists == 'drop' %}\n      {#-- cleanup #}\n      {%- if existing_relation is not none -%}\n          {{ adapter.drop_relation(existing_relation) }}\n      {%- endif -%}\n\n      {#-- build model #}\n      
{% call statement('main') -%}\n        {{ create_table_as(False, target_relation, sql) }}\n      {%- endcall %}\n\n  {% elif on_table_exists == 'replace' %}\n      {#-- build model #}\n      {% call statement('main') -%}\n        {{ create_table_as(False, target_relation, sql, 'replace') }}\n      {%- endcall %}\n\n  {% elif on_table_exists == 'skip' %}\n      {#-- build model #}\n      {% call statement('main') -%}\n        {{ create_table_as(False, target_relation, sql, 'skip') }}\n      {%- endcall %}\n\n  {% endif %}\n{% endmacro %}\n"
  },
  {
    "path": "dbt/include/trino/macros/materializations/view.sql",
    "content": "{% materialization view, adapter='trino' -%}\n    {% set to_return = create_or_replace_view() %}\n    {% set target_relation = this.incorporate(type='view') %}\n\n    {% do persist_docs(target_relation, model) %}\n\n    {% do return(to_return) %}\n{%- endmaterialization %}\n"
  },
  {
    "path": "dbt/include/trino/macros/utils/any_value.sql",
    "content": "{% macro trino__any_value(expression) -%}\n    min({{ expression }})\n{%- endmacro %}\n"
  },
  {
    "path": "dbt/include/trino/macros/utils/array_append.sql",
    "content": "{% macro trino__array_append(array, new_element) -%}\n    {{ array_concat(array, array_construct([new_element])) }}\n{%- endmacro %}\n"
  },
  {
    "path": "dbt/include/trino/macros/utils/array_concat.sql",
    "content": "{% macro trino__array_concat(array_1, array_2) -%}\n    concat({{ array_1 }}, {{ array_2 }})\n{%- endmacro %}\n"
  },
  {
    "path": "dbt/include/trino/macros/utils/array_construct.sql",
    "content": "{% macro trino__array_construct(inputs, data_type) -%}\n    {%- if not inputs -%}\n    null\n    {%- else -%}\n    array[ {{ inputs|join(' , ') }} ]\n    {%- endif -%}\n{%- endmacro %}\n"
  },
  {
    "path": "dbt/include/trino/macros/utils/bool_or.sql",
    "content": "{% macro trino__bool_or(expression) -%}\n    bool_or({{ expression }})\n{%- endmacro %}\n"
  },
  {
    "path": "dbt/include/trino/macros/utils/datatypes.sql",
    "content": "{% macro trino__type_float() -%}\n    double\n{%- endmacro %}\n\n{% macro trino__type_string() -%}\n    varchar\n{%- endmacro %}\n\n{% macro trino__type_numeric() -%}\n    decimal(28, 6)\n{%- endmacro %}\n\n{%- macro trino__type_int() -%}\n    integer\n{%- endmacro -%}\n"
  },
  {
    "path": "dbt/include/trino/macros/utils/date_spine.sql",
    "content": "{% macro trino__date_spine(datepart, start_date, end_date) %}\n\n\n    {# call as follows:\n\n    date_spine(\n        \"day\",\n        \"to_date('01/01/2016', 'mm/dd/yyyy')\",\n        \"dbt.dateadd(week, 1, current_date)\"\n    ) #}\n\n\n    with rawdata as (\n\n        {{dbt.generate_series(\n            dbt.get_intervals_between(start_date, end_date, datepart)\n        )}}\n\n    ),\n\n    all_periods as (\n\n        select (\n            {{\n                dbt.dateadd(\n                    datepart,\n                    \"row_number() over (order by 1) - 1\",\n                \"cast(\" ~ start_date ~ \" as date)\"\n                )\n            }}\n        ) as date_{{datepart}}\n        from rawdata\n\n    ),\n\n    filtered as (\n\n        select *\n        from all_periods\n    where date_{{datepart}} <= cast({{ end_date }} as date)\n\n    )\n\n    select * from filtered\n\n{% endmacro %}\n"
  },
  {
    "path": "dbt/include/trino/macros/utils/date_trunc.sql",
    "content": "{% macro trino__date_trunc(datepart, date) -%}\n    date_trunc('{{datepart}}', {{date}})\n{%- endmacro %}\n"
  },
  {
    "path": "dbt/include/trino/macros/utils/dateadd.sql",
    "content": "{% macro trino__dateadd(datepart, interval, from_date_or_timestamp) -%}\n    date_add('{{ datepart }}', {{ interval }}, {{ from_date_or_timestamp }})\n{%- endmacro %}\n"
  },
  {
    "path": "dbt/include/trino/macros/utils/datediff.sql",
    "content": "{% macro trino__datediff(first_date, second_date, datepart) -%}\n    {%- if datepart == 'year' -%}\n        (year(CAST({{ second_date }} AS TIMESTAMP)) - year(CAST({{ first_date }} AS TIMESTAMP)))\n    {%- elif datepart == 'quarter' -%}\n        ({{ datediff(first_date, second_date, 'year') }} * 4) + quarter(CAST({{ second_date }} AS TIMESTAMP)) - quarter(CAST({{ first_date }} AS TIMESTAMP))\n    {%- elif datepart == 'month' -%}\n        ({{ datediff(first_date, second_date, 'year') }} * 12) + month(CAST({{ second_date }} AS TIMESTAMP)) - month(CAST({{ first_date }} AS TIMESTAMP))\n    {%- elif datepart == 'day' -%}\n        ((to_milliseconds((CAST(CAST({{ second_date }} AS TIMESTAMP) AS DATE) - CAST(CAST({{ first_date }} AS TIMESTAMP) AS DATE)))) / 86400000)\n    {%- elif datepart == 'week' -%}\n         ({{ datediff(first_date, second_date, 'day') }} / 7 + case\n            when dow(CAST({{first_date}} AS TIMESTAMP)) <= dow(CAST({{second_date}} AS TIMESTAMP)) then\n                case when {{first_date}} <= {{second_date}} then 0 else -1 end\n            else\n                case when {{first_date}} <= {{second_date}} then 1 else 0 end\n        end)\n    {%- elif datepart == 'hour' -%}\n        ({{ datediff(first_date, second_date, 'day') }} * 24 + hour(CAST({{ second_date }} AS TIMESTAMP)) - hour(CAST({{ first_date }} AS TIMESTAMP)))\n    {%- elif datepart == 'minute' -%}\n        ({{ datediff(first_date, second_date, 'hour') }} * 60 + minute(CAST({{ second_date }} AS TIMESTAMP)) - minute(CAST({{ first_date }} AS TIMESTAMP)))\n    {%- elif datepart == 'second' -%}\n        ({{ datediff(first_date, second_date, 'minute') }} * 60 + second(CAST({{ second_date }} AS TIMESTAMP)) - second(CAST({{ first_date }} AS TIMESTAMP)))\n    {%- elif datepart == 'millisecond' -%}\n        (to_milliseconds((CAST({{ second_date }} AS TIMESTAMP) - CAST({{ first_date }} AS TIMESTAMP))))\n    {%- else -%}\n        {% if execute %}{{ exceptions.raise_compiler_error(\"Unsupported datepart for macro datediff in Trino: {!r}\".format(datepart)) }}{% endif %}\n    {%- endif -%}\n{%- endmacro %}\n"
  },
  {
    "path": "dbt/include/trino/macros/utils/hash.sql",
    "content": "{% macro trino__hash(field) -%}\n    lower(to_hex(md5(to_utf8(cast({{field}} as varchar)))))\n{%- endmacro %}\n"
  },
  {
    "path": "dbt/include/trino/macros/utils/listagg.sql",
    "content": "{% macro trino__listagg(measure, delimiter_text, order_by_clause, limit_num) -%}\n    {% set collect_list %} array_agg({{ measure }} {% if order_by_clause -%}{{ order_by_clause }}{%- endif %}) {% endset %}\n    {% set limited %} slice({{ collect_list }}, 1, {{ limit_num }}) {% endset %}\n    {% set collected = limited if limit_num else collect_list %}\n    {% set final %} array_join({{ collected }}, {{ delimiter_text }}) {% endset %}\n    {% do return(final) %}\n{%- endmacro %}\n"
  },
  {
    "path": "dbt/include/trino/macros/utils/right.sql",
    "content": "{% macro trino__right(string_text, length_expression) %}\n    case when {{ length_expression }} = 0\n        then ''\n    else\n        substr({{ string_text }}, -1 * ({{ length_expression }}))\n    end\n{%- endmacro -%}\n"
  },
  {
    "path": "dbt/include/trino/macros/utils/safe_cast.sql",
    "content": "{% macro trino__safe_cast(field, type) -%}\n    try_cast({{field}} as {{type}})\n{%- endmacro %}\n"
  },
  {
    "path": "dbt/include/trino/macros/utils/split_part.sql",
    "content": "{% macro trino__split_part(string_text, delimiter_text, part_number) %}\n  {% if part_number >= 0 %}\n    {{ dbt.default__split_part(string_text, delimiter_text, part_number) }}\n  {% else %}\n    {{ dbt._split_part_negative(string_text, delimiter_text, part_number) }}\n  {% endif %}\n{% endmacro %}\n"
  },
  {
    "path": "dbt/include/trino/macros/utils/timestamps.sql",
    "content": "{% macro trino__current_timestamp() -%}\n    current_timestamp\n{%- endmacro %}\n\n{% macro trino__snapshot_string_as_time(timestamp) %}\n    {%- set result = \"timestamp '\" ~ timestamp ~ \"'\" -%}\n    {{ return(result) }}\n{% endmacro %}\n"
  },
  {
    "path": "dbt/include/trino/sample_profiles.yml",
    "content": "default:\n  outputs:\n\n    dev:\n      type: trino\n      method: none  # optional, one of {none | ldap | kerberos}\n      user: [dev_user]\n      password: [password]  # required if method is ldap or kerberos\n      database: [database name]\n      host: [hostname]\n      port: [port number]\n      schema: [dev_schema]\n      threads: [1 or more]\n\n    prod:\n      type: trino\n      method: none  # optional, one of {none | ldap | kerberos}\n      user: [prod_user]\n      password: [prod_password]  # required if method is ldap or kerberos\n      database: [database name]\n      host: [hostname]\n      port: [port number]\n      schema: [prod_schema]\n      threads: [1 or more]\n\n  target: dev\n"
  },
  {
    "path": "dev_requirements.txt",
    "content": "dbt-tests-adapter~=1.19.1\nmypy==1.19.1  # patch updates have historically introduced breaking changes\npre-commit~=4.3\npytest~=8.4\ntox~=4.30\n"
  },
  {
    "path": "docker/init_starburst.bash",
    "content": "#!/bin/bash\n\n# move to wherever we are so docker things work\ncd \"$(dirname \"${BASH_SOURCE[0]}\")\"\ncd ..\n\nset -exo pipefail\n\ndocker compose -f docker-compose-starburst.yml build\ndocker compose -f docker-compose-starburst.yml up -d --quiet-pull\ntimeout 5m bash -c -- 'while ! docker compose -f docker-compose-starburst.yml logs trino 2>&1 | tail -n 1 | grep \"SERVER STARTED\"; do sleep 2; done'\n"
  },
  {
    "path": "docker/init_trino.bash",
    "content": "#!/bin/bash\n\n# move to wherever we are so docker things work\ncd \"$(dirname \"${BASH_SOURCE[0]}\")\"\ncd ..\n\nset -exo pipefail\n\ndocker compose -f docker-compose-trino.yml build\ndocker compose -f docker-compose-trino.yml up -d --quiet-pull\ntimeout 5m bash -c -- 'while ! docker compose -f docker-compose-trino.yml logs trino 2>&1 | tail -n 1 | grep \"SERVER STARTED\"; do sleep 2; done'\n"
  },
  {
    "path": "docker/remove_starburst.bash",
    "content": "#!/bin/bash\n\n# move to wherever we are so docker things work\ncd \"$(dirname \"${BASH_SOURCE[0]}\")\"\ncd ..\ndocker compose -f docker-compose-starburst.yml down\n"
  },
  {
    "path": "docker/remove_trino.bash",
    "content": "#!/bin/bash\n\n# move to wherever we are so docker things work\ncd \"$(dirname \"${BASH_SOURCE[0]}\")\"\ncd ..\ndocker compose -f docker-compose-trino.yml down\n"
  },
  {
    "path": "docker/starburst/catalog/delta.properties",
    "content": "connector.name=delta-lake\ndelta.enable-non-concurrent-writes=true\nfs.native-s3.enabled=true\ns3.region=us-east-1\ns3.endpoint=http://minio:9000\ns3.path-style-access=true\nhive.metastore.uri=thrift://hive-metastore:9083\ns3.aws-access-key=minio\ns3.aws-secret-key=minio123\nhive.metastore-cache-ttl=0s\nhive.metastore-refresh-interval=5s\ndelta.security=allow-all\n"
  },
  {
    "path": "docker/starburst/catalog/hive.properties",
    "content": "connector.name=hive\nhive.metastore.uri=thrift://hive-metastore:9083\nfs.native-s3.enabled=true\ns3.region=us-east-1\ns3.endpoint=http://minio:9000\ns3.path-style-access=true\ns3.aws-access-key=minio\ns3.aws-secret-key=minio123\nhive.metastore-cache-ttl=0s\nhive.metastore-refresh-interval=5s\nhive.security=sql-standard\n"
  },
  {
    "path": "docker/starburst/catalog/iceberg.properties",
    "content": "connector.name=iceberg\nhive.metastore.uri=thrift://hive-metastore:9083\nfs.native-s3.enabled=true\ns3.region=us-east-1\ns3.endpoint=http://minio:9000\ns3.path-style-access=true\ns3.aws-access-key=minio\ns3.aws-secret-key=minio123\nhive.metastore-cache-ttl=0s\nhive.metastore-refresh-interval=5s\niceberg.unique-table-location=true\n"
  },
  {
    "path": "docker/starburst/catalog/memory.properties",
    "content": "connector.name=memory\nmemory.max-data-per-node=128MB\n"
  },
  {
    "path": "docker/starburst/catalog/postgresql.properties",
    "content": "connector.name=postgresql\nconnection-url=jdbc:postgresql://postgres:5432/dbt-trino\nconnection-user=dbt-trino\nconnection-password=dbt-trino\n"
  },
  {
    "path": "docker/starburst/catalog/tpch.properties",
    "content": "connector.name=tpch\n"
  },
  {
    "path": "docker/starburst/etc/config.properties",
    "content": "coordinator=true\nnode-scheduler.include-coordinator=true\nhttp-server.http.port=8080\ndiscovery.uri=http://localhost:8080\n"
  },
  {
    "path": "docker/starburst/etc/jvm.config",
    "content": "-server\n-XX:InitialRAMPercentage=80\n-XX:MaxRAMPercentage=80\n-XX:G1HeapRegionSize=32M\n-XX:+ExplicitGCInvokesConcurrent\n-XX:+HeapDumpOnOutOfMemoryError\n-XX:+ExitOnOutOfMemoryError\n-XX:-OmitStackTraceInFastThrow\n-XX:ReservedCodeCacheSize=256M\n-XX:PerMethodRecompilationCutoff=10000\n-XX:PerBytecodeRecompilationCutoff=10000\n-Djdk.attach.allowAttachSelf=true\n-Djdk.nio.maxCachedBufferSize=2000000\n"
  },
  {
    "path": "docker/starburst/etc/node.properties",
    "content": "node.environment=docker\nnode.data-dir=/data/starburst\n"
  },
  {
    "path": "docker/trino/catalog/delta.properties",
    "content": "connector.name=delta-lake\ndelta.enable-non-concurrent-writes=true\nfs.native-s3.enabled=true\ns3.region=us-east-1\ns3.endpoint=http://minio:9000\ns3.path-style-access=true\nhive.metastore.uri=thrift://hive-metastore:9083\ns3.aws-access-key=minio\ns3.aws-secret-key=minio123\nhive.metastore-cache-ttl=0s\nhive.metastore-refresh-interval=5s\n"
  },
  {
    "path": "docker/trino/catalog/hive.properties",
    "content": "connector.name=hive\nhive.metastore.uri=thrift://hive-metastore:9083\nfs.native-s3.enabled=true\ns3.region=us-east-1\ns3.endpoint=http://minio:9000\ns3.path-style-access=true\ns3.aws-access-key=minio\ns3.aws-secret-key=minio123\nhive.metastore-cache-ttl=0s\nhive.metastore-refresh-interval=5s\nhive.security=sql-standard\n"
  },
  {
    "path": "docker/trino/catalog/iceberg.properties",
    "content": "connector.name=iceberg\nhive.metastore.uri=thrift://hive-metastore:9083\nfs.native-s3.enabled=true\ns3.region=us-east-1\ns3.endpoint=http://minio:9000\ns3.path-style-access=true\ns3.aws-access-key=minio\ns3.aws-secret-key=minio123\nhive.metastore-cache-ttl=0s\nhive.metastore-refresh-interval=5s\n"
  },
  {
    "path": "docker/trino/catalog/memory.properties",
    "content": "connector.name=memory\nmemory.max-data-per-node=128MB\n"
  },
  {
    "path": "docker/trino/catalog/postgresql.properties",
    "content": "connector.name=postgresql\nconnection-url=jdbc:postgresql://postgres:5432/dbt-trino\nconnection-user=dbt-trino\nconnection-password=dbt-trino\n"
  },
  {
    "path": "docker/trino/catalog/tpch.properties",
    "content": "connector.name=tpch\n"
  },
  {
    "path": "docker/trino/etc/config.properties",
    "content": "coordinator=true\nnode-scheduler.include-coordinator=true\nhttp-server.http.port=8080\ndiscovery.uri=http://localhost:8080\n"
  },
  {
    "path": "docker/trino/etc/jvm.config",
    "content": "-server\n-XX:InitialRAMPercentage=80\n-XX:MaxRAMPercentage=80\n-XX:G1HeapRegionSize=32M\n-XX:+ExplicitGCInvokesConcurrent\n-XX:+HeapDumpOnOutOfMemoryError\n-XX:+ExitOnOutOfMemoryError\n-XX:-OmitStackTraceInFastThrow\n-XX:ReservedCodeCacheSize=256M\n-XX:PerMethodRecompilationCutoff=10000\n-XX:PerBytecodeRecompilationCutoff=10000\n-Djdk.attach.allowAttachSelf=true\n-Djdk.nio.maxCachedBufferSize=2000000\n"
  },
  {
    "path": "docker/trino/etc/node.properties",
    "content": "node.environment=docker\nnode.data-dir=/data/trino\n"
  },
  {
    "path": "docker-compose-starburst.yml",
    "content": "services:\n  trino:\n    ports:\n      - \"8080:8080\"\n    image: \"starburstdata/starburst-enterprise:477-e.1\"\n    volumes:\n      - ./docker/starburst/etc:/etc/starburst\n      - ./docker/starburst/catalog:/etc/starburst/catalog\n    environment:\n      - _JAVA_OPTIONS=-Dfile.encoding=UTF-8\n\n  postgres:\n    ports:\n      - \"5432:5432\"\n    image: postgres:18\n    environment:\n      POSTGRES_USER: dbt-trino\n      POSTGRES_PASSWORD: dbt-trino\n\n  metastore_db:\n    image: postgres:18\n    hostname: metastore_db\n    environment:\n      POSTGRES_USER: hive\n      POSTGRES_PASSWORD: hive\n      POSTGRES_DB: metastore\n\n  hive-metastore:\n    hostname: hive-metastore\n    image: 'starburstdata/hive:3.1.3-e.15'\n    ports:\n      - '9083:9083' # Metastore Thrift\n    environment:\n      HIVE_METASTORE_DRIVER: org.postgresql.Driver\n      HIVE_METASTORE_JDBC_URL: jdbc:postgresql://metastore_db:5432/metastore\n      HIVE_METASTORE_USER: hive\n      HIVE_METASTORE_PASSWORD: hive\n      HIVE_METASTORE_WAREHOUSE_DIR: s3://datalake/\n      S3_ENDPOINT: http://minio:9000\n      S3_ACCESS_KEY: minio\n      S3_SECRET_KEY: minio123\n      S3_PATH_STYLE_ACCESS: \"true\"\n      REGION: \"\"\n      GOOGLE_CLOUD_KEY_FILE_PATH: \"\"\n      AZURE_ADL_CLIENT_ID: \"\"\n      AZURE_ADL_CREDENTIAL: \"\"\n      AZURE_ADL_REFRESH_URL: \"\"\n      AZURE_ABFS_STORAGE_ACCOUNT: \"\"\n      AZURE_ABFS_ACCESS_KEY: \"\"\n      AZURE_WASB_STORAGE_ACCOUNT: \"\"\n      AZURE_ABFS_OAUTH: \"\"\n      AZURE_ABFS_OAUTH_TOKEN_PROVIDER: \"\"\n      AZURE_ABFS_OAUTH_CLIENT_ID: \"\"\n      AZURE_ABFS_OAUTH_SECRET: \"\"\n      AZURE_ABFS_OAUTH_ENDPOINT: \"\"\n      AZURE_WASB_ACCESS_KEY: \"\"\n      HIVE_METASTORE_USERS_IN_ADMIN_ROLE: \"admin\"\n    depends_on:\n      - metastore_db\n\n  minio:\n    hostname: minio\n    image: 'minio/minio:RELEASE.2025-09-07T16-13-09Z'\n    container_name: minio\n    ports:\n      - '9000:9000'\n      - '9001:9001'\n    environment:\n      MINIO_ACCESS_KEY: minio\n      MINIO_SECRET_KEY: minio123\n    command: server /data --console-address \":9001\"\n\n  # This job will create the \"datalake\" bucket on Minio\n  mc-job:\n    image: 'minio/mc:RELEASE.2025-04-16T18-13-26Z'\n    entrypoint: |\n      /bin/bash -c \"\n      sleep 5;\n      /usr/bin/mc config --quiet host add myminio http://minio:9000 minio minio123;\n      /usr/bin/mc mb --quiet myminio/datalake\n      \"\n    depends_on:\n      - minio\n\nnetworks:\n  default:\n    name: dbt-net\n    external: true\n"
  },
  {
    "path": "docker-compose-trino.yml",
    "content": "services:\n  trino:\n    ports:\n      - \"8080:8080\"\n    image: \"trinodb/trino:478\"\n    volumes:\n      - ./docker/trino/etc:/usr/lib/trino/etc:ro\n      - ./docker/trino/catalog:/etc/trino/catalog\n\n  postgres:\n    ports:\n      - \"5432:5432\"\n    image: postgres:18\n    container_name: postgres\n    environment:\n      POSTGRES_USER: dbt-trino\n      POSTGRES_PASSWORD: dbt-trino\n\n  metastore_db:\n    image: postgres:18\n    hostname: metastore_db\n    environment:\n      POSTGRES_USER: hive\n      POSTGRES_PASSWORD: hive\n      POSTGRES_DB: metastore\n\n  hive-metastore:\n    hostname: hive-metastore\n    image: 'starburstdata/hive:3.1.3-e.15'\n    ports:\n      - '9083:9083' # Metastore Thrift\n    environment:\n      HIVE_METASTORE_DRIVER: org.postgresql.Driver\n      HIVE_METASTORE_JDBC_URL: jdbc:postgresql://metastore_db:5432/metastore\n      HIVE_METASTORE_USER: hive\n      HIVE_METASTORE_PASSWORD: hive\n      HIVE_METASTORE_WAREHOUSE_DIR: s3://datalake/\n      S3_ENDPOINT: http://minio:9000\n      S3_ACCESS_KEY: minio\n      S3_SECRET_KEY: minio123\n      S3_PATH_STYLE_ACCESS: \"true\"\n      REGION: \"\"\n      GOOGLE_CLOUD_KEY_FILE_PATH: \"\"\n      AZURE_ADL_CLIENT_ID: \"\"\n      AZURE_ADL_CREDENTIAL: \"\"\n      AZURE_ADL_REFRESH_URL: \"\"\n      AZURE_ABFS_STORAGE_ACCOUNT: \"\"\n      AZURE_ABFS_ACCESS_KEY: \"\"\n      AZURE_WASB_STORAGE_ACCOUNT: \"\"\n      AZURE_ABFS_OAUTH: \"\"\n      AZURE_ABFS_OAUTH_TOKEN_PROVIDER: \"\"\n      AZURE_ABFS_OAUTH_CLIENT_ID: \"\"\n      AZURE_ABFS_OAUTH_SECRET: \"\"\n      AZURE_ABFS_OAUTH_ENDPOINT: \"\"\n      AZURE_WASB_ACCESS_KEY: \"\"\n      HIVE_METASTORE_USERS_IN_ADMIN_ROLE: \"admin\"\n    depends_on:\n      - metastore_db\n\n  minio:\n    hostname: minio\n    image: 'minio/minio:RELEASE.2025-09-07T16-13-09Z'\n    container_name: minio\n    ports:\n      - '9000:9000'\n      - '9001:9001'\n    environment:\n      MINIO_ACCESS_KEY: minio\n      MINIO_SECRET_KEY: minio123\n    command: server /data --console-address \":9001\"\n\n  # This job will create the \"datalake\" bucket on Minio\n  mc-job:\n    image: 'minio/mc:RELEASE.2025-04-16T18-13-26Z'\n    entrypoint: |\n      /bin/bash -c \"\n      sleep 5;\n      /usr/bin/mc config --quiet host add myminio http://minio:9000 minio minio123;\n      /usr/bin/mc mb --quiet myminio/datalake\n      \"\n    depends_on:\n      - minio\n\nnetworks:\n  default:\n    name: dbt-net\n    external: true\n"
  },
  {
    "path": "mypy.ini",
    "content": "[mypy]\nnamespace_packages = True\nexplicit_package_bases = True\n"
  },
  {
    "path": "pytest.ini",
    "content": "[pytest]\nfilterwarnings =\n    ignore:.*'soft_unicode' has been renamed to 'soft_str'*:DeprecationWarning\n    ignore:unclosed file .*:ResourceWarning\ntestpaths =\n    tests/unit\n    tests/functional\nmarkers =\n    delta\n    iceberg\n    hive\n    postgresql\n    prepared_statements_disabled\n    skip_profile(profile)\n"
  },
  {
    "path": "setup.py",
    "content": "#!/usr/bin/env python\nimport os\nimport re\nimport sys\n\n# require python 3.9 or newer\nif sys.version_info < (3, 9):\n    print(\"Error: dbt does not support this version of Python.\")\n    print(\"Please upgrade to Python 3.9 or higher.\")\n    sys.exit(1)\n\n\n# require version of setuptools that supports find_namespace_packages\nfrom setuptools import setup\n\ntry:\n    from setuptools import find_namespace_packages\nexcept ImportError:\n    # the user has a downlevel version of setuptools.\n    print(\"Error: dbt requires setuptools v40.1.0 or higher.\")\n    print('Please upgrade setuptools with \"pip install --upgrade setuptools\" ' \"and try again\")\n    sys.exit(1)\n\nthis_directory = os.path.abspath(os.path.dirname(__file__))\nwith open(os.path.join(this_directory, \"README.md\")) as f:\n    long_description = f.read()\n\n\npackage_name = \"dbt-trino\"\n\n\n# get this package's version from dbt/adapters/<name>/__version__.py\ndef _get_plugin_version_dict():\n    _version_path = os.path.join(this_directory, \"dbt\", \"adapters\", \"trino\", \"__version__.py\")\n    _semver = r\"\"\"(?P<major>\\d+)\\.(?P<minor>\\d+)\\.(?P<patch>\\d+)\"\"\"\n    _pre = r\"\"\"((?P<prekind>a|b|rc)(?P<pre>\\d+))?\"\"\"\n    _version_pattern = rf\"\"\"version\\s*=\\s*[\"']{_semver}{_pre}[\"']\"\"\"\n    with open(_version_path) as f:\n        match = re.search(_version_pattern, f.read().strip())\n        if match is None:\n            raise ValueError(f\"invalid version at {_version_path}\")\n        return match.groupdict()\n\n\ndef _dbt_trino_version():\n    parts = _get_plugin_version_dict()\n    trino_version = \"{major}.{minor}.{patch}\".format(**parts)\n    if parts[\"prekind\"] and parts[\"pre\"]:\n        trino_version += parts[\"prekind\"] + parts[\"pre\"]\n    return trino_version\n\n\npackage_version = _dbt_trino_version()\ndescription = \"\"\"The trino adapter plugin for dbt (data build tool)\"\"\"\n\nsetup(\n    name=package_name,\n    version=package_version,\n    description=description,\n    long_description=long_description,\n    long_description_content_type=\"text/markdown\",\n    platforms=\"any\",\n    license=\"Apache License 2.0\",\n    license_files=(\"LICENSE.txt\",),\n    author=\"Starburst Data\",\n    author_email=\"info@starburstdata.com\",\n    url=\"https://github.com/starburstdata/dbt-trino\",\n    packages=find_namespace_packages(include=[\"dbt\", \"dbt.*\"]),\n    package_data={\n        \"dbt\": [\n            \"include/trino/dbt_project.yml\",\n            \"include/trino/sample_profiles.yml\",\n            \"include/trino/macros/*.sql\",\n            \"include/trino/macros/*/*.sql\",\n            \"include/trino/macros/*/*/*.sql\",\n        ]\n    },\n    install_requires=[\n        \"dbt-common>=1.25.0,<2.0\",\n        \"dbt-adapters>=1.16,<2.0\",\n        \"trino~=0.331\",\n        # add dbt-core to ensure backwards compatibility of installation, this is not a functional dependency\n        \"dbt-core>=1.8.0\",\n    ],\n    zip_safe=False,\n    classifiers=[\n        \"Development Status :: 5 - Production/Stable\",\n        \"License :: OSI Approved :: Apache Software License\",\n        \"Operating System :: Microsoft :: Windows\",\n        \"Operating System :: MacOS :: MacOS X\",\n        \"Operating System :: POSIX :: Linux\",\n        \"Programming Language :: Python :: 3.9\",\n        \"Programming Language :: Python :: 3.10\",\n        \"Programming Language :: Python :: 3.11\",\n        \"Programming Language :: Python :: 3.12\",\n       
 \"Programming Language :: Python :: 3.13\",\n    ],\n    python_requires=\">=3.9\",\n)\n"
  },
  {
    "path": "tests/conftest.py",
    "content": "import os\n\nimport pytest\nimport trino\n\n# Import the functional fixtures as a plugin\n# Note: fixtures with session scope need to be local\n\npytest_plugins = [\"dbt.tests.fixtures.project\"]\n\n\ndef pytest_addoption(parser):\n    parser.addoption(\"--profile\", action=\"store\", default=\"trino_starburst\", type=str)\n\n\n# Skip tests for profiles marked with @pytest.mark.skip_profile\n# See pytest docs for skipping based on command-line options:\n# https://docs.pytest.org/en/latest/example/simple.html#control-skipping-of-tests-according-to-command-line-option\ndef pytest_collection_modifyitems(config, items):\n    profile_type = config.getoption(\"--profile\")\n    for item in items:\n        if skip_profile_marker := item.get_closest_marker(\"skip_profile\"):\n            if profile_type in skip_profile_marker.args:\n                skip_profile = pytest.mark.skip(reason=f\"skipped on {profile_type} profile\")\n                item.add_marker(skip_profile)\n\n\n# The profile dictionary, used to write out profiles.yml\n@pytest.fixture(scope=\"class\")\ndef dbt_profile_target(request):\n    profile_type = request.config.getoption(\"--profile\")\n    if profile_type == \"trino_starburst\":\n        target = get_trino_starburst_target()\n    elif profile_type == \"starburst_galaxy\":\n        target = get_galaxy_target()\n    else:\n        raise ValueError(f\"Invalid profile type '{profile_type}'\")\n\n    prepared_statements_disabled = request.node.get_closest_marker(\"prepared_statements_disabled\")\n    if prepared_statements_disabled:\n        target.update({\"prepared_statements_enabled\": False})\n\n    postgresql = request.node.get_closest_marker(\"postgresql\")\n    iceberg = request.node.get_closest_marker(\"iceberg\")\n    delta = request.node.get_closest_marker(\"delta\")\n    hive = request.node.get_closest_marker(\"hive\")\n\n    if sum(bool(x) for x in (postgresql, iceberg, delta)) > 1:\n        raise ValueError(\"Only one of postgresql, iceberg, delta can be specified as a marker\")\n\n    if postgresql:\n        target.update({\"catalog\": \"postgresql\"})\n\n    if delta:\n        target.update({\"catalog\": \"delta\"})\n\n    if iceberg:\n        target.update({\"catalog\": \"iceberg\"})\n\n    if hive:\n        target.update({\"catalog\": \"hive\"})\n\n    return target\n\n\ndef get_trino_starburst_target():\n    return {\n        \"type\": \"trino\",\n        \"method\": \"none\",\n        \"threads\": 4,\n        \"host\": \"localhost\",\n        \"port\": 8080,\n        \"user\": \"admin\",\n        \"password\": \"\",\n        \"roles\": {\n            \"hive\": \"admin\",\n        },\n        \"catalog\": \"memory\",\n        \"schema\": \"default\",\n        \"timezone\": \"UTC\",\n    }\n\n\ndef get_galaxy_target():\n    return {\n        \"type\": \"trino\",\n        \"method\": \"ldap\",\n        \"threads\": 4,\n        \"retries\": 5,\n        \"host\": os.environ.get(\"DBT_TESTS_STARBURST_GALAXY_HOST\"),\n        \"port\": 443,\n        \"user\": os.environ.get(\"DBT_TESTS_STARBURST_GALAXY_USER\"),\n        \"password\": os.environ.get(\"DBT_TESTS_STARBURST_GALAXY_PASSWORD\"),\n        \"catalog\": \"iceberg\",\n        \"schema\": \"default\",\n        \"timezone\": \"UTC\",\n    }\n\n\n@pytest.fixture(scope=\"class\")\ndef trino_connection(dbt_profile_target):\n    if dbt_profile_target[\"method\"] == \"ldap\":\n        return trino.dbapi.connect(\n            host=dbt_profile_target[\"host\"],\n            
port=dbt_profile_target[\"port\"],\n            auth=trino.auth.BasicAuthentication(\n                dbt_profile_target[\"user\"], dbt_profile_target[\"password\"]\n            ),\n            catalog=dbt_profile_target[\"catalog\"],\n            schema=dbt_profile_target[\"schema\"],\n            http_scheme=\"https\",\n        )\n    else:\n        return trino.dbapi.connect(\n            host=dbt_profile_target[\"host\"],\n            port=dbt_profile_target[\"port\"],\n            user=dbt_profile_target[\"user\"],\n            catalog=dbt_profile_target[\"catalog\"],\n            schema=dbt_profile_target[\"schema\"],\n        )\n\n\ndef get_engine_type(trino_connection):\n    conn = trino_connection\n    if \"galaxy.starburst.io\" in conn.host:\n        return \"starburst_galaxy\"\n    cur = conn.cursor()\n    cur.execute(\"SELECT version()\")\n    version = cur.fetchone()\n    if \"-e\" in version[0]:\n        return \"starburst_enterprise\"\n    else:\n        return \"trino\"\n\n\n@pytest.fixture(autouse=True)\ndef skip_by_engine_type(request, trino_connection):\n    engine_type = get_engine_type(trino_connection)\n    if request.node.get_closest_marker(\"skip_engine\"):\n        for skip_engine_type in request.node.get_closest_marker(\"skip_engine\").args:\n            if skip_engine_type == engine_type:\n                pytest.skip(f\"skipped on {engine_type} engine\")\n"
  },
  {
    "path": "tests/functional/adapter/behavior_flags/test_require_certificate_validation.py",
    "content": "import warnings\n\nimport pytest\nfrom dbt.tests.util import run_dbt, run_dbt_and_capture\nfrom urllib3.exceptions import InsecureRequestWarning\n\n\nclass TestRequireCertificateValidationDefault:\n    @pytest.fixture(scope=\"class\")\n    def project_config_update(self):\n        return {\"flags\": {}}\n\n    def test_cert_default_value(self, project):\n        assert project.adapter.connections.profile.credentials.cert is None\n\n    def test_require_certificate_validation_logs(self, project):\n        dbt_args = [\"show\", \"--inline\", \"select 1\"]\n        _, logs = run_dbt_and_capture(dbt_args)\n        assert \"It is strongly advised to enable `require_certificate_validation` flag\" in logs\n\n    @pytest.mark.skip_profile(\"trino_starburst\")\n    def test_require_certificate_validation_insecure_request_warning(self, project):\n        with warnings.catch_warnings(record=True) as w:\n            dbt_args = [\"show\", \"--inline\", \"select 1\"]\n            run_dbt(dbt_args)\n\n            # Check if any InsecureRequestWarning was raised\n            assert any(\n                issubclass(warning.category, InsecureRequestWarning) for warning in w\n            ), \"InsecureRequestWarning was not raised\"\n\n\nclass TestRequireCertificateValidationFalse:\n    @pytest.fixture(scope=\"class\")\n    def project_config_update(self):\n        return {\"flags\": {\"require_certificate_validation\": False}}\n\n    def test_cert_default_value(self, project):\n        assert project.adapter.connections.profile.credentials.cert is None\n\n    def test_require_certificate_validation_logs(self, project):\n        dbt_args = [\"show\", \"--inline\", \"select 1\"]\n        _, logs = run_dbt_and_capture(dbt_args)\n        assert \"It is strongly advised to enable `require_certificate_validation` flag\" in logs\n\n    @pytest.mark.skip_profile(\"trino_starburst\")\n    def test_require_certificate_validation_insecure_request_warning(self, project):\n        with warnings.catch_warnings(record=True) as w:\n            dbt_args = [\"show\", \"--inline\", \"select 1\"]\n            run_dbt(dbt_args)\n\n            # Check if any InsecureRequestWarning was raised\n            assert any(\n                issubclass(warning.category, InsecureRequestWarning) for warning in w\n            ), \"InsecureRequestWarning was not raised\"\n\n\nclass TestRequireCertificateValidationTrue:\n    @pytest.fixture(scope=\"class\")\n    def project_config_update(self):\n        return {\"flags\": {\"require_certificate_validation\": True}}\n\n    def test_cert_default_value(self, project):\n        assert project.adapter.connections.profile.credentials.cert is True\n\n    def test_require_certificate_validation_logs(self, project):\n        dbt_args = [\"show\", \"--inline\", \"select 1\"]\n        _, logs = run_dbt_and_capture(dbt_args)\n        assert \"It is strongly advised to enable `require_certificate_validation` flag\" not in logs\n\n    @pytest.mark.skip_profile(\"trino_starburst\")\n    def test_require_certificate_validation_insecure_request_warning(self, project):\n        with warnings.catch_warnings(record=True) as w:\n            dbt_args = [\"show\", \"--inline\", \"select 1\"]\n            run_dbt(dbt_args)\n\n            # Check if not any InsecureRequestWarning was raised\n            assert not any(\n                issubclass(warning.category, InsecureRequestWarning) for warning in w\n            ), \"InsecureRequestWarning was not raised\"\n"
  },
  {
    "path": "tests/functional/adapter/catalog_integrations/fixtures.py",
    "content": "MODEL_WITHOUT_CATALOG = \"\"\"\n{{ config(\n    materialized='table',\n) }}\n\nselect 1 as id, 'test' as name\n\"\"\"\n\nMODEL_WITH_CATALOG = \"\"\"\n{{ config(\n    materialized='table',\n    catalog_name='test_trino_catalog'\n) }}\n\nselect 1 as id, 'test' as name\n\"\"\"\n\nMODEL_WITH_CATALOG_CONFIGS_TABLE_FORMAT = \"\"\"\n{{ config(\n    materialized='table',\n    catalog_name='test_trino_catalog',\n    table_format='delta',\n) }}\n\nselect 1 as id, 'test' as name\n\"\"\"\n\nMODEL_WITH_CATALOG_CONFIGS_FILE_FORMAT = \"\"\"\n{{ config(\n    materialized='table',\n    catalog_name='test_trino_catalog',\n    file_format='parquet',\n) }}\n\nselect 1 as id, 'test' as name\n\"\"\"\n\nMODEL_WITH_CATALOG_CONFIGS_LOCATION = \"\"\"\n{{ config(\n    materialized='table',\n    catalog_name='test_trino_catalog',\n    storage_uri='s3://datalake/storage_uri',\n    properties= {\n        'location': \"'s3://datalake/location'\",\n    }\n) }}\n\nselect 1 as id, 'test' as name\n\"\"\"\n\nMODEL_WITH_CATALOG_CONFIGS_STORAGE_URI = \"\"\"\n{{ config(\n    materialized='table',\n    catalog_name='test_trino_catalog',\n    storage_uri='s3://datalake/storage_uri',\n) }}\n\nselect 1 as id, 'test' as name\n\"\"\"\n\nMODEL_WITH_CATALOG_CONFIGS_BASE_LOCATION = \"\"\"\n{{ config(\n    materialized='table',\n    catalog_name='test_trino_catalog',\n    base_location_root='foo',\n    base_location_subpath='bar',\n) }}\n\nselect 1 as id, 'test' as name\n\"\"\"\n\nMODEL_WITH_CATALOG_CONFIGS_BASE_LOCATION_NONE = \"\"\"\n{{ config(\n    materialized='table',\n    catalog_name='test_trino_catalog',\n    base_location_root=None,\n) }}\n\nselect 1 as id, 'test' as name\n\"\"\"\n\nMODEL_WITH_CATALOG_CONFIGS_BASE_LOCATION_NONE_OMIT_BASE_LOCATION_ROOT = \"\"\"\n{{ config(\n    materialized='table',\n    catalog_name='test_trino_catalog',\n    base_location_root=None,\n    omit_base_location_root=true,\n) }}\n\nselect 1 as id, 'test' as name\n\"\"\"\n"
  },
  {
    "path": "tests/functional/adapter/catalog_integrations/test_catalog_integration.py",
    "content": "import pytest\nfrom dbt.tests.adapter.catalog_integrations.test_catalog_integration import (\n    BaseCatalogIntegrationValidation,\n)\nfrom dbt.tests.util import run_dbt_and_capture, write_file\n\nfrom tests.functional.adapter.catalog_integrations.fixtures import (\n    MODEL_WITH_CATALOG,\n    MODEL_WITH_CATALOG_CONFIGS_BASE_LOCATION,\n    MODEL_WITH_CATALOG_CONFIGS_BASE_LOCATION_NONE,\n    MODEL_WITH_CATALOG_CONFIGS_BASE_LOCATION_NONE_OMIT_BASE_LOCATION_ROOT,\n    MODEL_WITH_CATALOG_CONFIGS_FILE_FORMAT,\n    MODEL_WITH_CATALOG_CONFIGS_LOCATION,\n    MODEL_WITH_CATALOG_CONFIGS_STORAGE_URI,\n    MODEL_WITH_CATALOG_CONFIGS_TABLE_FORMAT,\n    MODEL_WITHOUT_CATALOG,\n)\n\n\n@pytest.mark.iceberg\nclass TestTrinoCatalogIntegrationFileFormat(BaseCatalogIntegrationValidation):\n    @pytest.fixture(scope=\"class\")\n    def catalogs(self):\n        return {\n            \"catalogs\": [\n                {\n                    \"name\": \"test_trino_catalog\",\n                    \"active_write_integration\": \"trino_integration\",\n                    \"write_integrations\": [\n                        {\n                            \"name\": \"trino_integration\",\n                            \"catalog_type\": \"trino\",\n                            \"file_format\": \"orc\",\n                        }\n                    ],\n                }\n            ]\n        }\n\n    def test_model_without_catalog(self, project):\n        # Create model with catalog configuration\n        write_file(MODEL_WITHOUT_CATALOG, project.project_root, \"models\", \"test_model.sql\")\n        results, logs = run_dbt_and_capture([\"--debug\", \"run\"], expect_pass=True)\n        assert len(results) == 1\n        assert \"CREATE TABLE\" in logs\n        assert \"WITH (\" not in logs\n\n    def test_model_with_catalog(self, project):\n        # Create model with catalog configuration\n        write_file(MODEL_WITH_CATALOG, project.project_root, \"models\", \"test_model.sql\")\n        results, logs = run_dbt_and_capture([\"--debug\", \"run\"], expect_pass=True)\n        assert len(results) == 1\n        assert \"CREATE TABLE\" in logs\n        assert \"WITH (\" in logs\n        assert \"format = 'orc'\" in logs\n\n    def test_model_with_catalog_configs_file_format(self, project):\n        # Create model with catalog configuration\n        write_file(\n            MODEL_WITH_CATALOG_CONFIGS_FILE_FORMAT,\n            project.project_root,\n            \"models\",\n            \"test_model.sql\",\n        )\n        results, logs = run_dbt_and_capture([\"--debug\", \"run\"], expect_pass=True)\n        assert len(results) == 1\n        assert \"CREATE TABLE\" in logs\n        assert \"WITH (\" in logs\n        assert \"format = 'parquet'\" in logs\n\n\n@pytest.mark.iceberg\n# Setting `type` property is available only in Starburst Galaxy\n# https://docs.starburst.io/starburst-galaxy/data-engineering/working-with-data-lakes/table-formats/gl-iceberg.html\n@pytest.mark.skip_profile(\"trino_starburst\")\nclass TestMyAdapterCatalogIntegration(BaseCatalogIntegrationValidation):\n    @pytest.fixture(scope=\"class\")\n    def catalogs(self):\n        return {\n            \"catalogs\": [\n                {\n                    \"name\": \"test_trino_catalog\",\n                    \"active_write_integration\": \"trino_integration\",\n                    \"write_integrations\": [\n                        {\n                            \"name\": \"trino_integration\",\n                            \"catalog_type\": 
\"trino\",\n                            \"table_format\": \"iceberg\",\n                        }\n                    ],\n                }\n            ]\n        }\n\n    def test_model_with_catalog(self, project):\n        # Create model with catalog configuration\n        write_file(MODEL_WITH_CATALOG, project.project_root, \"models\", \"test_model.sql\")\n        results, logs = run_dbt_and_capture([\"--debug\", \"run\"], expect_pass=True)\n        assert len(results) == 1\n        assert \"CREATE TABLE\" in logs\n        assert \"WITH (\" in logs\n        assert \"type = 'iceberg'\" in logs\n\n    def test_model_with_catalog_configs_table_format(self, project):\n        # Create model with catalog configuration\n        write_file(\n            MODEL_WITH_CATALOG_CONFIGS_TABLE_FORMAT,\n            project.project_root,\n            \"models\",\n            \"test_model.sql\",\n        )\n        results, logs = run_dbt_and_capture([\"--debug\", \"run\"], expect_pass=True)\n        assert len(results) == 1\n        assert \"CREATE TABLE\" in logs\n        assert \"WITH (\" in logs\n        assert \"type = 'delta'\" in logs\n\n\n@pytest.mark.iceberg\n@pytest.mark.skip_profile(\"starburst_galaxy\")\nclass TestTrinoCatalogIntegrationLocation(BaseCatalogIntegrationValidation):\n    @pytest.fixture(scope=\"class\")\n    def catalogs(self):\n        return {\n            \"catalogs\": [\n                {\n                    \"name\": \"test_trino_catalog\",\n                    \"active_write_integration\": \"trino_integration\",\n                    \"write_integrations\": [\n                        {\n                            \"name\": \"trino_integration\",\n                            \"catalog_type\": \"trino\",\n                            \"external_volume\": \"s3://datalake\",\n                        }\n                    ],\n                }\n            ]\n        }\n\n    def test_model_with_catalog(self, project):\n        # Create model with catalog configuration\n        write_file(MODEL_WITH_CATALOG, project.project_root, \"models\", \"test_model.sql\")\n        results, logs = run_dbt_and_capture([\"--debug\", \"run\"], expect_pass=True)\n        assert len(results) == 1\n        assert \"CREATE TABLE\" in logs\n        assert \"WITH (\" in logs\n        assert f\"location = 's3://datalake/_dbt/{project.test_schema}/test_model'\" in logs\n\n    def test_model_with_catalog_configs_location(self, project):\n        # Create model with catalog configuration\n        write_file(\n            MODEL_WITH_CATALOG_CONFIGS_LOCATION, project.project_root, \"models\", \"test_model.sql\"\n        )\n        results, logs = run_dbt_and_capture([\"--debug\", \"run\"], expect_pass=True)\n        assert len(results) == 1\n        assert \"CREATE TABLE\" in logs\n        assert \"WITH (\" in logs\n        assert \"location = 's3://datalake/location'\" in logs\n\n    def test_model_with_catalog_configs_storage_uri(self, project):\n        # Create model with catalog configuration\n        write_file(\n            MODEL_WITH_CATALOG_CONFIGS_STORAGE_URI,\n            project.project_root,\n            \"models\",\n            \"test_model.sql\",\n        )\n        results, logs = run_dbt_and_capture([\"--debug\", \"run\"], expect_pass=True)\n        assert len(results) == 1\n        assert \"CREATE TABLE\" in logs\n        assert \"WITH (\" in logs\n        assert \"location = 's3://datalake/storage_uri'\" in logs\n\n    def test_model_with_catalog_configs_base_location(self, 
project):\n        # Create model with catalog configuration\n        write_file(\n            MODEL_WITH_CATALOG_CONFIGS_BASE_LOCATION,\n            project.project_root,\n            \"models\",\n            \"test_model.sql\",\n        )\n        results, logs = run_dbt_and_capture([\"--debug\", \"run\"], expect_pass=True)\n        assert len(results) == 1\n        assert \"CREATE TABLE\" in logs\n        assert \"WITH (\" in logs\n        assert f\"location = 's3://datalake/foo/{project.test_schema}/test_model/bar'\" in logs\n\n    def test_model_with_catalog_configs_base_location_none(self, project):\n        # Create model with catalog configuration\n        write_file(\n            MODEL_WITH_CATALOG_CONFIGS_BASE_LOCATION_NONE,\n            project.project_root,\n            \"models\",\n            \"test_model.sql\",\n        )\n        results, logs = run_dbt_and_capture([\"--debug\", \"run\"], expect_pass=True)\n        assert len(results) == 1\n        assert \"CREATE TABLE\" in logs\n        assert \"WITH (\" in logs\n        assert f\"location = 's3://datalake/_dbt/{project.test_schema}/test_model'\" in logs\n\n    def test_model_with_catalog_configs_base_location_none_omit_base_location_root(self, project):\n        # Create model with catalog configuration\n        write_file(\n            MODEL_WITH_CATALOG_CONFIGS_BASE_LOCATION_NONE_OMIT_BASE_LOCATION_ROOT,\n            project.project_root,\n            \"models\",\n            \"test_model.sql\",\n        )\n        results, logs = run_dbt_and_capture([\"--debug\", \"run\"], expect_pass=True)\n        assert len(results) == 1\n        assert \"CREATE TABLE\" in logs\n        assert \"WITH (\" in logs\n        assert f\"location = 's3://datalake/{project.test_schema}/test_model'\" in logs\n"
  },
  {
    "path": "tests/functional/adapter/column_types/fixtures.py",
    "content": "model_sql = \"\"\"\nselect\n    cast(0 as tinyint) as tinyint_col,\n    cast(1 as smallint) as smallint_col,\n    cast(2 as integer) as integer_col,\n    cast(2 as int) as int_col,\n    cast(3 as bigint) as bigint_col,\n    cast(4.0 as real) as real_col,\n    cast(5.0 as double) as double_col,\n    cast(5.5 as double precision) as double_precision_col,\n    cast(6.0 as decimal) as decimal_col,\n    cast('7' as char) as char_col,\n    cast('8' as varchar(20)) as varchar_col\n\"\"\"\n\nschema_yml = \"\"\"\nversion: 2\nmodels:\n  - name: model\n    tests:\n      - is_type:\n          column_map:\n            tinyint_col: ['integer', 'number']\n            smallint_col: ['integer', 'number']\n            integer_col: ['integer', 'number']\n            int_col: ['integer', 'number']\n            bigint_col: ['integer', 'number']\n            real_col: ['float', 'number']\n            double_col: ['float', 'number']\n            double_precision_col: ['float', 'number']\n            decimal_col: ['numeric', 'number']\n            char_col: ['string', 'not number']\n            varchar_col: ['string', 'not number']\n\"\"\"\n"
  },
  {
    "path": "tests/functional/adapter/column_types/test_column_types.py",
    "content": "import pytest\nfrom dbt.tests.adapter.column_types.test_column_types import BaseColumnTypes\n\nfrom tests.functional.adapter.column_types.fixtures import model_sql, schema_yml\n\n\nclass TestTrinoColumnTypes(BaseColumnTypes):\n    @pytest.fixture(scope=\"class\")\n    def models(self):\n        return {\"model.sql\": model_sql, \"schema.yml\": schema_yml}\n\n    def test_run_and_test(self, project):\n        self.run_and_test()\n"
  },
  {
    "path": "tests/functional/adapter/constraints/fixtures.py",
    "content": "trino_model_contract_sql_header_sql = \"\"\"\n{{\n  config(\n    materialized = \"table\"\n  )\n}}\n\n{% call set_sql_header(config) %}\nset time zone 'Asia/Kolkata';\n{%- endcall %}\nselect current_timezone() as column_name\n\"\"\"\n\ntrino_model_incremental_contract_sql_header_sql = \"\"\"\n{{\n  config(\n    materialized = \"incremental\",\n    on_schema_change=\"append_new_columns\"\n  )\n}}\n\n{% call set_sql_header(config) %}\nset time zone 'Asia/Kolkata';\n{%- endcall %}\nselect current_timezone() as column_name\n\"\"\"\n\ntrino_model_schema_yml = \"\"\"\nversion: 2\nmodels:\n  - name: my_model\n    config:\n      contract:\n        enforced: true\n    columns:\n      - name: id\n        quote: true\n        data_type: integer\n        description: hello\n        constraints:\n          - type: not_null\n          - type: check\n            expression: (id > 0)\n        tests:\n          - unique\n      - name: color\n        data_type: varchar\n      - name: date_day\n        data_type: varchar\n  - name: my_model_error\n    config:\n      contract:\n        enforced: true\n    columns:\n      - name: id\n        data_type: integer\n        description: hello\n        constraints:\n          - type: not_null\n          - type: check\n            expression: (id > 0)\n        tests:\n          - unique\n      - name: color\n        data_type: varchar\n      - name: date_day\n        data_type: varchar\n  - name: my_model_wrong_order\n    config:\n      contract:\n        enforced: true\n    columns:\n      - name: id\n        data_type: integer\n        description: hello\n        constraints:\n          - type: not_null\n          - type: check\n            expression: (id > 0)\n        tests:\n          - unique\n      - name: color\n        data_type: varchar\n      - name: date_day\n        data_type: varchar\n  - name: my_model_wrong_name\n    config:\n      contract:\n        enforced: true\n    columns:\n      - name: id\n        data_type: integer\n        description: hello\n        constraints:\n          - type: not_null\n          - type: check\n            expression: (id > 0)\n        tests:\n          - unique\n      - name: color\n        data_type: varchar\n      - name: date_day\n        data_type: varchar\n\"\"\"\n\ntrino_constrained_model_schema_yml = \"\"\"\nversion: 2\nmodels:\n  - name: my_model\n    config:\n      contract:\n        enforced: true\n    constraints:\n      - type: check\n        expression: (id > 0)\n      - type: primary_key\n        columns: [ id ]\n      - type: unique\n        columns: [ color, date_day ]\n        name: strange_uniqueness_requirement\n    columns:\n      - name: id\n        quote: true\n        data_type: integer\n        description: hello\n        constraints:\n          - type: not_null\n        tests:\n          - unique\n      - name: color\n        data_type: varchar\n      - name: date_day\n        data_type: varchar\n\"\"\"\n\ntrino_model_quoted_column_schema_yml = \"\"\"\nversion: 2\nmodels:\n  - name: my_model\n    config:\n      contract:\n        enforced: true\n      materialized: table\n    constraints:\n      - type: check\n        # this one is the on the user\n        expression: (\"from\" = 'blue')\n        columns: [ '\"from\"' ]\n    columns:\n      - name: id\n        data_type: integer\n        description: hello\n        constraints:\n          - type: not_null\n        tests:\n          - unique\n      - name: from  # reserved word\n        quote: true\n        data_type: varchar\n   
     constraints:\n          - type: not_null\n      - name: date_day\n        data_type: varchar\n\"\"\"\n\ntrino_model_contract_header_schema_yml = \"\"\"\nversion: 2\nmodels:\n  - name: my_model_contract_sql_header\n    config:\n      contract:\n        enforced: true\n    columns:\n      - name: column_name\n        data_type: varchar\n\"\"\"\n"
  },
  {
    "path": "tests/functional/adapter/constraints/test_constraints.py",
    "content": "import pytest\nfrom dbt.tests.adapter.constraints.fixtures import (\n    my_incremental_model_sql,\n    my_model_incremental_wrong_name_sql,\n    my_model_incremental_wrong_order_sql,\n    my_model_sql,\n    my_model_view_wrong_name_sql,\n    my_model_view_wrong_order_sql,\n    my_model_with_quoted_column_name_sql,\n    my_model_wrong_name_sql,\n    my_model_wrong_order_sql,\n)\nfrom dbt.tests.adapter.constraints.test_constraints import (\n    BaseConstraintQuotedColumn,\n    BaseConstraintsRollback,\n    BaseConstraintsRuntimeDdlEnforcement,\n    BaseIncrementalConstraintsColumnsEqual,\n    BaseIncrementalConstraintsRollback,\n    BaseIncrementalConstraintsRuntimeDdlEnforcement,\n    BaseIncrementalContractSqlHeader,\n    BaseModelConstraintsRuntimeEnforcement,\n    BaseTableConstraintsColumnsEqual,\n    BaseTableContractSqlHeader,\n    BaseViewConstraintsColumnsEqual,\n)\n\nfrom tests.functional.adapter.constraints.fixtures import (\n    trino_constrained_model_schema_yml,\n    trino_model_contract_header_schema_yml,\n    trino_model_contract_sql_header_sql,\n    trino_model_incremental_contract_sql_header_sql,\n    trino_model_quoted_column_schema_yml,\n    trino_model_schema_yml,\n)\n\n_expected_sql_trino = \"\"\"\ncreate table <model_identifier> (\n    \"id\" integer not null,\n    color varchar,\n    date_day varchar\n) ;\ninsert into <model_identifier>\n(\n    select\n        \"id\",\n        color,\n        date_day from\n    (\n        select\n            'blue' as color,\n            1 as id,\n            '2019-01-01' as date_day\n    ) as model_subq\n)\n;\n\"\"\"\n\n\nclass TrinoColumnEqualSetup:\n    @pytest.fixture\n    def string_type(self):\n        return \"VARCHAR\"\n\n    @pytest.fixture\n    def data_types(self, schema_int_type, int_type, string_type):\n        # sql_column_value, schema_data_type, error_data_type\n        return [\n            [\"1\", schema_int_type, int_type],\n            [\"'1'\", string_type, string_type],\n            [\"cast('2019-01-01' as date)\", \"date\", \"DATE\"],\n            [\"true\", \"boolean\", \"BOOLEAN\"],\n            [\"cast('2013-11-03 00:00:00-07' as TIMESTAMP)\", \"timestamp(6)\", \"TIMESTAMP\"],\n            [\n                \"cast('2013-11-03 00:00:00-07' as TIMESTAMP WITH TIME ZONE)\",\n                \"timestamp(6)\",\n                \"TIMESTAMP\",\n            ],\n            [\"ARRAY['a','b','c']\", \"ARRAY(VARCHAR)\", \"ARRAY\"],\n            [\"ARRAY[1,2,3]\", \"ARRAY(INTEGER)\", \"ARRAY\"],\n            [\"cast('1' as DECIMAL)\", \"DECIMAL\", \"DECIMAL\"],\n        ]\n\n\n@pytest.mark.iceberg\nclass TestTrinoTableConstraintsColumnsEqual(\n    TrinoColumnEqualSetup, BaseTableConstraintsColumnsEqual\n):\n    @pytest.fixture(scope=\"class\")\n    def models(self):\n        return {\n            \"my_model_wrong_order.sql\": my_model_wrong_order_sql,\n            \"my_model_wrong_name.sql\": my_model_wrong_name_sql,\n            \"constraints_schema.yml\": trino_model_schema_yml,\n        }\n\n\nclass TestTrinoViewConstraintsColumnsEqual(TrinoColumnEqualSetup, BaseViewConstraintsColumnsEqual):\n    @pytest.fixture(scope=\"class\")\n    def models(self):\n        return {\n            \"my_model_wrong_order.sql\": my_model_view_wrong_order_sql,\n            \"my_model_wrong_name.sql\": my_model_view_wrong_name_sql,\n            \"constraints_schema.yml\": trino_model_schema_yml,\n        }\n\n\n@pytest.mark.iceberg\nclass TestTrinoIncrementalConstraintsColumnsEqual(\n    TrinoColumnEqualSetup, 
BaseIncrementalConstraintsColumnsEqual\n):\n    @pytest.fixture(scope=\"class\")\n    def models(self):\n        return {\n            \"my_model_wrong_order.sql\": my_model_incremental_wrong_order_sql,\n            \"my_model_wrong_name.sql\": my_model_incremental_wrong_name_sql,\n            \"constraints_schema.yml\": trino_model_schema_yml,\n        }\n\n\n@pytest.mark.iceberg\nclass TestTrinoTableConstraintsRuntimeDdlEnforcement(BaseConstraintsRuntimeDdlEnforcement):\n    @pytest.fixture(scope=\"class\")\n    def models(self):\n        return {\n            \"my_model.sql\": my_model_wrong_order_sql,\n            \"constraints_schema.yml\": trino_model_schema_yml,\n        }\n\n    @pytest.fixture(scope=\"class\")\n    def expected_sql(self):\n        return _expected_sql_trino\n\n\n@pytest.mark.iceberg\nclass TestTrinoTableConstraintsRollback(BaseConstraintsRollback):\n    @pytest.fixture(scope=\"class\")\n    def models(self):\n        return {\n            \"my_model.sql\": my_model_sql,\n            \"constraints_schema.yml\": trino_model_schema_yml,\n        }\n\n    @pytest.fixture(scope=\"class\")\n    def expected_error_messages(self):\n        return [\"NULL value not allowed for NOT NULL column: id\"]\n\n\n@pytest.mark.iceberg\nclass TestTrinoIncrementalConstraintsRuntimeDdlEnforcement(\n    BaseIncrementalConstraintsRuntimeDdlEnforcement\n):\n    @pytest.fixture(scope=\"class\")\n    def models(self):\n        return {\n            \"my_model.sql\": my_model_incremental_wrong_order_sql,\n            \"constraints_schema.yml\": trino_model_schema_yml,\n        }\n\n    @pytest.fixture(scope=\"class\")\n    def expected_sql(self):\n        return _expected_sql_trino\n\n\n@pytest.mark.iceberg\nclass TestTrinoIncrementalConstraintsRollback(BaseIncrementalConstraintsRollback):\n    @pytest.fixture(scope=\"class\")\n    def models(self):\n        return {\n            \"my_model.sql\": my_incremental_model_sql,\n            \"constraints_schema.yml\": trino_model_schema_yml,\n        }\n\n    @pytest.fixture(scope=\"class\")\n    def expected_error_messages(self):\n        return [\"NULL value not allowed for NOT NULL column: id\"]\n\n\nclass TestTrinoTableContractSqlHeader(BaseTableContractSqlHeader):\n    @pytest.fixture(scope=\"class\")\n    def models(self):\n        return {\n            \"my_model_contract_sql_header.sql\": trino_model_contract_sql_header_sql,\n            \"constraints_schema.yml\": trino_model_contract_header_schema_yml,\n        }\n\n\nclass TestTrinoIncrementalContractSqlHeader(BaseIncrementalContractSqlHeader):\n    @pytest.fixture(scope=\"class\")\n    def models(self):\n        return {\n            \"my_model_contract_sql_header.sql\": trino_model_incremental_contract_sql_header_sql,\n            \"constraints_schema.yml\": trino_model_contract_header_schema_yml,\n        }\n\n\n@pytest.mark.iceberg\nclass TestTrinoModelConstraintsRuntimeEnforcement(BaseModelConstraintsRuntimeEnforcement):\n    @pytest.fixture(scope=\"class\")\n    def models(self):\n        return {\n            \"my_model.sql\": my_model_sql,\n            \"constraints_schema.yml\": trino_constrained_model_schema_yml,\n        }\n\n    @pytest.fixture(scope=\"class\")\n    def expected_sql(self):\n        return \"\"\"\ncreate table <model_identifier> (\n    \"id\" integer not null,\n    color varchar,\n    date_day varchar\n) ;\ninsert into <model_identifier>\n(\n    select\n        \"id\",\n        color,\n        date_day from\n    (\n        select\n            1 as id,\n      
      'blue' as color,\n            '2019-01-01' as date_day\n    ) as model_subq\n)\n;\n\"\"\"\n\n\n@pytest.mark.iceberg\nclass TestTrinoConstraintQuotedColumn(BaseConstraintQuotedColumn):\n    @pytest.fixture(scope=\"class\")\n    def models(self):\n        return {\n            \"my_model.sql\": my_model_with_quoted_column_name_sql,\n            \"constraints_schema.yml\": trino_model_quoted_column_schema_yml,\n        }\n\n    @pytest.fixture(scope=\"class\")\n    def expected_sql(self):\n        return \"\"\"\ncreate table <model_identifier> (\n    id integer not null,\n    \"from\" varchar not null,\n    date_day varchar\n) ;\ninsert into <model_identifier>\n(\n    select id, \"from\", date_day\n    from (\n        select\n          'blue' as \"from\",\n          1 as id,\n          '2019-01-01' as date_day\n    ) as model_subq\n);\n\"\"\"\n"
  },
  {
    "path": "tests/functional/adapter/dbt_clone/test_dbt_clone.py",
    "content": "import pytest\nfrom dbt.tests.adapter.dbt_clone.fixtures import (\n    custom_can_clone_tables_false_macros_sql,\n    get_schema_name_sql,\n    infinite_macros_sql,\n    macros_sql,\n)\nfrom dbt.tests.adapter.dbt_clone.test_dbt_clone import BaseCloneNotPossible\n\niceberg_macro_override_sql = \"\"\"\n{% macro trino__current_timestamp() -%}\n    current_timestamp(6)\n{%- endmacro %}\n\"\"\"\n\n\nclass TestTrinoCloneNotPossible(BaseCloneNotPossible):\n    @pytest.fixture(scope=\"class\")\n    def macros(self):\n        return {\n            \"macros.sql\": macros_sql,\n            \"my_can_clone_tables.sql\": custom_can_clone_tables_false_macros_sql,\n            \"infinite_macros.sql\": infinite_macros_sql,\n            \"get_schema_name.sql\": get_schema_name_sql,\n            \"iceberg.sql\": iceberg_macro_override_sql,\n        }\n\n    # TODO: below method probably should be implemented in base class (on dbt-core side)\n    @pytest.fixture(autouse=True)\n    def clean_up(self, project):\n        yield\n        with project.adapter.connection_named(\"__test\"):\n            relation = project.adapter.Relation.create(\n                database=project.database, schema=f\"{project.test_schema}_seeds\"\n            )\n            project.adapter.drop_schema(relation)\n\n            relation = project.adapter.Relation.create(\n                database=project.database, schema=project.test_schema\n            )\n            project.adapter.drop_schema(relation)\n"
  },
  {
    "path": "tests/functional/adapter/dbt_debug/test_dbt_debug.py",
    "content": "import pytest\nfrom dbt.tests.adapter.dbt_debug.test_dbt_debug import (\n    BaseDebug,\n    BaseDebugProfileVariable,\n)\nfrom dbt.tests.util import run_dbt\n\n\nclass TestDebugTrino(BaseDebug):\n    # TODO: below teardown method probably should be implemented in base class (on dbt-core side)\n    @pytest.fixture(scope=\"function\", autouse=True)\n    def teardown_method(self, project):\n        yield\n        project.run_sql(f\"drop schema if exists {project.test_schema}\")\n\n    def test_ok_trino(self, project):\n        run_dbt([\"debug\"])\n        assert \"ERROR\" not in self.capsys.readouterr().out\n\n\nclass TestDebugProfileVariableTrino(BaseDebugProfileVariable):\n    # TODO: below teardown method probably should be implemented in base class (on dbt-core side)\n    @pytest.fixture(scope=\"function\", autouse=True)\n    def teardown_method(self, project):\n        yield\n        project.run_sql(f\"drop schema if exists {project.test_schema}\")\n\n    def test_ok_trino(self, project):\n        run_dbt([\"debug\"])\n        assert \"ERROR\" not in self.capsys.readouterr().out\n"
  },
  {
    "path": "tests/functional/adapter/dbt_show/test_dbt_show.py",
    "content": "from dbt.tests.adapter.dbt_show.test_dbt_show import BaseShowLimit, BaseShowSqlHeader\n\n\nclass TestTrinoShowSqlHeader(BaseShowSqlHeader):\n    pass\n\n\nclass TestTrinoShowLimit(BaseShowLimit):\n    pass\n"
  },
  {
    "path": "tests/functional/adapter/empty/test_empty.py",
    "content": "from dbt.tests.adapter.empty.test_empty import (\n    BaseTestEmpty,\n    BaseTestEmptyInlineSourceRef,\n)\n\n\nclass TestTrinoEmpty(BaseTestEmpty):\n    pass\n\n\nclass TestTrinoEmptyInlineSourceRef(BaseTestEmptyInlineSourceRef):\n    pass\n"
  },
  {
    "path": "tests/functional/adapter/fixture_datediff.py",
    "content": "seeds__data_datediff_csv = \"\"\"first_date,second_date,datepart,result\n2018-01-01 01:00:00,2018-01-02 01:00:00,day,1\n2018-01-01 01:00:00,2018-02-01 01:00:00,month,1\n2018-01-01 01:00:00,2019-01-01 01:00:00,year,1\n2018-01-01 01:00:00,2018-01-01 02:00:00,hour,1\n2018-01-01 01:00:00,2018-01-01 02:01:00,minute,61\n2018-01-01 01:00:00,2018-01-01 02:00:01,second,3601\n2019-12-31 00:00:00,2019-12-27 00:00:00,week,-1\n2019-12-31 00:00:00,2019-12-30 00:00:00,week,0\n2019-12-31 00:00:00,2020-01-02 00:00:00,week,0\n2019-12-31 00:00:00,2020-01-06 02:00:00,week,1\n,2018-01-01 02:00:00,hour,\n2018-01-01 02:00:00,,hour,\n\"\"\"\n\n\nmodels__test_datediff_sql = \"\"\"\nwith data as (\n\n    select * from {{ ref('data_datediff') }}\n\n)\n\nselect\n\n    case\n        when datepart = 'second' then {{ datediff('first_date', 'second_date', 'second') }}\n        when datepart = 'minute' then {{ datediff('first_date', 'second_date', 'minute') }}\n        when datepart = 'hour' then {{ datediff('first_date', 'second_date', 'hour') }}\n        when datepart = 'day' then {{ datediff('first_date', 'second_date', 'day') }}\n        when datepart = 'week' then {{ datediff('first_date', 'second_date', 'week') }}\n        when datepart = 'month' then {{ datediff('first_date', 'second_date', 'month') }}\n        when datepart = 'year' then {{ datediff('first_date', 'second_date', 'year') }}\n        else null\n    end as actual,\n    result as expected\n\nfrom data\n\n-- Also test correct casting of literal values.\n\nunion all select {{ datediff(\"'1999-12-31 23:59:59.999000'\", \"'2000-01-01 00:00:00.000000'\", \"millisecond\") }} as actual, 1 as expected\nunion all select {{ datediff(\"'1999-12-31 23:59:59.999000'\", \"'2000-01-01 00:00:00.000000'\", \"second\") }} as actual, 1 as expected\nunion all select {{ datediff(\"'1999-12-31 23:59:59.999000'\", \"'2000-01-01 00:00:00.000000'\", \"minute\") }} as actual, 1 as expected\nunion all select {{ datediff(\"'1999-12-31 23:59:59.999000'\", \"'2000-01-01 00:00:00.000000'\", \"hour\") }} as actual, 1 as expected\nunion all select {{ datediff(\"'1999-12-31 23:59:59.999000'\", \"'2000-01-01 00:00:00.000000'\", \"day\") }} as actual, 1 as expected\nunion all select {{ datediff(\"'1999-12-31 23:59:59.999000'\", \"'2000-01-03 00:00:00.000000'\", \"week\") }} as actual, 1 as expected\nunion all select {{ datediff(\"'1999-12-31 23:59:59.999000'\", \"'2000-01-01 00:00:00.000000'\", \"month\") }} as actual, 1 as expected\nunion all select {{ datediff(\"'1999-12-31 23:59:59.999000'\", \"'2000-01-01 00:00:00.000000'\", \"quarter\") }} as actual, 1 as expected\nunion all select {{ datediff(\"'1999-12-31 23:59:59.999000'\", \"'2000-01-01 00:00:00.000000'\", \"year\") }} as actual, 1 as expected\n\"\"\"\n"
  },
  {
    "path": "tests/functional/adapter/hooks/data/seed_model.sql",
    "content": "drop table if exists {schema}.on_model_hook;\n\ncreate table {schema}.on_model_hook (\n    test_state       VARCHAR, -- start|end\n    target_dbname    VARCHAR,\n    target_host      VARCHAR,\n    target_name      VARCHAR,\n    target_schema    VARCHAR,\n    target_type      VARCHAR,\n    target_user      VARCHAR,\n    target_pass      VARCHAR,\n    target_threads   INTEGER,\n    run_started_at   VARCHAR,\n    invocation_id    VARCHAR,\n    thread_id        VARCHAR\n);\n"
  },
  {
    "path": "tests/functional/adapter/hooks/data/seed_run.sql",
    "content": "drop table if exists {schema}.on_run_hook;\n\ncreate table {schema}.on_run_hook (\n    test_state       VARCHAR, -- start|end\n    target_dbname    VARCHAR,\n    target_host      VARCHAR,\n    target_name      VARCHAR,\n    target_schema    VARCHAR,\n    target_type      VARCHAR,\n    target_user      VARCHAR,\n    target_pass      VARCHAR,\n    target_threads   INTEGER,\n    run_started_at   VARCHAR,\n    invocation_id    VARCHAR,\n    thread_id        VARCHAR\n);\n"
  },
  {
    "path": "tests/functional/adapter/hooks/test_hooks_delete.py",
    "content": "# Test hooks with DELETE statement\nimport pytest\nfrom dbt.tests.util import run_dbt, run_sql_with_adapter\n\nseed = \"\"\"\nid,name,some_date\n1,Easton,1981-05-20\n2,Lillian,1978-09-03\n3,Jeremiah,1982-03-11\n4,Nolan,1976-05-06\n5,Hannah,1982-06-23ľ\n6,Eleanor,1991-08-10\n7,Lily,1971-03-29\n8,Jonathan,1988-02-26\n9,Adrian,1994-02-09\n10,Nora,1976-03-01\n\"\"\".lstrip()\n\nmodel = \"\"\"\n  {{ config(\n        materialized=\"table\",\n        on_table_exists = 'drop'\n     )\n  }}\n  select * from {{ ref('seed') }}\n\"\"\"\n\n\nclass BaseTestHooksDelete:\n    @pytest.fixture(scope=\"class\")\n    def seeds(self):\n        return {\n            \"seed.csv\": seed,\n        }\n\n    @pytest.fixture(scope=\"class\")\n    def models(self):\n        return {\n            \"model.sql\": model,\n        }\n\n    @pytest.fixture(scope=\"class\")\n    def project_config_update(self):\n        return {\n            \"models\": {\n                \"pre-hook\": \"DELETE FROM seed WHERE name IN ('Jeremiah','Eleanor');\",\n                \"post-hook\": \"DELETE FROM seed WHERE name IN ('Nolan','Jonathan','Nora');\",\n            }\n        }\n\n    def test_pre_and_post_run_hooks(self, project, dbt_profile_target):\n        # Run seed\n        results = run_dbt([\"seed\"], expect_pass=True)\n        assert len(results) == 1\n\n        # Check if table has all rows\n        sql_seed = \"SELECT COUNT(*) from seed\"\n        query_results = run_sql_with_adapter(project.adapter, sql_seed, fetch=\"all\")\n        assert query_results[0][0] == 10\n\n        # Run model, hooks should run DELETE statements\n        results = run_dbt([\"run\"], expect_pass=True)\n        assert len(results) == 1\n\n        # 2 rows were deleted in pre-hook\n        sql_model = \"SELECT COUNT(*) from model\"\n        query_results = run_sql_with_adapter(project.adapter, sql_model, fetch=\"all\")\n        assert query_results[0][0] == 8\n\n        # 2 rows were deleted in pre-hook, and 3 in post-hook\n        query_results = run_sql_with_adapter(project.adapter, sql_seed, fetch=\"all\")\n        assert query_results[0][0] == 5\n\n\n@pytest.mark.delta\nclass TestBaseTestHooksDeleteDelta(BaseTestHooksDelete):\n    pass\n\n\n@pytest.mark.iceberg\nclass TestBaseTestHooksDeleteIceberg(BaseTestHooksDelete):\n    pass\n"
  },
  {
    "path": "tests/functional/adapter/hooks/test_model_hooks.py",
    "content": "import pytest\nfrom dbt.tests.adapter.hooks import test_model_hooks as core_base\n\n\nclass TestTrinoPrePostModelHooks(core_base.TestPrePostModelHooks):\n    def check_hooks(self, state, project, host, count=1):\n        self.get_ctx_vars(state, count=count, project=project)\n\n\nclass TestTrinoPrePostModelHooksUnderscores(core_base.TestPrePostModelHooksUnderscores):\n    def check_hooks(self, state, project, host, count=1):\n        self.get_ctx_vars(state, count=count, project=project)\n\n\nclass TestTrinoHookRefs(core_base.TestHookRefs):\n    def check_hooks(self, state, project, host, count=1):\n        self.get_ctx_vars(state, count=count, project=project)\n\n\n@pytest.mark.iceberg\nclass TestTrinoPrePostModelHooksOnSeeds(core_base.TestPrePostModelHooksOnSeeds):\n    def check_hooks(self, state, project, host, count=1):\n        self.get_ctx_vars(state, count=count, project=project)\n\n    @pytest.fixture(scope=\"class\")\n    def project_config_update(self):\n        return {\n            \"seed-paths\": [\"seeds\"],\n            \"models\": {},\n            \"seeds\": {\n                \"+post-hook\": [\n                    \"alter table {{ this }} add column new_col int\",\n                    \"update {{ this }} set new_col = 1 where 1=1\",\n                ],\n                \"quote_columns\": True,\n            },\n        }\n"
  },
  {
    "path": "tests/functional/adapter/hooks/test_run_hooks.py",
    "content": "import pytest\nfrom dbt.tests.adapter.hooks.test_run_hooks import (\n    BaseAfterRunHooks,\n    BasePrePostRunHooks,\n)\n\n\nclass TestPrePostRunHooksTrino(BasePrePostRunHooks):\n    @pytest.fixture(scope=\"class\")\n    def project_config_update(self):\n        return {\n            # The create and drop table statements here validate that these hooks run\n            # in the same order that they are defined. Drop before create is an error.\n            # Also check that the table does not exist below.\n            \"on-run-start\": [\n                \"{{ custom_run_hook('start', target, run_started_at, invocation_id) }}\",\n                \"create table {{ target.schema }}.start_hook_order_test ( id int )\",\n                \"drop table {{ target.schema }}.start_hook_order_test\",\n                \"{{ log(env_var('TERM_TEST'), info=True) }}\",\n            ],\n            \"on-run-end\": [\n                \"{{ custom_run_hook('end', target, run_started_at, invocation_id) }}\",\n                \"create table {{ target.schema }}.end_hook_order_test ( id int )\",\n                \"drop table {{ target.schema }}.end_hook_order_test\",\n                \"create table {{ target.schema }}.schemas ( schema varchar )\",\n                \"insert into {{ target.schema }}.schemas (schema) values {% for schema in schemas %}( '{{ schema }}' ){% if not loop.last %},{% endif %}{% endfor %}\",\n                \"create table {{ target.schema }}.db_schemas ( db varchar, schema varchar )\",\n                \"insert into {{ target.schema }}.db_schemas (db, schema) values {% for db, schema in database_schemas %}('{{ db }}', '{{ schema }}' ){% if not loop.last %},{% endif %}{% endfor %}\",\n            ],\n            \"seeds\": {\n                \"quote_columns\": False,\n            },\n        }\n\n    def check_hooks(self, state, project, host):\n        ctx = self.get_ctx_vars(state, project)\n\n        assert ctx[\"test_state\"] == state\n        assert ctx[\"target_dbname\"] == \"\"\n        assert ctx[\"target_host\"] == host\n        assert ctx[\"target_name\"] == \"default\"\n        assert ctx[\"target_schema\"] == project.test_schema\n        assert ctx[\"target_threads\"] == 4\n        assert ctx[\"target_type\"] == project.adapter_type\n        assert \"admin\" in ctx[\"target_user\"]\n        assert ctx[\"target_pass\"] == \"\"\n\n        assert (\n            ctx[\"run_started_at\"] is not None and len(ctx[\"run_started_at\"]) > 0\n        ), \"run_started_at was not set\"\n        assert (\n            ctx[\"invocation_id\"] is not None and len(ctx[\"invocation_id\"]) > 0\n        ), \"invocation_id was not set\"\n\n\nclass TestAfterRunHooksTrino(BaseAfterRunHooks):\n    pass\n"
  },
  {
    "path": "tests/functional/adapter/materialization/fixtures.py",
    "content": "seed_csv = \"\"\"\nid,name,some_date\n1,Easton,1981-05-20 06:46:51\n2,Lillian,1978-09-03 18:10:33\n3,Jeremiah,1982-03-11 03:59:51\n4,Nolan,1976-05-06 20:21:35\n\"\"\".lstrip()\n\nmodel_sql = \"\"\"\nselect * from {{ ref('seed') }}\n\"\"\"\n\nmodel_cte_sql = \"\"\"\nwith source_data as (\n    select 1 as id, 'aaa' as field1, 'bbb' as field2, 111 as field3, 'TTT' as field4\n)\nselect id\n       ,field1\n       ,field2\n       ,field3\n       ,field4\nfrom source_data\n\"\"\"\n\n\nprofile_yml = \"\"\"\nversion: 2\nmodels:\n  - name: materialization\n    columns:\n      - name: id\n        tests:\n          - unique\n          - not_null\n      - name: name\n        tests:\n          - not_null\n\"\"\"\n\n\nschema_base_yml = \"\"\"\\\nversion: 2\n\nmodels:\n  - name: model_a\n    columns:\n      - name: id\n        tests:\n          - unique\n\n  - name: incremental_ignore\n    columns:\n      - name: id\n        tests:\n          - unique\n\n  - name: incremental_ignore_target\n    columns:\n      - name: id\n        tests:\n          - unique\n\n  - name: incremental_append_new_columns\n    columns:\n      - name: id\n        tests:\n          - unique\n\n  - name: incremental_append_new_columns_target\n    columns:\n      - name: id\n        tests:\n          - unique\n\n  - name: incremental_append_new_columns_remove_one\n    columns:\n      - name: id\n        tests:\n          - unique\n\n  - name: incremental_append_new_columns_remove_one_target\n    columns:\n      - name: id\n        tests:\n          - unique\n\n  - name: incremental_sync_all_columns\n    columns:\n      - name: id\n        tests:\n          - unique\n\n  - name: incremental_sync_all_columns_target\n    columns:\n      - name: id\n        tests:\n          - unique\n\n  - name: incremental_sync_all_columns_quoted\n    columns:\n      - name: id\n        tests:\n          - unique\n\n  - name: incremental_sync_all_columns_quoted_target\n    columns:\n      - name: id\n        tests:\n          - unique\n\n  - name: incremental_sync_all_columns_diff_data_types\n    columns:\n      - name: id\n        tests:\n          - unique\n\n  - name: incremental_sync_all_columns_diff_data_types_target\n    columns:\n      - name: id\n        tests:\n          - unique\n\"\"\"\n\nmodel_a_sql = \"\"\"\\\n{{\n    config(materialized='table')\n}}\n\nwith source_data as (\n\n    select 1 as id, 'aaa' as field1, 'bbb' as field2, 111 as field3, 'TTT' as field4\n    union all select 2 as id, 'ccc' as field1, 'ddd' as field2, 222 as field3, 'UUU' as field4\n    union all select 3 as id, 'eee' as field1, 'fff' as field2, 333 as field3, 'VVV' as field4\n    union all select 4 as id, 'ggg' as field1, 'hhh' as field2, 444 as field3, 'WWW' as field4\n    union all select 5 as id, 'iii' as field1, 'jjj' as field2, 555 as field3, 'XXX' as field4\n    union all select 6 as id, 'kkk' as field1, 'lll' as field2, 666 as field3, 'YYY' as field4\n\n)\n\nselect id\n       ,field1\n       ,field2\n       ,field3\n       ,field4\n\nfrom source_data\n\"\"\"\n\nincremental_ignore_sql = \"\"\"\\\n{{\n    config(\n        materialized='incremental',\n        unique_key='id',\n        on_schema_change='ignore'\n    )\n}}\n\nWITH source_data AS (SELECT * FROM {{ ref('model_a') }} )\n\n{% if is_incremental() %}\n\nSELECT id, field1, field2, field3, field4 FROM source_data WHERE id NOT IN (SELECT id from {{ this }} )\n\n{% else %}\n\nSELECT id, field1, field2 FROM source_data LIMIT 3\n\n{% endif %}\n\"\"\"\n\nincremental_ignore_target_sql = 
\"\"\"\\\n{{\n    config(materialized='table')\n}}\n\nwith source_data as (\n\n    select * from {{ ref('model_a') }}\n\n)\n\nselect id\n       ,field1\n       ,field2\n\nfrom source_data\n\"\"\"\n\nincremental_append_new_columns = \"\"\"\\\n{{\n    config(\n        materialized='incremental',\n        unique_key='id',\n        on_schema_change='append_new_columns'\n    )\n}}\n\nWITH source_data AS (SELECT * FROM {{ ref('model_a') }} )\n\n{% if is_incremental()  %}\n\nSELECT id,\n       cast(field1 as varchar) as field1,\n       cast(field2 as varchar) as field2,\n       cast(field3 as varchar) as field3,\n       cast(field4 as varchar) as field4\nFROM source_data WHERE id NOT IN (SELECT id from {{ this }} )\n\n{% else %}\n\nSELECT id,\n       cast(field1 as varchar) as field1,\n       cast(field2 as varchar) as field2\nFROM source_data where id <= 3\n\n{% endif %}\n\"\"\"\n\nincremental_append_new_columns_target_sql = \"\"\"\\\n{{\n    config(materialized='table')\n}}\n\nwith source_data as (\n\n    select * from {{ ref('model_a') }}\n\n)\n\nselect id\n       ,cast(field1 as varchar) as field1\n       ,cast(field2 as varchar) as field2\n       ,cast(CASE WHEN id <= 3 THEN NULL ELSE field3 END as varchar) AS field3\n       ,cast(CASE WHEN id <= 3 THEN NULL ELSE field4 END as varchar) AS field4\n\nfrom source_data\n\"\"\"\n\nincremental_append_new_columns_remove_one_sql = \"\"\"\\\n{{\n    config(\n        materialized='incremental',\n        unique_key='id',\n        on_schema_change='append_new_columns'\n    )\n}}\n\nWITH source_data AS (SELECT * FROM {{ ref('model_a') }} )\n\n{% if is_incremental()  %}\n\nSELECT id,\n       cast(field1 as varchar) as field1,\n       cast(field3 as varchar) as field3,\n       cast(field4 as varchar) as field4\nFROM source_data WHERE id NOT IN (SELECT id from {{ this }} )\n\n{% else %}\n\nSELECT id,\n       cast(field1 as varchar) as field1,\n       cast(field2 as varchar) as field2\nFROM source_data where id <= 3\n\n{% endif %}\n\"\"\"\n\nincremental_append_new_columns_remove_one_target_sql = \"\"\"\\\n{{\n    config(materialized='table')\n}}\nwith source_data as (\n\n    select * from {{ ref('model_a') }}\n\n)\n\nselect id,\n       cast(field1 as varchar) as field1,\n       cast(CASE WHEN id >  3 THEN NULL ELSE field2 END as varchar) AS field2,\n       cast(CASE WHEN id <= 3 THEN NULL ELSE field3 END as varchar) AS field3,\n       cast(CASE WHEN id <= 3 THEN NULL ELSE field4 END as varchar) AS field4\n\nfrom source_data\n\"\"\"\n\n\nincremental_fail_sql = \"\"\"\\\n{{\n    config(\n        materialized='incremental',\n        unique_key='id',\n        on_schema_change='fail'\n    )\n}}\n\nWITH source_data AS (SELECT * FROM {{ ref('model_a') }} )\n\n{% if is_incremental()  %}\n\nSELECT id, field1, field2 FROM source_data\n\n{% else %}\n\nSELECT id, field1, field3 FROm source_data\n\n{% endif %}\n\"\"\"\n\nincremental_sync_all_columns_sql = \"\"\"\\\n{{\n    config(\n        materialized='incremental',\n        unique_key='id',\n        on_schema_change='sync_all_columns'\n\n    )\n}}\n\nWITH source_data AS (SELECT * FROM {{ ref('model_a') }} )\n\n{% if is_incremental() %}\n\nSELECT id,\n       cast(field1 as varchar) as field1,\n       cast(field3 as varchar) as field3, -- to validate new fields\n       cast(field4 as varchar) AS field4 -- to validate new fields\n\nFROM source_data WHERE id NOT IN (SELECT id from {{ this }} )\n\n{% else %}\n\nselect id,\n       cast(field1 as varchar) as field1,\n       cast(field2 as varchar) as field2\n\nfrom 
source_data where id <= 3\n\n{% endif %}\n\"\"\"\n\nincremental_sync_all_columns_target_sql = \"\"\"\\\n{{\n    config(materialized='table')\n}}\n\nwith source_data as (\n\n    select * from {{ ref('model_a') }}\n\n)\nselect id\n       ,cast(field1 as varchar) as field1\n       --,field2\n       ,cast(case when id <= 3 then null else field3 end as varchar) as field3\n       ,cast(case when id <= 3 then null else field4 end as varchar) as field4\n\nfrom source_data\norder by id\n\"\"\"\n\nincremental_sync_all_columns_quoted_sql = \"\"\"\\\n{{\n    config(\n        materialized='incremental',\n        unique_key='id',\n        on_schema_change='sync_all_columns'\n\n    )\n}}\n\nWITH source_data AS (SELECT * FROM {{ ref('model_a') }} )\n\n{% if is_incremental() %}\n\nSELECT id,\n       cast(field1 as varchar) as field1,\n       cast(field3 as varchar) as \"3field3\", -- to validate new fields\n       cast(field4 as varchar) AS \"4field4\" -- to validate new fields\n\nFROM source_data WHERE id NOT IN (SELECT id from {{ this }} )\n\n{% else %}\n\nselect id,\n       cast(field1 as varchar) as field1,\n       cast(field2 as varchar) as \"2field2\"\n\nfrom source_data where id <= 3\n\n{% endif %}\n\"\"\"\n\nincremental_sync_all_columns_quoted_target_sql = \"\"\"\\\n{{\n    config(materialized='table')\n}}\n\nwith source_data as (\n\n    select * from {{ ref('model_a') }}\n\n)\nselect id\n       ,cast(field1 as varchar) as field1\n       --,field2\n       ,cast(case when id <= 3 then null else field3 end as varchar) as \"3field3\"\n       ,cast(case when id <= 3 then null else field4 end as varchar) as \"4field4\"\n\nfrom source_data\norder by id\n\"\"\"\n\nincremental_sync_all_columns_diff_data_types_sql = \"\"\"\\\n{{\n    config(\n        materialized='incremental',\n        unique_key='id',\n        on_schema_change='sync_all_columns'\n    )\n}}\n\nWITH source_data AS (SELECT * FROM {{ ref('model_a') }} )\n\n{% if is_incremental() %}\n\nSELECT id,\n       cast(id as varchar) \"field1\" -- to validate data type changes\n\nFROM source_data WHERE id NOT IN (SELECT id from {{ this }} )\n\n{% else %}\n\nselect id,\n       id \"field1\"\n\nfrom source_data where id <= 3\norder by id\n{% endif %}\n\"\"\"\n\nincremental_sync_all_columns_diff_data_types_target_sql = \"\"\"\\\n{{\n    config(\n        materialized='table'\n    )\n}}\n\nWITH source_data AS (SELECT * FROM {{ ref('model_a') }} )\n\nselect id,\n       cast(id as varchar) \"field1\"\n\nfrom source_data\norder by id\n\"\"\"\n\nselect_from_a_sql = \"select * from {{ ref('model_a') }} where false\"\n\nselect_from_incremental_append_new_columns_sql = (\n    \"select * from {{ ref('incremental_append_new_columns') }} where false\"\n)\n\nselect_from_incremental_append_new_columns_remove_one_sql = (\n    \"select * from {{ ref('incremental_append_new_columns_remove_one') }} where false\"\n)\n\nselect_from_incremental_append_new_columns_remove_one_target_sql = (\n    \"select * from {{ ref('incremental_append_new_columns_remove_one_target') }} where false\"\n)\n\nselect_from_incremental_append_new_columns_target_sql = (\n    \"select * from {{ ref('incremental_append_new_columns_target') }} where false\"\n)\n\nselect_from_incremental_ignore_sql = \"select * from {{ ref('incremental_ignore') }} where false\"\n\nselect_from_incremental_ignore_target_sql = (\n    \"select * from {{ ref('incremental_ignore_target') }} where false\"\n)\n\nselect_from_incremental_sync_all_columns_sql = (\n    \"select * from {{ ref('incremental_sync_all_columns') }} where 
false\"\n)\n\nselect_from_incremental_sync_all_columns_target_sql = (\n    \"select * from {{ ref('incremental_sync_all_columns_target') }} where false\"\n)\n\nselect_from_incremental_sync_all_columns_quoted_sql = (\n    \"select * from {{ ref('incremental_sync_all_columns_quoted') }} where false\"\n)\n\nselect_from_incremental_sync_all_columns_quoted_target_sql = (\n    \"select * from {{ ref('incremental_sync_all_columns_quoted_target') }} where false\"\n)\n\nselect_from_incremental_sync_all_columns_diff_data_types_sql = (\n    \"select * from {{ ref('incremental_sync_all_columns_diff_data_types') }} where false\"\n)\n\nselect_from_incremental_sync_all_columns_diff_data_types_target_sql = (\n    \"select * from {{ ref('incremental_sync_all_columns_diff_data_types_target') }} where false\"\n)\n"
  },
  {
    "path": "tests/functional/adapter/materialization/test_incremental_delete_insert.py",
    "content": "import pytest\nfrom dbt.tests.adapter.incremental.test_incremental_predicates import (\n    BaseIncrementalPredicates,\n    models__delete_insert_incremental_predicates_sql,\n    seeds__expected_delete_insert_incremental_predicates_csv,\n)\nfrom dbt.tests.adapter.incremental.test_incremental_unique_id import (\n    BaseIncrementalUniqueKey,\n    models__duplicated_unary_unique_key_list_sql,\n    models__empty_str_unique_key_sql,\n    models__empty_unique_key_list_sql,\n    models__no_unique_key_sql,\n    models__nontyped_trinary_unique_key_list_sql,\n    models__not_found_unique_key_list_sql,\n    models__not_found_unique_key_sql,\n    models__str_unique_key_sql,\n    models__trinary_unique_key_list_sql,\n    models__unary_unique_key_list_sql,\n    seeds__seed_csv,\n)\nfrom dbt.tests.util import run_dbt_and_capture\n\nseeds__duplicate_insert_sql = \"\"\"\n-- Insert statement which when applied to seed.csv triggers the inplace\n--   overwrite strategy of incremental models. Seed and incremental model\n--   diverge.\n\n-- insert new row, which should not be in incremental model\n--  with primary or first three columns unique\ninsert into {schema}.seed\n    (state, county, city, last_visit_date)\nvalues ('CT','Hartford','Hartford',DATE '2022-02-14');\n\n\"\"\"\n\nseeds__add_new_rows_sql = \"\"\"\n-- Insert statement which when applied to seed.csv sees incremental model\n--   grow in size while not (necessarily) diverging from the seed itself.\n\n-- insert two new rows, both of which should be in incremental model\n--   with any unique columns\ninsert into {schema}.seed\n    (state, county, city, last_visit_date)\nvalues ('WA','King','Seattle',DATE '2022-02-01');\n\ninsert into {schema}.seed\n    (state, county, city, last_visit_date)\nvalues ('CA','Los Angeles','Los Angeles',DATE '2022-02-01');\n\n\"\"\"\n\nmodels__expected__one_str__overwrite_sql = \"\"\"\n{{\n    config(\n        materialized='table'\n    )\n}}\n\nselect\n    'CT' as state,\n    'Hartford' as county,\n    'Hartford' as city,\n    cast('2022-02-14' as date) as last_visit_date\nunion all\nselect 'MA','Suffolk','Boston',DATE '2020-02-12'\nunion all\nselect 'NJ','Mercer','Trenton',DATE '2022-01-01'\nunion all\nselect 'NY','Kings','Brooklyn',DATE '2021-04-02'\nunion all\nselect 'NY','New York','Manhattan',DATE '2021-04-01'\nunion all\nselect 'PA','Philadelphia','Philadelphia',DATE '2021-05-21'\nunion all\nselect 'CO','Denver',null,DATE '2021-06-18'\n\n\"\"\"\n\nmodels__expected__unique_key_list__inplace_overwrite_sql = \"\"\"\n{{\n    config(\n        materialized='table'\n    )\n}}\n\nselect\n    'CT' as state,\n    'Hartford' as county,\n    'Hartford' as city,\n    cast('2022-02-14' as date) as last_visit_date\nunion all\nselect 'MA','Suffolk','Boston',DATE '2020-02-12'\nunion all\nselect 'NJ','Mercer','Trenton',DATE '2022-01-01'\nunion all\nselect 'NY','Kings','Brooklyn',DATE '2021-04-02'\nunion all\nselect 'NY','New York','Manhattan',DATE '2021-04-01'\nunion all\nselect 'PA','Philadelphia','Philadelphia',DATE '2021-05-21'\nunion all\nselect 'CO','Denver',null,DATE '2021-06-18'\n\n\"\"\"\n\nmodels__location_specified = \"\"\"\n{{\n    config(\n        materialized='incremental',\n        incremental_strategy='delete+insert',\n        unique_key=['state', 'county', 'city'],\n        properties= {\n            \"location\": \"'s3a://datalake/model'\"\n        }\n    )\n}}\n\nselect\n    'CT' as state,\n    'Hartford' as county,\n    'Hartford' as city,\n    cast('2022-02-14' as date) as last_visit_date\nunion 
all\nselect 'MA','Suffolk','Boston',DATE '2020-02-12'\nunion all\nselect 'NJ','Mercer','Trenton',DATE '2022-01-01'\nunion all\nselect 'NY','Kings','Brooklyn',DATE '2021-04-02'\nunion all\nselect 'NY','New York','Manhattan',DATE '2021-04-01'\nunion all\nselect 'PA','Philadelphia','Philadelphia',DATE '2021-05-21'\n\n\"\"\"\n\nmodels__delete_insert_composite_keys_sql = \"\"\"\n{{\n    config(\n        materialized='incremental',\n        incremental_strategy='delete+insert',\n        unique_key=['id', 'col']\n    )\n}}\nselect 1 as id, 1 as col\nunion all\nselect 1 as id, 3 as col\nunion all\nselect 3 as id, 1 as col\nunion all\nselect 3 as id, 3 as col\n\n{% if is_incremental() %}\n\nexcept\n(select 1 as id, 1 as col\nunion all\nselect 3 as id, 3 as col)\n\n{% endif %}\n\"\"\"\n\nseeds__expected_delete_insert_composite_keys_csv = \"\"\"id,col\n1,1\n1,3\n3,1\n3,3\n\"\"\"\n\n\nclass TrinoIncrementalUniqueKey(BaseIncrementalUniqueKey):\n    @pytest.fixture(scope=\"class\")\n    def seeds(self):\n        return {\n            \"duplicate_insert.sql\": seeds__duplicate_insert_sql,\n            \"seed.csv\": seeds__seed_csv,\n            \"add_new_rows.sql\": seeds__add_new_rows_sql,\n        }\n\n    @pytest.fixture(scope=\"class\")\n    def models(self):\n        return {\n            \"trinary_unique_key_list.sql\": models__trinary_unique_key_list_sql,\n            \"nontyped_trinary_unique_key_list.sql\": models__nontyped_trinary_unique_key_list_sql,\n            \"unary_unique_key_list.sql\": models__unary_unique_key_list_sql,\n            \"not_found_unique_key.sql\": models__not_found_unique_key_sql,\n            \"empty_unique_key_list.sql\": models__empty_unique_key_list_sql,\n            \"no_unique_key.sql\": models__no_unique_key_sql,\n            \"empty_str_unique_key.sql\": models__empty_str_unique_key_sql,\n            \"str_unique_key.sql\": models__str_unique_key_sql,\n            \"duplicated_unary_unique_key_list.sql\": models__duplicated_unary_unique_key_list_sql,\n            \"not_found_unique_key_list.sql\": models__not_found_unique_key_list_sql,\n            \"expected\": {\n                \"one_str__overwrite.sql\": models__expected__one_str__overwrite_sql,\n                \"unique_key_list__inplace_overwrite.sql\": models__expected__unique_key_list__inplace_overwrite_sql,\n            },\n        }\n\n\n@pytest.mark.iceberg\nclass TestIcebergIncrementalDeleteInsert(TrinoIncrementalUniqueKey):\n    @pytest.fixture(scope=\"class\")\n    def project_config_update(self):\n        return {\n            \"name\": \"incremental\",\n            \"models\": {\"+incremental_strategy\": \"delete+insert\"},\n            \"seeds\": {\"incremental\": {\"seed\": {\"+column_types\": {\"some_date\": \"date\"}}}},\n        }\n\n\n@pytest.mark.delta\nclass TestDeltaIncrementalDeleteInsert(TrinoIncrementalUniqueKey):\n    def test__no_unique_keys(self, project):\n        super().test__no_unique_keys(project)\n\n    @pytest.fixture(scope=\"class\")\n    def project_config_update(self):\n        return {\n            \"name\": \"incremental\",\n            \"models\": {\"+on_table_exists\": \"drop\", \"+incremental_strategy\": \"delete+insert\"},\n            \"seeds\": {\"incremental\": {\"seed\": {\"+column_types\": {\"some_date\": \"date\"}}}},\n        }\n\n\n@pytest.mark.iceberg\n@pytest.mark.skip_profile(\"starburst_galaxy\")\nclass TestIcebergIncrementalDeleteInsertWithLocation:\n    @pytest.fixture(scope=\"class\")\n    def models(self):\n        return {\n            \"model.sql\": 
models__location_specified,\n        }\n\n    def test_temporary_table_location(self, project):\n        # Create model with properties\n        results, logs = run_dbt_and_capture([\"--debug\", \"run\"], expect_pass=True)\n        assert len(results) == 1\n        assert f'create table \"{project.database}\".\"{project.test_schema}\".\"model\"' in logs\n        assert \"location = 's3a://datalake/model'\" in logs\n\n        # The temporary table is created on the second run,\n        # so check that the second run succeeds and that the\n        # location is patched correctly\n        results, logs = run_dbt_and_capture([\"--debug\", \"run\"], expect_pass=True)\n        assert len(results) == 1\n        assert (\n            f'create table \"{project.database}\".\"{project.test_schema}\".\"model__dbt_tmp\"' in logs\n        )\n        assert \"location = 's3a://datalake/model__dbt_tmp'\" in logs\n\n\n@pytest.mark.iceberg\nclass TestIcebergCompositeUniqueKeys(BaseIncrementalPredicates):\n    @pytest.fixture(scope=\"class\")\n    def seeds(self):\n        return {\n            \"expected_delete_insert_incremental_predicates.csv\": seeds__expected_delete_insert_incremental_predicates_csv,\n            \"expected_delete_insert_composite_keys.csv\": seeds__expected_delete_insert_composite_keys_csv,\n        }\n\n    @pytest.fixture(scope=\"class\")\n    def models(self):\n        return {\n            \"delete_insert_incremental_predicates.sql\": models__delete_insert_incremental_predicates_sql,\n            \"delete_insert_composite_keys.sql\": models__delete_insert_composite_keys_sql,\n        }\n\n    def test__incremental_predicates_composite_keys(self, project):\n        \"\"\"seed should match model after two incremental runs\"\"\"\n\n        expected_fields = self.get_expected_fields(\n            relation=\"expected_delete_insert_composite_keys\", seed_rows=4\n        )\n        test_case_fields = self.get_test_fields(\n            project,\n            seed=\"expected_delete_insert_composite_keys\",\n            incremental_model=\"delete_insert_composite_keys\",\n            update_sql_file=None,\n        )\n        self.check_scenario_correctness(expected_fields, test_case_fields, project)\n"
  },
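  {
    "path": "tests/functional/adapter/materialization/sketch_delete_insert_sql.py",
    "content": "# Illustrative sketch only (hypothetical file, not part of the test suite):\n# approximates the two statements behind the delete+insert incremental\n# strategy exercised by the tests above. With a composite unique_key\n# (['id', 'col']), rows matching the new batch are deleted before the batch\n# is inserted; the adapter's actual generated SQL may differ in detail.\n\nAPPROXIMATE_DELETE_SQL = \"\"\"\ndelete from {target_relation}\nwhere exists (\n    select 1\n    from {temp_relation} s\n    where s.id = {target_relation}.id\n      and s.col = {target_relation}.col\n)\n\"\"\"\n\nAPPROXIMATE_INSERT_SQL = \"insert into {target_relation} select * from {temp_relation}\"\n"
  },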
  {
    "path": "tests/functional/adapter/materialization/test_incremental_merge.py",
    "content": "import pytest\nfrom dbt.tests.adapter.incremental.test_incremental_unique_id import (\n    BaseIncrementalUniqueKey,\n    models__duplicated_unary_unique_key_list_sql,\n    models__empty_str_unique_key_sql,\n    models__empty_unique_key_list_sql,\n    models__no_unique_key_sql,\n    models__nontyped_trinary_unique_key_list_sql,\n    models__not_found_unique_key_list_sql,\n    models__not_found_unique_key_sql,\n    models__str_unique_key_sql,\n    models__trinary_unique_key_list_sql,\n    models__unary_unique_key_list_sql,\n    seeds__seed_csv,\n)\n\nseeds__duplicate_insert_sql = \"\"\"\n-- Insert statement which when applied to seed.csv triggers the inplace\n--   overwrite strategy of incremental models. Seed and incremental model\n--   diverge.\n\n-- insert new row, which should not be in incremental model\n--  with primary or first three columns unique\ninsert into {schema}.seed\n    (state, county, city, last_visit_date)\nvalues ('CT','Hartford','Hartford',DATE '2022-02-14');\n\n\"\"\"\n\nseeds__add_new_rows_sql = \"\"\"\n-- Insert statement which when applied to seed.csv sees incremental model\n--   grow in size while not (necessarily) diverging from the seed itself.\n\n-- insert two new rows, both of which should be in incremental model\n--   with any unique columns\ninsert into {schema}.seed\n    (state, county, city, last_visit_date)\nvalues ('WA','King','Seattle',DATE '2022-02-01');\n\ninsert into {schema}.seed\n    (state, county, city, last_visit_date)\nvalues ('CA','Los Angeles','Los Angeles',DATE '2022-02-01');\n\n\"\"\"\n\nmodels__expected__one_str__overwrite_sql = \"\"\"\n{{\n    config(\n        materialized='table'\n    )\n}}\n\nselect\n    'CT' as state,\n    'Hartford' as county,\n    'Hartford' as city,\n    cast('2022-02-14' as date) as last_visit_date\nunion all\nselect 'MA','Suffolk','Boston',DATE '2020-02-12'\nunion all\nselect 'NJ','Mercer','Trenton',DATE '2022-01-01'\nunion all\nselect 'NY','Kings','Brooklyn',DATE '2021-04-02'\nunion all\nselect 'NY','New York','Manhattan',DATE '2021-04-01'\nunion all\nselect 'PA','Philadelphia','Philadelphia',DATE '2021-05-21'\nunion all\nselect 'CO','Denver',null,DATE '2021-06-18'\n\n\"\"\"\n\nmodels__expected__unique_key_list__inplace_overwrite_sql = \"\"\"\n{{\n    config(\n        materialized='table'\n    )\n}}\n\nselect\n    'CT' as state,\n    'Hartford' as county,\n    'Hartford' as city,\n    cast('2022-02-14' as date) as last_visit_date\nunion all\nselect 'MA','Suffolk','Boston',DATE '2020-02-12'\nunion all\nselect 'NJ','Mercer','Trenton',DATE '2022-01-01'\nunion all\nselect 'NY','Kings','Brooklyn',DATE '2021-04-02'\nunion all\nselect 'NY','New York','Manhattan',DATE '2021-04-01'\nunion all\nselect 'PA','Philadelphia','Philadelphia',DATE '2021-05-21'\nunion all\nselect 'CO','Denver',null,DATE '2021-06-18'\n\n\"\"\"\n\n\nclass TrinoIncrementalUniqueKey(BaseIncrementalUniqueKey):\n    @pytest.fixture(scope=\"class\")\n    def seeds(self):\n        return {\n            \"duplicate_insert.sql\": seeds__duplicate_insert_sql,\n            \"seed.csv\": seeds__seed_csv,\n            \"add_new_rows.sql\": seeds__add_new_rows_sql,\n        }\n\n    @pytest.fixture(scope=\"class\")\n    def models(self):\n        return {\n            \"trinary_unique_key_list.sql\": models__trinary_unique_key_list_sql,\n            \"nontyped_trinary_unique_key_list.sql\": models__nontyped_trinary_unique_key_list_sql,\n            \"unary_unique_key_list.sql\": models__unary_unique_key_list_sql,\n            
\"not_found_unique_key.sql\": models__not_found_unique_key_sql,\n            \"empty_unique_key_list.sql\": models__empty_unique_key_list_sql,\n            \"no_unique_key.sql\": models__no_unique_key_sql,\n            \"empty_str_unique_key.sql\": models__empty_str_unique_key_sql,\n            \"str_unique_key.sql\": models__str_unique_key_sql,\n            \"duplicated_unary_unique_key_list.sql\": models__duplicated_unary_unique_key_list_sql,\n            \"not_found_unique_key_list.sql\": models__not_found_unique_key_list_sql,\n            \"expected\": {\n                \"one_str__overwrite.sql\": models__expected__one_str__overwrite_sql,\n                \"unique_key_list__inplace_overwrite.sql\": models__expected__unique_key_list__inplace_overwrite_sql,\n            },\n        }\n\n\n@pytest.mark.iceberg\nclass TestIcebergIncrementalMerge(TrinoIncrementalUniqueKey):\n    @pytest.fixture(scope=\"class\")\n    def project_config_update(self):\n        return {\n            \"name\": \"incremental\",\n            \"models\": {\"+incremental_strategy\": \"merge\"},\n            \"seeds\": {\"incremental\": {\"seed\": {\"+column_types\": {\"some_date\": \"date\"}}}},\n        }\n\n\n@pytest.mark.delta\nclass TestDeltaIncrementalMerge(TrinoIncrementalUniqueKey):\n    @pytest.fixture(scope=\"class\")\n    def project_config_update(self):\n        return {\n            \"name\": \"incremental\",\n            \"models\": {\n                \"+on_table_exists\": \"drop\",\n                \"+incremental_strategy\": \"merge\",\n            },\n            \"seeds\": {\"incremental\": {\"seed\": {\"+column_types\": {\"some_date\": \"date\"}}}},\n        }\n"
  },
  {
    "path": "tests/functional/adapter/materialization/test_incremental_microbatch.py",
    "content": "import pytest\nfrom dbt.tests.adapter.incremental.test_incremental_microbatch import BaseMicrobatch\n\n\n@pytest.mark.iceberg\nclass TestTrinoMicrobatchIceberg(BaseMicrobatch):\n    pass\n"
  },
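  {
    "path": "tests/functional/adapter/materialization/sketch_microbatch_model.py",
    "content": "# Illustrative sketch only (hypothetical file, not part of the test suite):\n# the shape of the microbatch model that BaseMicrobatch (inherited above by\n# TestTrinoMicrobatchIceberg) builds. The microbatch strategy splits an\n# incremental run into per-period batches filtered on the event_time column;\n# the exact fixture SQL lives in dbt's adapter test kit and may differ.\n\nSKETCH_MICROBATCH_MODEL_SQL = \"\"\"\n{{\n    config(\n        materialized='incremental',\n        incremental_strategy='microbatch',\n        event_time='event_time',\n        batch_size='day',\n        begin='2020-01-01'\n    )\n}}\nselect * from {{ ref('input_model') }}\n\"\"\"\n"
  },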
  {
    "path": "tests/functional/adapter/materialization/test_incremental_predicates.py",
    "content": "import pytest\nfrom dbt.tests.adapter.incremental.test_incremental_predicates import (\n    BaseIncrementalPredicates,\n)\n\n\n@pytest.mark.iceberg\nclass TestIcebergPredicatesDeleteInsertTrino(BaseIncrementalPredicates):\n    @pytest.fixture(scope=\"class\")\n    def project_config_update(self):\n        return {\"models\": {\"+predicates\": [\"id != 2\"], \"+incremental_strategy\": \"delete+insert\"}}\n\n\n@pytest.mark.delta\nclass TestDeltaPredicatesDeleteInsertTrino(BaseIncrementalPredicates):\n    @pytest.fixture(scope=\"class\")\n    def project_config_update(self):\n        return {\"models\": {\"+predicates\": [\"id != 2\"], \"+incremental_strategy\": \"delete+insert\"}}\n\n\n@pytest.mark.iceberg\nclass TestIcebergIncrementalPredicatesMergeTrino(BaseIncrementalPredicates):\n    @pytest.fixture(scope=\"class\")\n    def project_config_update(self):\n        return {\n            \"models\": {\n                \"+incremental_predicates\": [\"dbt_internal_dest.id != 2\"],\n                \"+incremental_strategy\": \"merge\",\n            }\n        }\n\n\n@pytest.mark.delta\nclass TestDeltaIncrementalPredicatesMergeTrino(BaseIncrementalPredicates):\n    @pytest.fixture(scope=\"class\")\n    def project_config_update(self):\n        return {\n            \"models\": {\n                \"+incremental_predicates\": [\"dbt_internal_dest.id != 2\"],\n                \"+incremental_strategy\": \"merge\",\n            }\n        }\n\n\n@pytest.mark.iceberg\nclass TestIcebergPredicatesMergeTrino(BaseIncrementalPredicates):\n    @pytest.fixture(scope=\"class\")\n    def project_config_update(self):\n        return {\n            \"models\": {\n                \"+predicates\": [\"dbt_internal_dest.id != 2\"],\n                \"+incremental_strategy\": \"merge\",\n            }\n        }\n\n\n@pytest.mark.delta\nclass TestDeltaPredicatesMergeTrino(BaseIncrementalPredicates):\n    @pytest.fixture(scope=\"class\")\n    def project_config_update(self):\n        return {\n            \"models\": {\n                \"+predicates\": [\"dbt_internal_dest.id != 2\"],\n                \"+incremental_strategy\": \"merge\",\n            }\n        }\n"
  },
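  {
    "path": "tests/functional/adapter/materialization/sketch_incremental_predicates_sql.py",
    "content": "# Illustrative sketch only (hypothetical file, not part of the test suite):\n# approximates the shape of the MERGE that the `merge` incremental strategy\n# emits when `incremental_predicates` (or `predicates`, which the tests above\n# also exercise) are configured. The dbt_internal_dest/dbt_internal_source\n# aliases follow dbt's merge macros; the adapter's actual generated SQL may\n# differ in detail.\n\nAPPROXIMATE_MERGE_SQL = \"\"\"\nmerge into {target_relation} as dbt_internal_dest\nusing {temp_relation} as dbt_internal_source\non (\n    -- the configured predicate is applied alongside the unique_key match,\n    -- so rows with id = 2 are never updated\n    dbt_internal_dest.id != 2\n    and dbt_internal_source.id = dbt_internal_dest.id\n)\nwhen matched then update set ...\nwhen not matched then insert ...\n\"\"\"\n"
  },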
  {
    "path": "tests/functional/adapter/materialization/test_incremental_schema.py",
    "content": "import pytest\nfrom dbt.tests.util import check_relations_equal, run_dbt\n\nfrom tests.functional.adapter.materialization.fixtures import (\n    incremental_append_new_columns,\n    incremental_append_new_columns_remove_one_sql,\n    incremental_append_new_columns_remove_one_target_sql,\n    incremental_append_new_columns_target_sql,\n    incremental_fail_sql,\n    incremental_ignore_sql,\n    incremental_ignore_target_sql,\n    incremental_sync_all_columns_diff_data_types_sql,\n    incremental_sync_all_columns_diff_data_types_target_sql,\n    incremental_sync_all_columns_quoted_sql,\n    incremental_sync_all_columns_quoted_target_sql,\n    incremental_sync_all_columns_sql,\n    incremental_sync_all_columns_target_sql,\n    model_a_sql,\n    schema_base_yml,\n    select_from_a_sql,\n    select_from_incremental_append_new_columns_remove_one_sql,\n    select_from_incremental_append_new_columns_remove_one_target_sql,\n    select_from_incremental_append_new_columns_sql,\n    select_from_incremental_append_new_columns_target_sql,\n    select_from_incremental_ignore_sql,\n    select_from_incremental_ignore_target_sql,\n    select_from_incremental_sync_all_columns_diff_data_types_sql,\n    select_from_incremental_sync_all_columns_diff_data_types_target_sql,\n    select_from_incremental_sync_all_columns_quoted_sql,\n    select_from_incremental_sync_all_columns_quoted_target_sql,\n    select_from_incremental_sync_all_columns_sql,\n    select_from_incremental_sync_all_columns_target_sql,\n)\n\n\nclass OnSchemaChangeBase:\n    # configuration in dbt_project.yml\n    @pytest.fixture(scope=\"class\")\n    def project_config_update(self):\n        return {\"name\": \"on_schema_change\"}\n\n    # everything that goes in the \"models\" directory\n    @pytest.fixture(scope=\"class\")\n    def models(self):\n        return {\n            \"model_a.sql\": model_a_sql,\n            \"incremental_ignore.sql\": incremental_ignore_sql,\n            \"incremental_ignore_target.sql\": incremental_ignore_target_sql,\n            \"incremental_append_new_columns.sql\": incremental_append_new_columns,\n            \"incremental_append_new_columns_target.sql\": incremental_append_new_columns_target_sql,\n            \"incremental_append_new_columns_remove_one.sql\": incremental_append_new_columns_remove_one_sql,\n            \"incremental_append_new_columns_remove_one_target.sql\": incremental_append_new_columns_remove_one_target_sql,\n            \"incremental_fail.sql\": incremental_fail_sql,\n            \"incremental_sync_all_columns.sql\": incremental_sync_all_columns_sql,\n            \"incremental_sync_all_columns_target.sql\": incremental_sync_all_columns_target_sql,\n            \"incremental_sync_all_columns_quoted.sql\": incremental_sync_all_columns_quoted_sql,\n            \"incremental_sync_all_columns_quoted_target.sql\": incremental_sync_all_columns_quoted_target_sql,\n            \"incremental_sync_all_columns_diff_data_types.sql\": incremental_sync_all_columns_diff_data_types_sql,\n            \"incremental_sync_all_columns_diff_data_types_target.sql\": incremental_sync_all_columns_diff_data_types_target_sql,\n            \"schema.yml\": schema_base_yml,\n        }\n\n    @pytest.fixture(scope=\"class\")\n    def tests(self):\n        return {\n            \"select_from_a.sql\": select_from_a_sql,\n            \"select_from_incremental_append_new_columns.sql\": select_from_incremental_append_new_columns_sql,\n            \"select_from_incremental_append_new_columns_remove_one.sql\": 
select_from_incremental_append_new_columns_remove_one_sql,\n            \"select_from_incremental_append_new_columns_remove_one_target.sql\": select_from_incremental_append_new_columns_remove_one_target_sql,\n            \"select_from_incremental_append_new_columns_target.sql\": select_from_incremental_append_new_columns_target_sql,\n            \"select_from_incremental_ignore.sql\": select_from_incremental_ignore_sql,\n            \"select_from_incremental_ignore_target.sql\": select_from_incremental_ignore_target_sql,\n            \"select_from_incremental_sync_all_columns.sql\": select_from_incremental_sync_all_columns_sql,\n            \"select_from_incremental_sync_all_columns_target.sql\": select_from_incremental_sync_all_columns_target_sql,\n            \"select_from_incremental_sync_all_columns_quoted.sql\": select_from_incremental_sync_all_columns_quoted_sql,\n            \"select_from_incremental_sync_all_columns_quoted_target.sql\": select_from_incremental_sync_all_columns_quoted_target_sql,\n            \"select_from_incremental_sync_all_columns_diff_data_types.sql\": select_from_incremental_sync_all_columns_diff_data_types_sql,\n            \"select_from_incremental_sync_all_columns_diff_data_types_target.sql\": select_from_incremental_sync_all_columns_diff_data_types_target_sql,\n        }\n\n    def list_tests_and_assert(self, include, exclude, expected_tests):\n        list_args = [\"ls\", \"--resource-type\", \"test\"]\n        if include:\n            list_args.extend((\"--select\", include))\n        if exclude:\n            list_args.extend((\"--exclude\", exclude))\n        listed = run_dbt(list_args)\n        assert len(listed) == len(expected_tests)\n        test_names = [name.split(\".\")[-1] for name in listed]\n        assert sorted(test_names) == sorted(expected_tests)\n\n    def run_tests_and_assert(\n        self, project, include, exclude, expected_tests, compare_source, compare_target\n    ):\n        run_args = [\"run\"]\n        if include:\n            run_args.extend((\"--models\", include))\n        results_one = run_dbt(run_args)\n        results_two = run_dbt(run_args)\n\n        assert len(results_one) == 3\n        assert len(results_two) == 3\n\n        test_args = [\"test\"]\n        if include:\n            test_args.extend((\"--models\", include))\n        if exclude:\n            test_args.extend((\"--exclude\", exclude))\n\n        results = run_dbt(test_args)\n        tests_run = [r.node.name for r in results]\n        assert len(tests_run) == len(expected_tests)\n        assert sorted(tests_run) == sorted(expected_tests)\n        check_relations_equal(project.adapter, [compare_source, compare_target])\n\n    def run_incremental_ignore(self, project):\n        select = \"model_a incremental_ignore incremental_ignore_target\"\n        compare_source = \"incremental_ignore\"\n        compare_target = \"incremental_ignore_target\"\n        exclude = None\n        expected = [\n            \"select_from_a\",\n            \"select_from_incremental_ignore\",\n            \"select_from_incremental_ignore_target\",\n            \"unique_model_a_id\",\n            \"unique_incremental_ignore_id\",\n            \"unique_incremental_ignore_target_id\",\n        ]\n\n        self.list_tests_and_assert(select, exclude, expected)\n        self.run_tests_and_assert(\n            project, select, exclude, expected, compare_source, compare_target\n        )\n\n    def run_incremental_append_new_columns(self, project):\n        select = 
\"model_a incremental_append_new_columns incremental_append_new_columns_target\"\n        compare_source = \"incremental_append_new_columns\"\n        compare_target = \"incremental_append_new_columns_target\"\n        exclude = None\n        expected = [\n            \"select_from_a\",\n            \"select_from_incremental_append_new_columns\",\n            \"select_from_incremental_append_new_columns_target\",\n            \"unique_model_a_id\",\n            \"unique_incremental_append_new_columns_id\",\n            \"unique_incremental_append_new_columns_target_id\",\n        ]\n        self.list_tests_and_assert(select, exclude, expected)\n        self.run_tests_and_assert(\n            project, select, exclude, expected, compare_source, compare_target\n        )\n\n    def run_incremental_append_new_columns_remove_one(self, project):\n        select = \"model_a incremental_append_new_columns_remove_one incremental_append_new_columns_remove_one_target\"\n        compare_source = \"incremental_append_new_columns_remove_one\"\n        compare_target = \"incremental_append_new_columns_remove_one_target\"\n        exclude = None\n        expected = [\n            \"select_from_a\",\n            \"select_from_incremental_append_new_columns_remove_one\",\n            \"select_from_incremental_append_new_columns_remove_one_target\",\n            \"unique_model_a_id\",\n            \"unique_incremental_append_new_columns_remove_one_id\",\n            \"unique_incremental_append_new_columns_remove_one_target_id\",\n        ]\n        self.run_tests_and_assert(\n            project, select, exclude, expected, compare_source, compare_target\n        )\n\n    def run_incremental_sync_all_columns(self, project):\n        select = \"model_a incremental_sync_all_columns incremental_sync_all_columns_target\"\n        compare_source = \"incremental_sync_all_columns\"\n        compare_target = \"incremental_sync_all_columns_target\"\n        exclude = None\n        expected = [\n            \"select_from_a\",\n            \"select_from_incremental_sync_all_columns\",\n            \"select_from_incremental_sync_all_columns_target\",\n            \"unique_model_a_id\",\n            \"unique_incremental_sync_all_columns_id\",\n            \"unique_incremental_sync_all_columns_target_id\",\n        ]\n        self.list_tests_and_assert(select, exclude, expected)\n        self.run_tests_and_assert(\n            project, select, exclude, expected, compare_source, compare_target\n        )\n\n    def run_incremental_sync_all_columns_quoted(self, project):\n        select = \"model_a incremental_sync_all_columns_quoted incremental_sync_all_columns_quoted_target\"\n        compare_source = \"incremental_sync_all_columns_quoted\"\n        compare_target = \"incremental_sync_all_columns_quoted_target\"\n        exclude = None\n        expected = [\n            \"select_from_a\",\n            \"select_from_incremental_sync_all_columns_quoted\",\n            \"select_from_incremental_sync_all_columns_quoted_target\",\n            \"unique_model_a_id\",\n            \"unique_incremental_sync_all_columns_quoted_id\",\n            \"unique_incremental_sync_all_columns_quoted_target_id\",\n        ]\n        self.list_tests_and_assert(select, exclude, expected)\n        self.run_tests_and_assert(\n            project, select, exclude, expected, compare_source, compare_target\n        )\n\n    def run_incremental_sync_all_columns_data_type_change(self, project):\n        select = \"model_a 
incremental_sync_all_columns_diff_data_types incremental_sync_all_columns_diff_data_types_target\"\n        compare_source = \"incremental_sync_all_columns_diff_data_types\"\n        compare_target = \"incremental_sync_all_columns_diff_data_types_target\"\n        exclude = None\n        expected = [\n            \"select_from_a\",\n            \"select_from_incremental_sync_all_columns_diff_data_types\",\n            \"select_from_incremental_sync_all_columns_diff_data_types_target\",\n            \"unique_model_a_id\",\n            \"unique_incremental_sync_all_columns_diff_data_types_id\",\n            \"unique_incremental_sync_all_columns_diff_data_types_target_id\",\n        ]\n        self.list_tests_and_assert(select, exclude, expected)\n        self.run_tests_and_assert(\n            project, select, exclude, expected, compare_source, compare_target\n        )\n\n    def run_incremental_fail_on_schema_change(self, _):\n        select = \"model_a incremental_fail\"\n        run_dbt([\"run\", \"--models\", select, \"--full-refresh\"])\n        results_two = run_dbt([\"run\", \"--models\", select], expect_pass=False)\n        assert \"Compilation Error\" in results_two[1].message\n\n    def test_run_incremental_ignore(self, project):\n        self.run_incremental_ignore(project)\n\n    def test_run_incremental_append_new_columns(self, project):\n        self.run_incremental_append_new_columns(project)\n        self.run_incremental_append_new_columns_remove_one(project)\n\n    def test_run_incremental_sync_all_columns(self, project):\n        self.run_incremental_sync_all_columns(project)\n        self.run_incremental_sync_all_columns_quoted(project)\n\n    def test_run_incremental_sync_all_columns_data_type_change(self, project):\n        self.run_incremental_sync_all_columns_data_type_change(project)\n\n    def test_run_incremental_fail_on_schema_change(self, project):\n        self.run_incremental_fail_on_schema_change(project)\n\n\n@pytest.mark.iceberg\nclass TestIcebergOnSchemaChange(OnSchemaChangeBase):\n    @pytest.fixture(scope=\"class\")\n    def project_config_update(self):\n        return {\n            \"name\": \"on_schema_change_iceberg\",\n            \"models\": {\"+incremental_strategy\": \"merge\"},\n        }\n\n\n@pytest.mark.delta\nclass TestDeltaOnSchemaChange(OnSchemaChangeBase):\n    @pytest.fixture(scope=\"class\")\n    def project_config_update(self):\n        return {\n            \"name\": \"on_schema_change_delta\",\n            \"models\": {\n                \"+on_table_exists\": \"drop\",\n                \"+incremental_strategy\": \"merge\",\n            },\n        }\n\n    @pytest.mark.xfail(reason=\"This connector does not support dropping columns\")\n    def test_run_incremental_sync_all_columns(self, project):\n        super(TestDeltaOnSchemaChange, self).test_run_incremental_sync_all_columns(project)\n\n    @pytest.mark.xfail(reason=\"This connector does not support dropping columns\")\n    def test_run_incremental_sync_all_columns_data_type_change(self, project):\n        super(\n            TestDeltaOnSchemaChange, self\n        ).test_run_incremental_sync_all_columns_data_type_change(project)\n"
  },
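  {
    "path": "tests/functional/adapter/materialization/sketch_on_schema_change_sql.py",
    "content": "# Illustrative sketch only (hypothetical file, not part of the test suite):\n# summarizes how each on_schema_change setting exercised by\n# test_incremental_schema.py above reconciles the target table with the\n# model. The ALTER statements approximate what dbt issues; exact SQL may\n# differ.\n\nON_SCHEMA_CHANGE_BEHAVIOR = {\n    # new columns are simply not inserted; the target keeps its old schema\n    \"ignore\": None,\n    # new model columns are added to the target before inserting\n    \"append_new_columns\": \"alter table {target} add column {column} {type}\",\n    # columns are added and removed to mirror the model; the Delta tests\n    # above are xfailed because that connector cannot drop columns\n    \"sync_all_columns\": \"alter table {target} drop column {column}\",\n    # the run aborts, as asserted in run_incremental_fail_on_schema_change\n    \"fail\": \"Compilation Error\",\n}\n"
  },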
  {
    "path": "tests/functional/adapter/materialization/test_incremental_views_enabled.py",
    "content": "import pytest\nfrom dbt.tests.util import run_dbt, run_dbt_and_capture\n\nfrom tests.functional.adapter.materialization.fixtures import model_sql, seed_csv\n\n\nclass BaseViewsEnabled:\n    # everything that goes in the \"seeds\" directory\n    @pytest.fixture(scope=\"class\")\n    def seeds(self):\n        return {\n            \"seed.csv\": seed_csv,\n        }\n\n    # everything that goes in the \"models\" directory\n    @pytest.fixture(scope=\"class\")\n    def models(self):\n        return {\n            \"materialization.sql\": model_sql,\n        }\n\n\nclass TestViewsEnabledTrue(BaseViewsEnabled):\n    \"\"\"\n    Testing without views_enabled config specified, which defaults to views_enabled = True configuration\n    \"\"\"\n\n    @pytest.fixture(scope=\"class\")\n    def project_config_update(self):\n        return {\n            \"name\": \"views_enabled_true\",\n            \"models\": {\"+materialized\": \"incremental\"},\n            \"seeds\": {\n                \"+column_types\": {\"some_date\": \"timestamp(6)\"},\n            },\n        }\n\n    # The actual sequence of dbt commands and assertions\n    # pytest will take care of all \"setup\" + \"teardown\"\n    def test_run_seed_test(self, project):\n        # seed seeds\n        results = run_dbt([\"seed\"], expect_pass=True)\n        assert len(results) == 1\n\n        results = run_dbt([\"run\"], expect_pass=True)\n        assert len(results) == 1\n\n        results, logs = run_dbt_and_capture([\"--debug\", \"run\"], expect_pass=True)\n        assert len(results) == 1\n        assert (\n            f'''create or replace view\n    \"{project.database}\".\"{project.test_schema}\".\"materialization__dbt_tmp\"'''\n            in logs\n        )\n\n\nclass TestViewsEnabledFalse(BaseViewsEnabled):\n    \"\"\"\n    Testing views_enabled = False configuration for incremental materialization\n    \"\"\"\n\n    @pytest.fixture(scope=\"class\")\n    def project_config_update(self):\n        return {\n            \"name\": \"views_enabled_false\",\n            \"models\": {\"+materialized\": \"incremental\", \"+views_enabled\": False},\n            \"seeds\": {\n                \"+column_types\": {\"some_date\": \"timestamp(6)\"},\n            },\n        }\n\n    # The actual sequence of dbt commands and assertions\n    # pytest will take care of all \"setup\" + \"teardown\"\n    def test_run_seed_test(self, project):\n        # seed seeds\n        results = run_dbt([\"seed\"], expect_pass=True)\n        assert len(results) == 1\n\n        results = run_dbt([\"run\"], expect_pass=True)\n        assert len(results) == 1\n\n        results, logs = run_dbt_and_capture([\"--debug\", \"run\"], expect_pass=True)\n        assert len(results) == 1\n        assert (\n            f'create table \"{project.database}\".\"{project.test_schema}\".\"materialization__dbt_tmp\"'\n            in logs\n        )\n"
  },
  {
    "path": "tests/functional/adapter/materialization/test_materialized_view.py",
    "content": "import pytest\nfrom dbt.tests.util import (\n    check_relation_types,\n    check_relations_equal,\n    run_dbt,\n    run_dbt_and_capture,\n    run_sql_with_adapter,\n)\n\nfrom tests.functional.adapter.materialization.fixtures import (\n    model_cte_sql,\n    model_sql,\n    seed_csv,\n)\n\n\n# TODO: teardown_method is needed to properly remove relations and schemas after tests.\n#  It could be refactored and simplified when CASCADE will be supported in Iceberg, delta, hive connectors\n@pytest.mark.iceberg\nclass TestIcebergMaterializedViewBase:\n    @pytest.fixture(scope=\"function\", autouse=True)\n    def teardown_method(self, project):\n        yield\n        # Drop materialized views first, then drop schema\n        sql = \"select * from system.metadata.materialized_views\"\n        results = run_sql_with_adapter(project.adapter, sql, fetch=\"all\")\n        for mv in results:\n            project.run_sql(f\"drop materialized view {mv[0]}.{mv[1]}.{mv[2]}\")\n\n        relation = project.adapter.Relation.create(\n            database=project.database, schema=project.test_schema\n        )\n        project.adapter.drop_schema(relation)\n\n\n@pytest.mark.iceberg\nclass TestIcebergMaterializedViewExists(TestIcebergMaterializedViewBase):\n    @pytest.fixture(scope=\"class\")\n    def project_config_update(self):\n        return {\n            \"name\": \"materialized_view\",\n        }\n\n    @pytest.fixture(scope=\"class\")\n    def models(self):\n        return {\n            \"my_view.sql\": \"select 1 a\",\n            \"my_table.sql\": \"\"\" {{\n    config(materialized='table')\n}}\nselect 1 a\"\"\",\n        }\n\n    def test_mv_is_dropped_when_model_runs_view(self, project):\n        project.adapter.execute(\"CREATE OR REPLACE MATERIALIZED VIEW my_view AS SELECT 2 b\")\n        project.adapter.execute(\"CREATE OR REPLACE MATERIALIZED VIEW my_table AS SELECT 2 b\")\n\n        # check relation types\n        expected = {\n            \"my_table\": \"materialized_view\",\n            \"my_view\": \"materialized_view\",\n        }\n        check_relation_types(project.adapter, expected)\n\n        model_count = len(run_dbt([\"run\"]))\n        assert model_count == 2\n\n        # check relation types\n        expected = {\n            \"my_view\": \"view\",\n            \"my_table\": \"table\",\n        }\n        check_relation_types(project.adapter, expected)\n\n\n@pytest.mark.iceberg\nclass TestIcebergMaterializedViewWithCTE(TestIcebergMaterializedViewBase):\n    # Configuration in dbt_project.yml\n    @pytest.fixture(scope=\"class\")\n    def project_config_update(self):\n        return {\n            \"name\": \"mv_cte_test\",\n            \"models\": {\n                \"+materialized\": \"materialized_view\",\n            },\n            \"seeds\": {\n                \"+column_types\": {\"some_date\": \"timestamp(6)\"},\n            },\n        }\n\n    # Everything that goes in the \"seeds\" directory\n    @pytest.fixture(scope=\"class\")\n    def seeds(self):\n        return {\n            \"seed.csv\": seed_csv,\n        }\n\n    # Everything that goes in the \"models\" directory\n    @pytest.fixture(scope=\"class\")\n    def models(self):\n        return {\n            \"mat_view.sql\": model_cte_sql,\n        }\n\n    def test_mv_with_cte_is_created(self, project):\n        # Create MV\n        results = run_dbt([\"run\"], expect_pass=True)\n        assert len(results) == 1\n\n\n@pytest.mark.iceberg\nclass 
TestIcebergMaterializedViewCreate(TestIcebergMaterializedViewBase):\n    # Configuration in dbt_project.yml\n    @pytest.fixture(scope=\"class\")\n    def project_config_update(self):\n        return {\n            \"name\": \"mv_test\",\n            \"models\": {\n                \"+materialized\": \"materialized_view\",\n            },\n            \"seeds\": {\n                \"+column_types\": {\"some_date\": \"timestamp(6)\"},\n            },\n        }\n\n    # Everything that goes in the \"seeds\" directory\n    @pytest.fixture(scope=\"class\")\n    def seeds(self):\n        return {\n            \"seed.csv\": seed_csv,\n        }\n\n    # Everything that goes in the \"models\" directory\n    @pytest.fixture(scope=\"class\")\n    def models(self):\n        return {\n            \"mat_view.sql\": model_sql,\n        }\n\n    def test_mv_is_created_and_refreshed(self, project):\n        catalog = project.adapter.config.credentials.database\n        schema = project.adapter.config.credentials.schema\n\n        # Seed seed\n        results = run_dbt([\"seed\"], expect_pass=True)\n        assert len(results) == 1\n\n        # Create MV\n        results = run_dbt([\"run\"], expect_pass=True)\n        assert len(results) == 1\n\n        # Check if the data was loaded correctly\n        check_relations_equal(project.adapter, [\"seed\", \"mat_view\"])\n\n        # Add one row to seed\n        sql = f\"\"\"INSERT INTO {catalog}.{schema}.seed\n        VALUES (5, 'Mateo', timestamp '2014-09-07 17:04:27')\"\"\"\n        run_sql_with_adapter(project.adapter, sql, fetch=\"all\")\n\n        # Refresh MV\n        results = run_dbt([\"run\"], expect_pass=True)\n        assert len(results) == 1\n\n        # Check if one row is added in MV\n        sql = f\"select * from {catalog}.{schema}.mat_view\"\n        results = run_sql_with_adapter(project.adapter, sql, fetch=\"all\")\n        assert len(results) == 5\n\n\n@pytest.mark.iceberg\nclass TestIcebergMaterializedViewDropAndCreate(TestIcebergMaterializedViewBase):\n    # Configuration in dbt_project.yml\n    @pytest.fixture(scope=\"class\")\n    def project_config_update(self):\n        return {\n            \"name\": \"mv_test\",\n            \"models\": {\n                \"+materialized\": \"materialized_view\",\n                \"+full_refresh\": True,\n            },\n            \"seeds\": {\n                \"+column_types\": {\"some_date\": \"timestamp(6)\"},\n            },\n        }\n\n    # Everything that goes in the \"seeds\" directory\n    @pytest.fixture(scope=\"class\")\n    def seeds(self):\n        return {\n            \"seed.csv\": seed_csv,\n        }\n\n    # Everything that goes in the \"models\" directory\n    @pytest.fixture(scope=\"class\")\n    def models(self):\n        return {\n            \"mat_view_overrides_table.sql\": model_sql,\n            \"mat_view_overrides_view.sql\": model_sql,\n            \"mat_view_overrides_materialized_view.sql\": model_sql,\n        }\n\n    def test_mv_overrides_relation(self, project):\n        # Create relation with same name\n        project.adapter.execute(\"CREATE VIEW mat_view_overrides_view AS SELECT 3 c\")\n        project.adapter.execute(\"CREATE TABLE mat_view_overrides_table AS SELECT 4 d\")\n        project.adapter.execute(\n            \"CREATE MATERIALIZED VIEW mat_view_overrides_materialized_view AS SELECT 5 e\"\n        )\n\n        # Seed seed\n        results = run_dbt([\"seed\"], expect_pass=True)\n        assert len(results) == 1\n\n        # Create MVs, already 
existing relations with same name should be dropped\n        results = run_dbt([\"run\"], expect_pass=True)\n        assert len(results) == 3\n\n        # Check if MVs were created correctly\n        expected = {\n            \"mat_view_overrides_view\": \"materialized_view\",\n            \"mat_view_overrides_table\": \"materialized_view\",\n            \"mat_view_overrides_materialized_view\": \"materialized_view\",\n        }\n        check_relation_types(project.adapter, expected)\n\n        check_relations_equal(\n            project.adapter,\n            [\n                \"seed\",\n                \"mat_view_overrides_view\",\n                \"mat_view_overrides_table\",\n                \"mat_view_overrides_materialized_view\",\n            ],\n        )\n\n\n@pytest.mark.iceberg\n@pytest.mark.skip_profile(\"starburst_galaxy\")\nclass TestIcebergMaterializedViewProperties(TestIcebergMaterializedViewBase):\n    # Configuration in dbt_project.yml\n    @pytest.fixture(scope=\"class\")\n    def project_config_update(self):\n        return {\n            \"name\": \"mv_test\",\n            \"models\": {\n                \"+materialized\": \"materialized_view\",\n                \"+properties\": {\"format\": \"'PARQUET'\"},\n            },\n            \"seeds\": {\n                \"+column_types\": {\"some_date\": \"timestamp(6)\"},\n            },\n        }\n\n    # Everything that goes in the \"seeds\" directory\n    @pytest.fixture(scope=\"class\")\n    def seeds(self):\n        return {\n            \"seed.csv\": seed_csv,\n        }\n\n    # Everything that goes in the \"models\" directory\n    @pytest.fixture(scope=\"class\")\n    def models(self):\n        return {\n            \"mat_view.sql\": model_sql,\n        }\n\n    def test_set_mv_properties(self, project):\n        catalog = project.adapter.config.credentials.database\n        schema = project.adapter.config.credentials.schema\n\n        # Seed seed\n        results = run_dbt([\"seed\"], expect_pass=True)\n        assert len(results) == 1\n\n        # Create MV\n        results = run_dbt([\"run\"], expect_pass=True)\n        assert len(results) == 1\n\n        # Retrieve MV properties\n        sql = f\"SHOW CREATE MATERIALIZED VIEW {catalog}.{schema}.mat_view\"\n        results = run_sql_with_adapter(project.adapter, sql, fetch=\"all\")\n        assert \"format = 'PARQUET'\" in results[0][0]\n\n\n@pytest.mark.iceberg\nclass TestIcebergMaterializedViewWithGracePeriod(TestIcebergMaterializedViewBase):\n    # Configuration in dbt_project.yml\n    @pytest.fixture(scope=\"class\")\n    def project_config_update(self):\n        return {\n            \"name\": \"mv_test\",\n            \"models\": {\n                \"+materialized\": \"materialized_view\",\n                \"+grace_period\": \"INTERVAL '3' SECOND\",\n            },\n        }\n\n    # Everything that goes in the \"seeds\" directory\n    @pytest.fixture(scope=\"class\")\n    def seeds(self):\n        return {\n            \"seed.csv\": seed_csv,\n        }\n\n    # Everything that goes in the \"models\" directory\n    @pytest.fixture(scope=\"class\")\n    def models(self):\n        return {\n            \"mat_view.sql\": model_sql,\n        }\n\n    def test_set_mv_properties(self, project):\n        # Seed seed\n        results = run_dbt([\"seed\"], expect_pass=True)\n        assert len(results) == 1\n\n        # Create MV\n        results, log_output = run_dbt_and_capture([\"run\", \"--debug\"], expect_pass=True)\n        assert len(results) == 1\n       
 assert \"grace period INTERVAL '3' SECOND\" in log_output\n\n        # Check if MVs were created correctly\n        check_relation_types(project.adapter, {\"mat_view\": \"materialized_view\"})\n"
  },
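  {
    "path": "tests/functional/adapter/materialization/sketch_materialized_view_sql.py",
    "content": "# Illustrative sketch only (hypothetical file, not part of the test suite):\n# approximates the Trino DDL behind the materialized_view materialization\n# exercised by test_materialized_view.py above, grounded in its assertions\n# (grace period in the debug log, format in SHOW CREATE, a refresh instead of\n# a rebuild on the second run). The adapter's actual SQL may differ in detail.\n\nAPPROXIMATE_CREATE_SQL = \"\"\"\ncreate or replace materialized view {catalog}.{schema}.mat_view\ngrace period INTERVAL '3' SECOND\nwith (\n    format = 'PARQUET'\n)\nas\n{model_query}\n\"\"\"\n\n# a second `dbt run` refreshes the existing MV (unless full_refresh is set,\n# in which case it is dropped and recreated), which is why the row inserted\n# into the seed shows up in mat_view after the second run\nAPPROXIMATE_REFRESH_SQL = \"refresh materialized view {catalog}.{schema}.mat_view\"\n"
  },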
  {
    "path": "tests/functional/adapter/materialization/test_on_table_exists.py",
    "content": "import pytest\nfrom dbt.tests.util import check_relations_equal, run_dbt, run_dbt_and_capture\n\nfrom tests.functional.adapter.materialization.fixtures import (\n    model_sql,\n    profile_yml,\n    seed_csv,\n)\n\n\nclass BaseOnTableExists:\n    # everything that goes in the \"seeds\" directory\n    @pytest.fixture(scope=\"class\")\n    def seeds(self):\n        return {\n            \"seed.csv\": seed_csv,\n        }\n\n    # everything that goes in the \"models\" directory\n    @pytest.fixture(scope=\"class\")\n    def models(self):\n        return {\n            \"materialization.sql\": model_sql,\n            \"materialization.yml\": profile_yml,\n        }\n\n\nclass TestOnTableExistsRename(BaseOnTableExists):\n    \"\"\"\n    Testing on_table_exists = `rename` configuration for table materialization,\n    using dbt seed, run and tests commands and validate data load correctness.\n    \"\"\"\n\n    @pytest.fixture(scope=\"class\")\n    def project_config_update(self):\n        return {\n            \"name\": \"table_rename\",\n            \"models\": {\"+materialized\": \"table\", \"+on_table_exists\": \"rename\"},\n            \"seeds\": {\n                \"+column_types\": {\"some_date\": \"timestamp(6)\"},\n            },\n        }\n\n    # The actual sequence of dbt commands and assertions\n    # pytest will take care of all \"setup\" + \"teardown\"\n    def test_run_seed_test(self, project):\n        # seed seeds\n        results = run_dbt([\"seed\"], expect_pass=True)\n        assert len(results) == 1\n        # run models two times to check on_table_exists = 'rename'\n        results, logs = run_dbt_and_capture([\"--debug\", \"run\"], expect_pass=True)\n        assert len(results) == 1\n        assert (\n            f'create table \"{project.database}\".\"{project.test_schema}\".\"materialization\"' in logs\n        )\n        assert \"alter table\" not in logs\n        results, logs = run_dbt_and_capture([\"--debug\", \"run\"], expect_pass=True)\n        assert len(results) == 1\n        assert (\n            f'create table \"{project.database}\".\"{project.test_schema}\".\"materialization__dbt_tmp\"'\n            in logs\n        )\n        assert (\n            f'alter table \"{project.database}\".\"{project.test_schema}\".\"materialization\" rename to \"{project.database}\".\"{project.test_schema}\".\"materialization__dbt_backup\"'\n            in logs\n        )\n        assert (\n            f'alter table \"{project.database}\".\"{project.test_schema}\".\"materialization__dbt_tmp\" rename to \"{project.database}\".\"{project.test_schema}\".\"materialization\"'\n            in logs\n        )\n        assert (\n            f'drop table if exists \"{project.database}\".\"{project.test_schema}\".\"materialization__dbt_backup\"'\n            in logs\n        )\n        # test tests\n        results = run_dbt([\"test\"], expect_pass=True)\n        assert len(results) == 3\n\n        # check if the data was loaded correctly\n        check_relations_equal(project.adapter, [\"seed\", \"materialization\"])\n\n\nclass TestOnTableExistsRenameIncrementalFullRefresh(BaseOnTableExists):\n    \"\"\"\n    Testing on_table_exists = `rename` configuration for incremental materialization and full refresh flag,\n    using dbt seed, run and tests commands and validate data load correctness.\n    \"\"\"\n\n    @pytest.fixture(scope=\"class\")\n    def project_config_update(self):\n        return {\n            \"name\": \"table_rename\",\n            \"models\": 
{\"+materialized\": \"incremental\", \"+on_table_exists\": \"rename\"},\n            \"seeds\": {\n                \"+column_types\": {\"some_date\": \"timestamp(6)\"},\n            },\n        }\n\n    # The actual sequence of dbt commands and assertions\n    # pytest will take care of all \"setup\" + \"teardown\"\n    def test_run_seed_test(self, project):\n        # seed seeds\n        results = run_dbt([\"seed\"], expect_pass=True)\n        assert len(results) == 1\n        # run models two times to check on_table_exists = 'rename'\n        results, logs = run_dbt_and_capture([\"--debug\", \"run\"], expect_pass=True)\n        assert (\n            f'create table \"{project.database}\".\"{project.test_schema}\".\"materialization\"' in logs\n        )\n        assert \"alter table\" not in logs\n        results, logs = run_dbt_and_capture([\"--debug\", \"run\", \"--full-refresh\"], expect_pass=True)\n        assert len(results) == 1\n        assert (\n            f'create table \"{project.database}\".\"{project.test_schema}\".\"materialization__dbt_tmp\"'\n            in logs\n        )\n        assert (\n            f'alter table \"{project.database}\".\"{project.test_schema}\".\"materialization\" rename to \"{project.database}\".\"{project.test_schema}\".\"materialization__dbt_backup\"'\n            in logs\n        )\n        assert (\n            f'alter table \"{project.database}\".\"{project.test_schema}\".\"materialization__dbt_tmp\" rename to \"{project.database}\".\"{project.test_schema}\".\"materialization\"'\n            in logs\n        )\n        assert (\n            f'drop table if exists \"{project.database}\".\"{project.test_schema}\".\"materialization__dbt_backup\"'\n            in logs\n        )\n        assert \"create or replace view\" not in logs\n        # test tests\n        results = run_dbt([\"test\"], expect_pass=True)\n        assert len(results) == 3\n\n        # check if the data was loaded correctly\n        check_relations_equal(project.adapter, [\"seed\", \"materialization\"])\n\n\nclass TestOnTableExistsDrop(BaseOnTableExists):\n    \"\"\"\n    Testing on_table_exists = `drop` configuration for table materialization,\n    using dbt seed, run and tests commands and validate data load correctness.\n    \"\"\"\n\n    @pytest.fixture(scope=\"class\")\n    def project_config_update(self):\n        return {\n            \"name\": \"table_drop\",\n            \"models\": {\"+materialized\": \"table\", \"+on_table_exists\": \"drop\"},\n            \"seeds\": {\n                \"+column_types\": {\"some_date\": \"timestamp(6)\"},\n            },\n        }\n\n    # The actual sequence of dbt commands and assertions\n    # pytest will take care of all \"setup\" + \"teardown\"\n    def test_run_seed_test(self, project):\n        # seed seeds\n        results = run_dbt([\"seed\"], expect_pass=True)\n        assert len(results) == 1\n        # run models two times to check on_table_exists = 'drop'\n        results = run_dbt([\"run\"], expect_pass=True)\n        assert len(results) == 1\n        results = run_dbt([\"run\"], expect_pass=True)\n        assert len(results) == 1\n        # test tests\n        results = run_dbt([\"test\"], expect_pass=True)\n        assert len(results) == 3\n\n        # check if the data was loaded correctly\n        check_relations_equal(project.adapter, [\"seed\", \"materialization\"])\n\n\nclass TestOnTableExistsDropIncrementalFullRefresh(BaseOnTableExists):\n    \"\"\"\n    Testing on_table_exists = `drop` configuration for 
incremental materialization and full refresh flag,\n    using the dbt seed, run, and test commands, and validating data load correctness.\n    \"\"\"\n\n    @pytest.fixture(scope=\"class\")\n    def project_config_update(self):\n        return {\n            \"name\": \"table_drop\",\n            \"models\": {\"+materialized\": \"incremental\", \"+on_table_exists\": \"drop\"},\n            \"seeds\": {\n                \"+column_types\": {\"some_date\": \"timestamp(6)\"},\n            },\n        }\n\n    # The actual sequence of dbt commands and assertions\n    # pytest will take care of all \"setup\" + \"teardown\"\n    def test_run_seed_test(self, project):\n        # seed seeds\n        results = run_dbt([\"seed\"], expect_pass=True)\n        assert len(results) == 1\n        # run models two times to check on_table_exists = 'drop'\n        results, logs = run_dbt_and_capture([\"--debug\", \"run\"], expect_pass=True)\n        assert len(results) == 1\n        assert (\n            f'drop table if exists \"{project.database}\".\"{project.test_schema}\".\"materialization\"'\n            not in logs\n        )\n        results, logs = run_dbt_and_capture([\"--debug\", \"run\", \"--full-refresh\"], expect_pass=True)\n        assert len(results) == 1\n        assert (\n            f'drop table if exists \"{project.database}\".\"{project.test_schema}\".\"materialization\"'\n            in logs\n        )\n        # test tests\n        results = run_dbt([\"test\"], expect_pass=True)\n        assert len(results) == 3\n\n        # check if the data was loaded correctly\n        check_relations_equal(project.adapter, [\"seed\", \"materialization\"])\n\n\nclass BaseOnTableExistsReplace(BaseOnTableExists):\n    \"\"\"\n    Testing on_table_exists = `replace` configuration for table materialization,\n    using the dbt seed, run, and test commands, and validating data load correctness.\n    \"\"\"\n\n    @pytest.fixture(scope=\"class\")\n    def project_config_update(self):\n        return {\n            \"name\": \"table_drop\",\n            \"models\": {\"+materialized\": \"table\", \"+on_table_exists\": \"replace\"},\n            \"seeds\": {\n                \"+column_types\": {\"some_date\": \"timestamp(6)\"},\n            },\n        }\n\n    # The actual sequence of dbt commands and assertions\n    # pytest will take care of all \"setup\" + \"teardown\"\n    def test_run_seed_test(self, project):\n        # seed seeds\n        results = run_dbt([\"seed\"], expect_pass=True)\n        assert len(results) == 1\n        # run models two times to check on_table_exists = 'replace'\n        results, logs = run_dbt_and_capture([\"--debug\", \"run\"], expect_pass=True)\n        assert len(results) == 1\n        assert \"create or replace table\" in logs\n        results, logs = run_dbt_and_capture([\"--debug\", \"run\"], expect_pass=True)\n        assert len(results) == 1\n        assert \"create or replace table\" in logs\n        # test tests\n        results = run_dbt([\"test\"], expect_pass=True)\n        assert len(results) == 3\n\n        # check if the data was loaded correctly\n        check_relations_equal(project.adapter, [\"seed\", \"materialization\"])\n\n\n@pytest.mark.iceberg\nclass TestOnTableExistsReplaceIceberg(BaseOnTableExistsReplace):\n    pass\n\n\n@pytest.mark.delta\nclass TestOnTableExistsReplaceDelta(BaseOnTableExistsReplace):\n    pass\n\n\nclass BaseOnTableExistsReplaceIncrementalFullRefresh(BaseOnTableExists):\n    \"\"\"\n    Testing on_table_exists = `replace` configuration for 
incremental materialization and full refresh flag,\n    using the dbt seed, run, and test commands, and validating data load correctness.\n    \"\"\"\n\n    @pytest.fixture(scope=\"class\")\n    def project_config_update(self):\n        return {\n            \"name\": \"table_drop\",\n            \"models\": {\"+materialized\": \"incremental\", \"+on_table_exists\": \"replace\"},\n            \"seeds\": {\n                \"+column_types\": {\"some_date\": \"timestamp(6)\"},\n            },\n        }\n\n    # The actual sequence of dbt commands and assertions\n    # pytest will take care of all \"setup\" + \"teardown\"\n    def test_run_seed_test(self, project):\n        # seed seeds\n        results = run_dbt([\"seed\"], expect_pass=True)\n        assert len(results) == 1\n        # run models two times to check on_table_exists = 'replace'\n        results, logs = run_dbt_and_capture([\"--debug\", \"run\"], expect_pass=True)\n        assert len(results) == 1\n        assert \"create or replace table\" not in logs\n        results, logs = run_dbt_and_capture([\"--debug\", \"run\", \"--full-refresh\"], expect_pass=True)\n        assert len(results) == 1\n        assert \"create or replace table\" in logs\n        # test tests\n        results = run_dbt([\"test\"], expect_pass=True)\n        assert len(results) == 3\n\n        # check if the data was loaded correctly\n        check_relations_equal(project.adapter, [\"seed\", \"materialization\"])\n\n\n@pytest.mark.iceberg\nclass TestOnTableExistsReplaceIcebergIncrementalFullRefresh(\n    BaseOnTableExistsReplaceIncrementalFullRefresh\n):\n    pass\n\n\n@pytest.mark.delta\nclass TestOnTableExistsReplaceDeltaIncrementalFullRefresh(\n    BaseOnTableExistsReplaceIncrementalFullRefresh\n):\n    pass\n\n\nclass TestOnTableExistsSkip(BaseOnTableExists):\n    \"\"\"\n    Testing on_table_exists = `skip` configuration for table materialization,\n    using the dbt seed, run, and test commands, and validating data load correctness.\n    \"\"\"\n\n    @pytest.fixture(scope=\"class\")\n    def project_config_update(self):\n        return {\n            \"name\": \"table_rename\",\n            \"models\": {\"+materialized\": \"table\", \"+on_table_exists\": \"skip\"},\n            \"seeds\": {\n                \"+column_types\": {\"some_date\": \"timestamp(6)\"},\n            },\n        }\n\n    # The actual sequence of dbt commands and assertions\n    # pytest will take care of all \"setup\" + \"teardown\"\n    def test_run_seed_test(self, project):\n        # seed seeds\n        results = run_dbt([\"seed\"], expect_pass=True)\n        assert len(results) == 1\n        # run models two times to check on_table_exists = 'skip'\n        results, logs = run_dbt_and_capture([\"--debug\", \"run\"], expect_pass=True)\n        assert len(results) == 1\n        assert (\n            f'create table if not exists \"{project.database}\".\"{project.test_schema}\".\"materialization\"'\n            in logs\n        )\n        assert \"alter table\" not in logs\n        assert \"drop table\" not in logs\n        assert \"or replace\" not in logs\n        results, logs = run_dbt_and_capture([\"--debug\", \"run\"], expect_pass=True)\n        assert len(results) == 1\n        assert (\n            f'create table if not exists \"{project.database}\".\"{project.test_schema}\".\"materialization\"'\n            in logs\n        )\n        assert \"alter table\" not in logs\n        assert \"drop table\" not in logs\n        assert \"or replace\" not in logs\n        # test tests\n        
results = run_dbt([\"test\"], expect_pass=True)\n        assert len(results) == 3\n\n        # check if the data was loaded correctly\n        check_relations_equal(project.adapter, [\"seed\", \"materialization\"])\n"
  },
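  {
    "path": "tests/functional/adapter/materialization/sketch_on_table_exists_sql.py",
    "content": "# Illustrative sketch only (hypothetical file, not part of the test suite):\n# the SQL sequence each on_table_exists mode produces when the target table\n# already exists, transcribed from the log assertions in\n# test_on_table_exists.py above.\n\nON_TABLE_EXISTS_SQL = {\n    # build a temp table, swap it in via renames, drop the backup\n    \"rename\": [\n        'create table \"{db}\".\"{schema}\".\"{name}__dbt_tmp\" ...',\n        'alter table ... \"{name}\" rename to ... \"{name}__dbt_backup\"',\n        'alter table ... \"{name}__dbt_tmp\" rename to ... \"{name}\"',\n        'drop table if exists \"{db}\".\"{schema}\".\"{name}__dbt_backup\"',\n    ],\n    # drop the existing table, then recreate it (a brief gap, no backup)\n    \"drop\": [\n        'drop table if exists \"{db}\".\"{schema}\".\"{name}\"',\n        'create table \"{db}\".\"{schema}\".\"{name}\" ...',\n    ],\n    # atomic replacement; needs connector support (Iceberg and Delta above)\n    \"replace\": ['create or replace table \"{db}\".\"{schema}\".\"{name}\" ...'],\n    # leave the existing table untouched\n    \"skip\": ['create table if not exists \"{db}\".\"{schema}\".\"{name}\" ...'],\n}\n"
  },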
  {
    "path": "tests/functional/adapter/materialization/test_prepared_statements.py",
    "content": "import pytest\nfrom dbt.tests.util import check_relations_equal, run_dbt\n\nfrom tests.functional.adapter.materialization.fixtures import (\n    model_sql,\n    profile_yml,\n    seed_csv,\n)\n\n\nclass PreparedStatementsBase:\n    \"\"\"\n    Testing prepared_statements_enabled profile configuration using dbt\n    seed, run and tests commands and validate data load correctness.\n    \"\"\"\n\n    # configuration in dbt_project.yml\n    @pytest.fixture(scope=\"class\")\n    def project_config_update(self):\n        return {\n            \"name\": \"test_prepared_statements\",\n            \"seeds\": {\n                \"+column_types\": {\"some_date\": \"timestamp(6)\"},\n            },\n        }\n\n    # everything that goes in the \"seeds\" directory\n    @pytest.fixture(scope=\"class\")\n    def seeds(self):\n        return {\n            \"seed.csv\": seed_csv,\n        }\n\n    # everything that goes in the \"models\" directory\n    @pytest.fixture(scope=\"class\")\n    def models(self):\n        return {\n            \"materialization.sql\": model_sql,\n            \"materialization.yml\": profile_yml,\n        }\n\n    def retrieve_num_prepared_statements(self, trino_connection):\n        cur = trino_connection.cursor()\n        cur.execute(\"select query from system.runtime.queries order by query_id desc limit 3\")\n        result = cur.fetchall()\n        return len(list(filter(lambda rec: \"EXECUTE\" in rec[0], result)))\n\n    # The actual sequence of dbt commands and assertions\n    # pytest will take care of all \"setup\" + \"teardown\"\n    def run_seed_with_prepared_statements(\n        self, project, trino_connection, expected_num_prepared_statements\n    ):\n        # seed seeds\n        results = run_dbt([\"seed\"], expect_pass=True)\n        assert len(results) == 1\n\n        # Check if the seed command is using prepared statements\n        assert (\n            self.retrieve_num_prepared_statements(trino_connection)\n            == expected_num_prepared_statements\n        )\n\n        # run models\n        results = run_dbt([\"run\"], expect_pass=True)\n        assert len(results) == 1\n        # test tests\n        results = run_dbt([\"test\"], expect_pass=True)\n        assert len(results) == 3\n\n        # check if the data was loaded correctly\n        check_relations_equal(project.adapter, [\"seed\", \"materialization\"])\n\n\n@pytest.mark.prepared_statements_disabled\n@pytest.mark.skip_profile(\"starburst_galaxy\")\nclass TestPreparedStatementsDisabled(PreparedStatementsBase):\n    def test_run_seed_with_prepared_statements_disabled(self, project, trino_connection):\n        self.run_seed_with_prepared_statements(project, trino_connection, 0)\n\n\n@pytest.mark.skip_profile(\"starburst_galaxy\")\nclass TestPreparedStatementsEnabled(PreparedStatementsBase):\n    def test_run_seed_with_prepared_statements_enabled(self, project, trino_connection):\n        self.run_seed_with_prepared_statements(project, trino_connection, 1)\n"
  },
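  {
    "path": "tests/functional/adapter/materialization/sketch_prepared_statements_sql.py",
    "content": "# Illustrative sketch only (hypothetical file, not part of the test suite):\n# what retrieve_num_prepared_statements in test_prepared_statements.py above\n# is counting. With prepared_statements_enabled (the default), the seed\n# insert reaches Trino as a parameterized EXECUTE and is visible in\n# system.runtime.queries; with it disabled, the values are inlined and no\n# EXECUTE is recorded. The exact statement text is client-dependent.\n\nAPPROXIMATE_PREPARED_INSERT = (\n    \"execute <statement_name> using 5, 'Mateo', timestamp '2014-09-07 17:04:27'\"\n)\nAPPROXIMATE_INLINE_INSERT = (\n    \"insert into {schema}.seed values (5, 'Mateo', timestamp '2014-09-07 17:04:27')\"\n)\n"
  },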
  {
    "path": "tests/functional/adapter/materialization/test_snapshot.py",
    "content": "import pytest\nfrom dbt.tests.adapter.basic.test_snapshot_check_cols import BaseSnapshotCheckCols\nfrom dbt.tests.adapter.basic.test_snapshot_timestamp import (\n    BaseSnapshotTimestamp,\n    check_relation_rows,\n)\nfrom dbt.tests.util import get_relation_columns, run_dbt\n\nseeds_base_csv = \"\"\"\nid,name,some_date\n1,Easton,1981-05-20 06:46:51\n2,Lillian,1978-09-03 18:10:33\n3,Jeremiah,1982-03-11 03:59:51\n4,Nolan,1976-05-06 20:21:35\n5,Hannah,1982-06-23 05:41:26\n6,Eleanor,1991-08-10 23:12:21\n7,Lily,1971-03-29 14:58:02\n8,Jonathan,1988-02-26 02:55:24\n9,Adrian,1994-02-09 13:14:23\n10,Nora,1976-03-01 16:51:39\n\"\"\".lstrip()\n\n\nseeds_added_csv = (\n    seeds_base_csv\n    + \"\"\"\n11,Mateo,2014-09-07 17:04:27\n12,Julian,2000-02-04 11:48:30\n13,Gabriel,2001-07-10 07:32:52\n14,Isaac,2002-11-24 03:22:28\n15,Levi,2009-11-15 11:57:15\n16,Elizabeth,2005-04-09 03:50:11\n17,Grayson,2019-08-06 19:28:17\n18,Dylan,2014-03-01 11:50:41\n19,Jayden,2009-06-06 07:12:49\n20,Luke,2003-12-05 21:42:18\n\"\"\".lstrip()\n)\n\nseeds_newcolumns_csv = \"\"\"\nid,name,some_date,last_initial,new_date\n1,Easton,1981-05-20 06:46:51,A,1981-05-20 06:46:51\n2,Lillian,1978-09-03 18:10:33,B,1978-09-03 18:10:33\n3,Jeremiah,1982-03-11 03:59:51,C,1982-03-11 03:59:51\n4,Nolan,1976-05-06 20:21:35,D,1976-05-06 20:21:35\n5,Hannah,1982-06-23 05:41:26,E,1982-06-23 05:41:26\n6,Eleanor,1991-08-10 23:12:21,F,1991-08-10 23:12:21\n7,Lily,1971-03-29 14:58:02,G,1971-03-29 14:58:02\n8,Jonathan,1988-02-26 02:55:24,H,1988-02-26 02:55:24\n9,Adrian,1994-02-09 13:14:23,I,1994-02-09 13:14:23\n10,Nora,1976-03-01 16:51:39,J,1976-03-01 16:51:39\n\"\"\".lstrip()\n\niceberg_macro_override_sql = \"\"\"\\\n{% macro trino__current_timestamp() -%}\n    current_timestamp(6)\n{%- endmacro %}\n\"\"\"\n\n\nclass BaseTrinoSnapshotTimestamp(BaseSnapshotTimestamp):\n    def test_snapshot_timestamp(self, project):\n        super().test_snapshot_timestamp(project)\n\n        run_dbt([\"snapshot\", \"--vars\", \"seed_name: newcolumns\"])\n\n        # snapshot still has 30 rows because timestamp not updated\n        check_relation_rows(project, \"ts_snapshot\", 30)\n\n        # snapshot now has an additional column \"last_initial\"\n        ts_snapshot_columns = map(\n            lambda x: x[0], get_relation_columns(project.adapter, \"ts_snapshot\")\n        )\n        assert \"last_initial\" in ts_snapshot_columns\n        assert \"new_date\" in ts_snapshot_columns\n\n\n@pytest.mark.iceberg\nclass TestIcebergSnapshotCheckColsTrino(BaseSnapshotCheckCols):\n    @pytest.fixture(scope=\"class\")\n    def project_config_update(self):\n        return {\n            \"name\": \"snapshot_strategy_check_cols\",\n            \"seeds\": {\n                \"+column_types\": {\"some_date\": \"timestamp(6)\"},\n            },\n        }\n\n    @pytest.fixture(scope=\"class\")\n    def seeds(self):\n        return {\n            \"base.csv\": seeds_base_csv,\n            \"added.csv\": seeds_added_csv,\n        }\n\n    @pytest.fixture(scope=\"class\")\n    def macros(self):\n        return {\"iceberg.sql\": iceberg_macro_override_sql}\n\n\n@pytest.mark.iceberg\nclass TestIcebergSnapshotTimestampTrino(BaseTrinoSnapshotTimestamp):\n    @pytest.fixture(scope=\"class\")\n    def seeds(self):\n        return {\n            \"base.csv\": seeds_base_csv,\n            \"newcolumns.csv\": seeds_newcolumns_csv,\n            \"added.csv\": seeds_added_csv,\n        }\n\n    @pytest.fixture(scope=\"class\")\n    def project_config_update(self):\n        return 
{\n            \"name\": \"snapshot_strategy_timestamp\",\n            \"seeds\": {\n                \"+column_types\": {\n                    \"some_date\": \"timestamp(6)\",\n                    \"new_date\": \"timestamp(6)\",\n                },\n            },\n        }\n\n\n@pytest.mark.delta\nclass TestDeltaSnapshotCheckColsTrino(BaseSnapshotCheckCols):\n    @pytest.fixture(scope=\"class\")\n    def project_config_update(self):\n        return {\n            \"name\": \"snapshot_strategy_check_cols\",\n            \"seeds\": {\n                \"+column_types\": {\n                    \"some_date\": \"timestamp(3) with time zone\",\n                    \"new_date\": \"timestamp(3) with time zone\",\n                },\n            },\n        }\n\n    @pytest.fixture(scope=\"class\")\n    def seeds(self):\n        return {\n            \"base.csv\": seeds_base_csv,\n            \"added.csv\": seeds_added_csv,\n        }\n\n\n@pytest.mark.delta\nclass TestDeltaSnapshotTimestampTrino(BaseTrinoSnapshotTimestamp):\n    @pytest.fixture(scope=\"class\")\n    def project_config_update(self):\n        return {\n            \"name\": \"snapshot_strategy_timestamp\",\n            \"seeds\": {\n                \"+column_types\": {\n                    \"some_date\": \"timestamp(3) with time zone\",\n                    \"new_date\": \"timestamp(3) with time zone\",\n                },\n            },\n        }\n\n    @pytest.fixture(scope=\"class\")\n    def seeds(self):\n        return {\n            \"base.csv\": seeds_base_csv,\n            \"newcolumns.csv\": seeds_newcolumns_csv,\n            \"added.csv\": seeds_added_csv,\n        }\n\n\nclass TestSnapshotLocationPropertyExceptionTrino(BaseSnapshotCheckCols):\n    \"\"\"\n    Specifying 'location' property in snapshots is not supported.\n    \"\"\"\n\n    @pytest.fixture(scope=\"class\")\n    def project_config_update(self):\n        return {\n            \"name\": \"snapshot_location_property_exception\",\n            \"snapshots\": {\n                \"+properties\": {\n                    \"location\": \"some_location\",\n                },\n            },\n        }\n\n    def test_snapshot_check_cols(self, project):\n        results = run_dbt([\"snapshot\"], expect_pass=False)\n        for result in results:\n            assert result.status == \"error\"\n            assert \"Specifying 'location' property in snapshots is not supported\" in result.message\n"
  },
  {
    "path": "tests/functional/adapter/materialization/test_view_security.py",
    "content": "import pytest\nfrom dbt.tests.util import check_relations_equal, run_dbt\n\nfrom tests.functional.adapter.materialization.fixtures import (\n    model_sql,\n    profile_yml,\n    seed_csv,\n)\n\n\nclass TestViewSecurity:\n    \"\"\"\n    Testing view_security = 'invoker' configuration for view materialization,\n    using dbt seed, run and tests commands and validate data load correctness.\n    \"\"\"\n\n    # configuration in dbt_project.yml\n    @pytest.fixture(scope=\"class\")\n    def project_config_update(self):\n        return {\n            \"name\": \"view_security\",\n            \"models\": {\"+materialized\": \"view\", \"+view_security\": \"invoker\"},\n            \"seeds\": {\n                \"+column_types\": {\"some_date\": \"timestamp(6)\"},\n            },\n        }\n\n    # everything that goes in the \"seeds\" directory\n    @pytest.fixture(scope=\"class\")\n    def seeds(self):\n        return {\n            \"seed.csv\": seed_csv,\n        }\n\n    # everything that goes in the \"models\" directory\n    @pytest.fixture(scope=\"class\")\n    def models(self):\n        return {\n            \"materialization.sql\": model_sql,\n            \"materialization.yml\": profile_yml,\n        }\n\n    # The actual sequence of dbt commands and assertions\n    # pytest will take care of all \"setup\" + \"teardown\"\n    def test_run_seed_test(self, project):\n        # seed seeds\n        results = run_dbt([\"seed\"], expect_pass=True)\n        assert len(results) == 1\n        # run models\n        results = run_dbt([\"run\"], expect_pass=True)\n        assert len(results) == 1\n        # test tests\n        results = run_dbt([\"test\"], expect_pass=True)\n        assert len(results) == 3\n\n        # check if the data was loaded correctly\n        check_relations_equal(project.adapter, [\"seed\", \"materialization\"])\n"
  },
  {
    "path": "tests/functional/adapter/materialized_view_tests/test_materialized_view_dbt_core.py",
    "content": "from typing import Optional, Tuple\n\nimport pytest\nfrom dbt.adapters.base.relation import BaseRelation\nfrom dbt.tests.adapter.materialized_view.basic import MaterializedViewBasic\nfrom dbt.tests.util import get_model_file, run_dbt, run_sql_with_adapter, set_model_file\n\nfrom tests.functional.adapter.materialized_view_tests.utils import query_relation_type\n\n\n@pytest.mark.iceberg\nclass TestTrinoMaterializedViewsBasic(MaterializedViewBasic):\n    @staticmethod\n    def insert_record(project, table: BaseRelation, record: Tuple[int, int]):\n        my_id, value = record\n        project.run_sql(f\"insert into {table} (id, value) values ({my_id}, {value})\")\n\n    @staticmethod\n    def refresh_materialized_view(project, materialized_view: BaseRelation):\n        sql = f\"refresh materialized view {materialized_view}\"\n        project.run_sql(sql)\n\n    @staticmethod\n    def query_row_count(project, relation: BaseRelation) -> int:\n        sql = f\"select count(*) from {relation}\"\n        return project.run_sql(sql, fetch=\"one\")[0]\n\n    @staticmethod\n    def query_relation_type(project, relation: BaseRelation) -> Optional[str]:\n        return query_relation_type(project, relation)\n\n    # TODO: remove `setup` fixture when CASCADE will be supported in Iceberg, delta, hive connectors\n    @pytest.fixture(scope=\"function\", autouse=True)\n    def setup(self, project, my_materialized_view):\n        run_dbt([\"seed\"])\n        run_dbt([\"run\", \"--models\", my_materialized_view.identifier, \"--full-refresh\"])\n\n        # the tests touch these files, store their contents in memory\n        initial_model = get_model_file(project, my_materialized_view)\n\n        yield\n\n        # and then reset them after the test runs\n        set_model_file(project, my_materialized_view, initial_model)\n\n        # Drop materialized views first, then drop schema\n        sql = \"select * from system.metadata.materialized_views\"\n        results = run_sql_with_adapter(project.adapter, sql, fetch=\"all\")\n        for mv in results:\n            project.run_sql(f\"drop materialized view {mv[0]}.{mv[1]}.{mv[2]}\")\n\n        relation = project.adapter.Relation.create(\n            database=project.database, schema=project.test_schema\n        )\n        project.adapter.drop_schema(relation)\n\n    @pytest.mark.skip(\n        reason=\"\"\"\n    on iceberg:\n    If the data is outdated, the materialized view behaves like a normal view,\n    and the data is queried directly from the base tables.\n    https://trino.io/docs/current/connector/iceberg.html#materialized-views\n    \"\"\"\n    )\n    def test_materialized_view_only_updates_after_refresh(self):\n        pass\n"
  },
  {
    "path": "tests/functional/adapter/materialized_view_tests/utils.py",
    "content": "from typing import Optional\n\nfrom dbt.adapters.base.relation import BaseRelation\n\nfrom dbt.adapters.trino.relation import TrinoRelation\n\n\ndef query_relation_type(project, relation: BaseRelation) -> Optional[str]:\n    assert isinstance(relation, TrinoRelation)\n    sql = f\"\"\"\n    select\n      case when mv.name is not null then 'materialized_view'\n           when t.table_type = 'BASE TABLE' then 'table'\n           when t.table_type = 'VIEW' then 'view'\n           else t.table_type\n      end as table_type\n    from {relation.information_schema()}.tables t\n    left join system.metadata.materialized_views mv\n          on mv.catalog_name = t.table_catalog and mv.schema_name = t.table_schema and mv.name = t.table_name\n    where t.table_schema = '{relation.schema.lower()}'\n          and (mv.catalog_name is null or mv.catalog_name =  '{relation.database.lower()}')\n          and (mv.schema_name is null or mv.schema_name =  '{relation.schema.lower()}')\n          and t.table_name = '{relation.identifier.lower()}'\n    \"\"\"\n    results = project.run_sql(sql, fetch=\"all\")\n    if len(results) == 0:\n        return None\n    elif len(results) > 1:\n        raise ValueError(f\"More than one instance of {relation.name} found!\")\n    else:\n        return results[0][0]\n"
  },
  {
    "path": "tests/functional/adapter/persist_docs/fixtures.py",
    "content": "seed_csv = \"\"\"\nid,name,date\n1,Easton,1981-05-20 06:46:51\n2,Lillian,1978-09-03 18:10:33\n3,Jeremiah,1982-03-11 03:59:51\n4,Nolan,1976-05-06 20:21:35\n\"\"\".lstrip()\n\ntable_model = \"\"\"\n{{config(materialized = \"table\")}}\nselect * from {{ ref('seed') }}\n\"\"\"\n\nview_model = \"\"\"\n{{config(materialized = \"view\")}}\nselect * from {{ ref('seed') }}\n\"\"\"\n\nincremental_model = \"\"\"\n{{config(materialized = \"incremental\")}}\nselect * from {{ ref('seed') }}\n\"\"\"\n\ntable_profile_yml = \"\"\"\nversion: 2\nmodels:\n  - name: table_model\n    description: |\n      Table model description \"with double quotes\"\n      and with 'single  quotes' as well as other;\n      '''abc123'''\n      reserved -- characters\n      --\n      /* comment */\n      Some $lbl$ labeled $lbl$ and $$ unlabeled $$ dollar-quoting\n    columns:\n      - name: id\n        description: |\n          id Column description \"with double quotes\"\n          and with 'single  quotes' as well as other;\n          '''abc123'''\n          reserved -- characters\n          --\n          /* comment */\n          Some $lbl$ labeled $lbl$ and $$ unlabeled $$ dollar-quoting\n        tests:\n          - unique\n          - not_null\n      - name: name\n        description: |\n          Fancy column description\n        tests:\n          - not_null\nseeds:\n  - name: seed\n    description: |\n      Seed model description \"with double quotes\"\n      and with 'single  quotes' as welll as other;\n      '''abc123'''\n      reserved -- characters\n      --\n      /* comment */\n      Some $lbl$ labeled $lbl$ and $$ unlabeled $$ dollar-quoting\n    columns:\n      - name: id\n        description: |\n          id Column description \"with double quotes\"\n          and with 'single  quotes' as welll as other;\n          '''abc123'''\n          reserved -- characters\n          --\n          /* comment */\n          Some $lbl$ labeled $lbl$ and $$ unlabeled $$ dollar-quoting\n      - name: name\n        description: |\n          Fancy column description\n        tests:\n          - not_null\n\"\"\"\n\n\nview_profile_yml = \"\"\"\nversion: 2\nmodels:\n  - name: view_model\n    description: |\n      Table model description \"with double quotes\"\n      and with 'single  quotes' as well as other;\n      '''abc123'''\n      reserved -- characters\n      --\n      /* comment */\n      Some $lbl$ labeled $lbl$ and $$ unlabeled $$ dollar-quoting\n    columns:\n      - name: id\n        tests:\n          - unique\n          - not_null\n        description: ID Column description\n      - name: name\n        tests:\n          - not_null\n        description: Name description\nseeds:\n  - name: seed\n    description: |\n      Seed model description \"with double quotes\"\n      and with 'single  quotes' as welll as other;\n      '''abc123'''\n      reserved -- characters\n      --\n      /* comment */\n      Some $lbl$ labeled $lbl$ and $$ unlabeled $$ dollar-quoting\n    columns:\n      - name: id\n        description: |\n          id Column description \"with double quotes\"\n          and with 'single  quotes' as welll as other;\n          '''abc123'''\n          reserved -- characters\n          --\n          /* comment */\n          Some $lbl$ labeled $lbl$ and $$ unlabeled $$ dollar-quoting\n      - name: name\n        description: |\n          Fancy column description\n        tests:\n          - not_null\n\"\"\"\n\nincremental_profile_yml = \"\"\"\nversion: 2\nmodels:\n  - name: incremental_model\n    
description: |\n      Table model description \"with double quotes\"\n      and with 'single  quotes' as well as other;\n      '''abc123'''\n      reserved -- characters\n      --\n      /* comment */\n      Some $lbl$ labeled $lbl$ and $$ unlabeled $$ dollar-quoting\n    columns:\n      - name: id\n        description: |\n          id Column description \"with double quotes\"\n          and with 'single  quotes' as well as other;\n          '''abc123'''\n          reserved -- characters\n          --\n          /* comment */\n          Some $lbl$ labeled $lbl$ and $$ unlabeled $$ dollar-quoting\n        tests:\n          - unique\n          - not_null\n      - name: name\n        description: |\n          Fancy column description\n        tests:\n          - not_null\nseeds:\n  - name: seed\n    description: |\n      Seed model description \"with double quotes\"\n      and with 'single  quotes' as welll as other;\n      '''abc123'''\n      reserved -- characters\n      --\n      /* comment */\n      Some $lbl$ labeled $lbl$ and $$ unlabeled $$ dollar-quoting\n    columns:\n      - name: id\n        description: |\n          id Column description \"with double quotes\"\n          and with 'single  quotes' as welll as other;\n          '''abc123'''\n          reserved -- characters\n          --\n          /* comment */\n          Some $lbl$ labeled $lbl$ and $$ unlabeled $$ dollar-quoting\n      - name: name\n        description: |\n          Fancy column description\n        tests:\n          - not_null\n\"\"\"\n"
  },
  {
    "path": "tests/functional/adapter/persist_docs/test_persist_docs.py",
    "content": "import pytest\nfrom dbt.tests.adapter.persist_docs.test_persist_docs import (\n    BasePersistDocs,\n    BasePersistDocsBase,\n    BasePersistDocsColumnMissing,\n    BasePersistDocsCommentOnQuotedColumn,\n)\nfrom dbt.tests.util import run_dbt, run_sql_with_adapter\n\nfrom tests.functional.adapter.persist_docs.fixtures import (\n    incremental_model,\n    incremental_profile_yml,\n    seed_csv,\n    table_model,\n    table_profile_yml,\n    view_model,\n    view_profile_yml,\n)\n\n\n@pytest.mark.iceberg\nclass TestPersistDocsBase:\n    \"\"\"\n    Testing persist_docs functionality\n    \"\"\"\n\n    @property\n    def schema(self):\n        return \"default\"\n\n    # everything that goes in the \"seeds\" directory\n    @pytest.fixture(scope=\"class\")\n    def seeds(self):\n        return {\n            \"seed.csv\": seed_csv,\n        }\n\n\nclass TestPersistDocsTable(TestPersistDocsBase):\n    @pytest.fixture(scope=\"class\")\n    def project_config_update(self):\n        return {\n            \"name\": \"persist_docs_tests\",\n            \"models\": {\"+persist_docs\": {\"relation\": True, \"columns\": True}},\n            \"seeds\": {\n                \"+column_types\": {\"date\": \"timestamp(6)\"},\n                \"+persist_docs\": {\"relation\": True, \"columns\": True},\n            },\n        }\n\n    # everything that goes in the \"models\" directory\n    @pytest.fixture(scope=\"class\")\n    def models(self):\n        return {\n            \"table_model.sql\": table_model,\n            \"table_persist_docs.yml\": table_profile_yml,\n        }\n\n    def test_run_seed_test(self, project):\n        # seed seeds\n        results = run_dbt([\"seed\"], expect_pass=True)\n        assert len(results) == 1\n        results = run_dbt([\"run\"], expect_pass=True)\n        assert len(results) == 1\n        # test tests\n        results = run_dbt([\"test\"], expect_pass=True)\n        assert len(results) == 4\n\n\nclass TestPersistDocsView(TestPersistDocsBase):\n    @pytest.fixture(scope=\"class\")\n    def project_config_update(self):\n        return {\n            \"name\": \"persist_docs_tests\",\n            \"models\": {\n                \"+persist_docs\": {\"relation\": True, \"columns\": True},\n                \"+materialized\": \"view\",\n                \"+view_security\": \"definer\",\n            },\n            \"seeds\": {\n                \"+column_types\": {\"date\": \"timestamp(6)\"},\n                \"+persist_docs\": {\"relation\": True, \"columns\": True},\n            },\n        }\n\n    # everything that goes in the \"models\" directory\n    @pytest.fixture(scope=\"class\")\n    def models(self):\n        return {\n            \"view_model.sql\": view_model,\n            \"view_persist_docs.yml\": view_profile_yml,\n        }\n\n    def test_run_seed_test(self, project):\n        # seed seeds\n        results = run_dbt([\"seed\"], expect_pass=True)\n        assert len(results) == 1\n        results = run_dbt([\"run\"], expect_pass=True)\n        assert len(results) == 1\n        # test tests\n        results = run_dbt([\"test\"], expect_pass=True)\n        assert len(results) == 4\n\n\nclass TestPersistDocsIncremental(TestPersistDocsBase):\n    @pytest.fixture(scope=\"class\")\n    def project_config_update(self):\n        return {\n            \"name\": \"persist_docs_tests\",\n            \"models\": {\"+persist_docs\": {\"relation\": True, \"columns\": True}},\n            \"seeds\": {\n                \"+column_types\": {\"date\": 
\"timestamp(6)\"},\n                \"+persist_docs\": {\"relation\": True, \"columns\": True},\n            },\n        }\n\n    # everything that goes in the \"models\" directory\n    @pytest.fixture(scope=\"class\")\n    def models(self):\n        return {\n            \"incremental_model.sql\": incremental_model,\n            \"incremental_persist_docs.yml\": incremental_profile_yml,\n        }\n\n    def test_run_seed_test(self, project):\n        # seed seeds\n        results = run_dbt([\"seed\"], expect_pass=True)\n        assert len(results) == 1\n        results = run_dbt([\"run\"], expect_pass=True)\n        assert len(results) == 1\n        # test tests\n        results = run_dbt([\"test\"], expect_pass=True)\n        assert len(results) == 4\n\n\n# TODO: Trying to COMMENT ON COLUMN with a comment that includes '*/'\n# results in an error. This happens only for views.\n@pytest.mark.skip_profile(\"starburst_galaxy\")\nclass TestPersistDocs(BasePersistDocs):\n    pass\n\n\nclass TestPersistDocsColumnMissing(BasePersistDocsColumnMissing):\n    pass\n\n\nclass TestPersistDocsCommentOnQuotedColumn(BasePersistDocsCommentOnQuotedColumn):\n    pass\n\n\nclass BasePersistDocsDisabled(BasePersistDocsBase):\n    def test_persist_docs_disabled(self, project):\n        sql = f\"\"\"select * from system.metadata.table_comments\n        where catalog_name = '{project.database}'\n        and schema_name = '{project.test_schema}'\n        and table_name = 'table_model'\n        and comment is not null\n        \"\"\"\n        result = run_sql_with_adapter(project.adapter, sql, fetch=\"all\")\n        assert len(result) == 0\n\n\nclass TestPersistDocsDisabledByDefault(BasePersistDocsDisabled):\n    \"\"\"\n    Without providing `persist_docs` config, table comments shouldn't be added by default.\n    \"\"\"\n\n    pass\n\n\n# TODO: Trying to COMMENT ON COLUMN with a comment that includes '*/'\n# results in an error. This happens only for views.\n@pytest.mark.skip_profile(\"starburst_galaxy\")\nclass TestPersistDocsRelationSetToFalse(BasePersistDocsDisabled):\n    \"\"\"\n    With `persist_docs.relation` config set to False, table comments shouldn't be added.\n    \"\"\"\n\n    @pytest.fixture(scope=\"class\")\n    def project_config_update(self):\n        return {\n            \"models\": {\n                \"test\": {\n                    \"+persist_docs\": {\n                        \"relation\": False,\n                        \"columns\": True,\n                    },\n                }\n            }\n        }\n\n\n# TODO: Trying to COMMENT ON COLUMN with a comment that includes '*/'\n# results in an error. This happens only for views.\n@pytest.mark.skip_profile(\"starburst_galaxy\")\nclass TestPersistDocsRelationNotSet(BasePersistDocsDisabled):\n    \"\"\"\n    Without providing `persist_docs.relation` config, table comments shouldn't be added by default.\n    \"\"\"\n\n    @pytest.fixture(scope=\"class\")\n    def project_config_update(self):\n        return {\n            \"models\": {\n                \"test\": {\n                    \"+persist_docs\": {\n                        \"columns\": True,\n                    },\n                }\n            }\n        }\n"
  },
  {
    "path": "tests/functional/adapter/show/fixtures.py",
    "content": "models__sample_model = \"\"\"\nselect * from {{ ref('sample_seed') }}\n\"\"\"\n\nmodels__second_model = \"\"\"\nselect\n    sample_num as col_one,\n    sample_bool as col_two,\n    42 as answer\nfrom {{ ref('sample_model') }}\n\"\"\"\n\nmodels__sql_header = \"\"\"\n{% call set_sql_header(config) %}\nset time zone 'Asia/Kolkata';\n{%- endcall %}\nselect current_timezone() as timezone\n\"\"\"\n\nprivate_model_yml = \"\"\"\ngroups:\n  - name: my_cool_group\n    owner: {name: me}\n\nmodels:\n  - name: private_model\n    access: private\n    config:\n      group: my_cool_group\n\"\"\"\n\n\nschema_yml = \"\"\"\nmodels:\n  - name: sample_model\n    latest_version: 1\n\n    # declare the versions, and fully specify them\n    versions:\n      - v: 2\n        config:\n          materialized: table\n        columns:\n          - name: sample_num\n            data_type: int\n          - name: sample_bool\n            data_type: boolean\n          - name: answer\n            data_type: int\n\n      - v: 1\n        config:\n          materialized: table\n          contract: {enforced: true}\n        columns:\n          - name: sample_num\n            data_type: int\n          - name: sample_bool\n            data_type: boolean\n\"\"\"\n\nmodels__ephemeral_model = \"\"\"\n{{ config(materialized = 'ephemeral') }}\nselect\n    coalesce(sample_num, 0) + 10 as col_deci\nfrom {{ ref('sample_model') }}\n\"\"\"\n\nmodels__second_ephemeral_model = \"\"\"\n{{ config(materialized = 'ephemeral') }}\nselect\n    col_deci + 100 as col_hundo\nfrom {{ ref('ephemeral_model') }}\n\"\"\"\n\nseeds__sample_seed = \"\"\"sample_num,sample_bool\n1,true\n2,false\n3,true\n4,false\n5,true\n6,false\n7,true\n\"\"\"\n"
  },
  {
    "path": "tests/functional/adapter/show/test_show.py",
    "content": "import pytest\nfrom dbt.tests.util import run_dbt, run_dbt_and_capture\nfrom dbt_common.exceptions import DbtBaseException, DbtRuntimeError\n\nfrom tests.functional.adapter.show.fixtures import (\n    models__ephemeral_model,\n    models__sample_model,\n    models__second_ephemeral_model,\n    models__second_model,\n    models__sql_header,\n    private_model_yml,\n    schema_yml,\n    seeds__sample_seed,\n)\n\n\nclass TestShow:\n    @pytest.fixture(scope=\"class\")\n    def models(self):\n        return {\n            \"sample_model.sql\": models__sample_model,\n            \"second_model.sql\": models__second_model,\n            \"ephemeral_model.sql\": models__ephemeral_model,\n            \"sql_header.sql\": models__sql_header,\n        }\n\n    @pytest.fixture(scope=\"class\")\n    def seeds(self):\n        return {\"sample_seed.csv\": seeds__sample_seed}\n\n    def test_none(self, project):\n        with pytest.raises(\n            DbtRuntimeError, match=\"Either --select or --inline must be passed to show\"\n        ):\n            run_dbt([\"seed\"])\n            run_dbt([\"show\"])\n\n    def test_select_model_text(self, project):\n        run_dbt([\"build\"])\n        (results, log_output) = run_dbt_and_capture([\"show\", \"--select\", \"second_model\"])\n        assert \"Previewing node 'sample_model'\" not in log_output\n        assert \"Previewing node 'second_model'\" in log_output\n        assert \"col_one\" in log_output\n        assert \"col_two\" in log_output\n        assert \"answer\" in log_output\n\n    def test_select_multiple_model_text(self, project):\n        run_dbt([\"build\"])\n        (results, log_output) = run_dbt_and_capture(\n            [\"show\", \"--select\", \"sample_model second_model\"]\n        )\n        assert \"Previewing node 'sample_model'\" in log_output\n        assert \"sample_num\" in log_output\n        assert \"sample_bool\" in log_output\n\n    def test_select_single_model_json(self, project):\n        run_dbt([\"build\"])\n        (results, log_output) = run_dbt_and_capture(\n            [\"show\", \"--select\", \"sample_model\", \"--output\", \"json\"]\n        )\n        assert \"Previewing node 'sample_model'\" not in log_output\n        assert \"sample_num\" in log_output\n        assert \"sample_bool\" in log_output\n\n    def test_inline_pass(self, project):\n        run_dbt([\"build\"])\n        (results, log_output) = run_dbt_and_capture(\n            [\"show\", \"--inline\", \"select * from {{ ref('sample_model') }}\"]\n        )\n        assert \"Previewing inline node\" in log_output\n        assert \"sample_num\" in log_output\n        assert \"sample_bool\" in log_output\n\n    def test_inline_fail(self, project):\n        with pytest.raises(DbtBaseException, match=\"Error parsing inline query\"):\n            run_dbt([\"show\", \"--inline\", \"select * from {{ ref('third_model') }}\"])\n\n    def test_inline_fail_database_error(self, project):\n        with pytest.raises(DbtRuntimeError, match=\"Database Error\"):\n            run_dbt([\"show\", \"--inline\", \"slect asdlkjfsld;j\"])\n\n    def test_ephemeral_model(self, project):\n        run_dbt([\"build\"])\n        (results, log_output) = run_dbt_and_capture([\"show\", \"--select\", \"ephemeral_model\"])\n        assert \"col_deci\" in log_output\n\n    def test_second_ephemeral_model(self, project):\n        run_dbt([\"build\"])\n        (results, log_output) = run_dbt_and_capture(\n            [\"show\", \"--inline\", models__second_ephemeral_model]\n  
      )\n        assert \"col_hundo\" in log_output\n\n    # test_limit tests ConnectionWrapper.fetchmany()\n    @pytest.mark.parametrize(\n        \"args,expected\",\n        [\n            ([], 5),  # default limit\n            ([\"--limit\", 3], 3),  # fetch 3 rows\n            ([\"--limit\", -1], 7),  # fetch all rows\n        ],\n    )\n    def test_limit(self, project, args, expected):\n        run_dbt([\"build\"])\n        dbt_args = [\"show\", \"--inline\", models__second_ephemeral_model, *args]\n        results, log_output = run_dbt_and_capture(dbt_args)\n        assert len(results.results[0].agate_table) == expected\n\n    def test_seed(self, project):\n        (results, log_output) = run_dbt_and_capture([\"show\", \"--select\", \"sample_seed\"])\n        assert \"Previewing node 'sample_seed'\" in log_output\n\n    def test_sql_header(self, project):\n        run_dbt([\"build\"])\n        (results, log_output) = run_dbt_and_capture([\"show\", \"--select\", \"sql_header\"])\n        assert \"Asia/Kolkata\" in log_output\n\n\nclass TestShowModelVersions:\n    @pytest.fixture(scope=\"class\")\n    def models(self):\n        return {\n            \"schema.yml\": schema_yml,\n            \"sample_model.sql\": models__sample_model,\n            \"sample_model_v2.sql\": models__second_model,\n        }\n\n    @pytest.fixture(scope=\"class\")\n    def seeds(self):\n        return {\"sample_seed.csv\": seeds__sample_seed}\n\n    def test_version_unspecified(self, project):\n        run_dbt([\"build\"])\n        (results, log_output) = run_dbt_and_capture([\"show\", \"--select\", \"sample_model\"])\n        assert \"Previewing node 'sample_model.v1'\" in log_output\n        assert \"Previewing node 'sample_model.v2'\" in log_output\n\n    def test_none(self, project):\n        run_dbt([\"build\"])\n        (results, log_output) = run_dbt_and_capture([\"show\", \"--select\", \"sample_model.v2\"])\n        assert \"Previewing node 'sample_model.v1'\" not in log_output\n        assert \"Previewing node 'sample_model.v2'\" in log_output\n\n\nclass TestShowPrivateModel:\n    @pytest.fixture(scope=\"class\")\n    def models(self):\n        return {\n            \"schema.yml\": private_model_yml,\n            \"private_model.sql\": models__sample_model,\n        }\n\n    @pytest.fixture(scope=\"class\")\n    def seeds(self):\n        return {\"sample_seed.csv\": seeds__sample_seed}\n\n    def test_version_unspecified(self, project):\n        run_dbt([\"build\"])\n        run_dbt([\"show\", \"--inline\", \"select * from {{ ref('private_model') }}\"])\n"
  },
  {
    "path": "tests/functional/adapter/simple_seed/seed_bom.csv",
    "content": "﻿seed_id,first_name,email,ip_address,birthday\n1,Larry,lking0@miitbeian.gov.cn,69.135.206.194,2008-09-12 19:08:31\n2,Larry,lperkins1@toplist.cz,64.210.133.162,1978-05-09 04:15:14\n3,Anna,amontgomery2@miitbeian.gov.cn,168.104.64.114,2011-10-16 04:07:57\n4,Sandra,sgeorge3@livejournal.com,229.235.252.98,1973-07-19 10:52:43\n5,Fred,fwoods4@google.cn,78.229.170.124,2012-09-30 16:38:29\n6,Stephen,shanson5@livejournal.com,182.227.157.105,1995-11-07 21:40:50\n7,William,wmartinez6@upenn.edu,135.139.249.50,1982-09-05 03:11:59\n8,Jessica,jlong7@hao123.com,203.62.178.210,1991-10-16 11:03:15\n9,Douglas,dwhite8@tamu.edu,178.187.247.1,1979-10-01 09:49:48\n10,Lisa,lcoleman9@nydailynews.com,168.234.128.249,2011-05-26 07:45:49\n11,Ralph,rfieldsa@home.pl,55.152.163.149,1972-11-18 19:06:11\n12,Louise,lnicholsb@samsung.com,141.116.153.154,2014-11-25 20:56:14\n13,Clarence,cduncanc@sfgate.com,81.171.31.133,2011-11-17 07:02:36\n14,Daniel,dfranklind@omniture.com,8.204.211.37,1980-09-13 00:09:04\n15,Katherine,klanee@auda.org.au,176.96.134.59,1997-08-22 19:36:56\n16,Billy,bwardf@wikia.com,214.108.78.85,2003-10-19 02:14:47\n17,Annie,agarzag@ocn.ne.jp,190.108.42.70,1988-10-28 15:12:35\n18,Shirley,scolemanh@fastcompany.com,109.251.164.84,1988-08-24 10:50:57\n19,Roger,rfrazieri@scribd.com,38.145.218.108,1985-12-31 15:17:15\n20,Lillian,lstanleyj@goodreads.com,47.57.236.17,1970-06-08 02:09:05\n21,Aaron,arodriguezk@nps.gov,205.245.118.221,1985-10-11 23:07:49\n22,Patrick,pparkerl@techcrunch.com,19.8.100.182,2006-03-29 12:53:56\n23,Phillip,pmorenom@intel.com,41.38.254.103,2011-11-07 15:35:43\n24,Henry,hgarcian@newsvine.com,1.191.216.252,2008-08-28 08:30:44\n25,Irene,iturnero@opera.com,50.17.60.190,1994-04-01 07:15:02\n26,Andrew,adunnp@pen.io,123.52.253.176,2000-11-01 06:03:25\n27,David,dgutierrezq@wp.com,238.23.203.42,1988-01-25 07:29:18\n28,Henry,hsanchezr@cyberchimps.com,248.102.2.185,1983-01-01 13:36:37\n29,Evelyn,epetersons@gizmodo.com,32.80.46.119,1979-07-16 17:24:12\n30,Tammy,tmitchellt@purevolume.com,249.246.167.88,2001-04-03 10:00:23\n31,Jacqueline,jlittleu@domainmarket.com,127.181.97.47,1986-02-11 21:35:50\n32,Earl,eortizv@opera.com,166.47.248.240,1996-07-06 08:16:27\n33,Juan,jgordonw@sciencedirect.com,71.77.2.200,1987-01-31 03:46:44\n34,Diane,dhowellx@nyu.edu,140.94.133.12,1994-06-11 02:30:05\n35,Randy,rkennedyy@microsoft.com,73.255.34.196,2005-05-26 20:28:39\n36,Janice,jriveraz@time.com,22.214.227.32,1990-02-09 04:16:52\n37,Laura,lperry10@diigo.com,159.148.145.73,2015-03-17 05:59:25\n38,Gary,gray11@statcounter.com,40.193.124.56,1970-01-27 10:04:51\n39,Jesse,jmcdonald12@typepad.com,31.7.86.103,2009-03-14 08:14:29\n40,Sandra,sgonzalez13@goodreads.com,223.80.168.239,1993-05-21 14:08:54\n41,Scott,smoore14@archive.org,38.238.46.83,1980-08-30 11:16:56\n42,Phillip,pevans15@cisco.com,158.234.59.34,2011-12-15 23:26:31\n43,Steven,sriley16@google.ca,90.247.57.68,2011-10-29 19:03:28\n44,Deborah,dbrown17@hexun.com,179.125.143.240,1995-04-10 14:36:07\n45,Lori,lross18@ow.ly,64.80.162.180,1980-12-27 16:49:15\n46,Sean,sjackson19@tumblr.com,240.116.183.69,1988-06-12 21:24:45\n47,Terry,tbarnes1a@163.com,118.38.213.137,1997-09-22 16:43:19\n48,Dorothy,dross1b@ebay.com,116.81.76.49,2005-02-28 13:33:24\n49,Samuel,swashington1c@house.gov,38.191.253.40,1989-01-19 21:15:48\n50,Ralph,rcarter1d@tinyurl.com,104.84.60.174,2007-08-11 10:21:49\n51,Wayne,whudson1e@princeton.edu,90.61.24.102,1983-07-03 16:58:12\n52,Rose,rjames1f@plala.or.jp,240.83.81.10,1995-06-08 
11:46:23\n53,Louise,lcox1g@theglobeandmail.com,105.11.82.145,2016-09-19 14:45:51\n54,Kenneth,kjohnson1h@independent.co.uk,139.5.45.94,1976-08-17 11:26:19\n55,Donna,dbrown1i@amazon.co.uk,19.45.169.45,2006-05-27 16:51:40\n56,Johnny,jvasquez1j@trellian.com,118.202.238.23,1975-11-17 08:42:32\n57,Patrick,pramirez1k@tamu.edu,231.25.153.198,1997-08-06 11:51:09\n58,Helen,hlarson1l@prweb.com,8.40.21.39,1993-08-04 19:53:40\n59,Patricia,pspencer1m@gmpg.org,212.198.40.15,1977-08-03 16:37:27\n60,Joseph,jspencer1n@marriott.com,13.15.63.238,2005-07-23 20:22:06\n61,Phillip,pschmidt1o@blogtalkradio.com,177.98.201.190,1976-05-19 21:47:44\n62,Joan,jwebb1p@google.ru,105.229.170.71,1972-09-07 17:53:47\n63,Phyllis,pkennedy1q@imgur.com,35.145.8.244,2000-01-01 22:33:37\n64,Katherine,khunter1r@smh.com.au,248.168.205.32,1991-01-09 06:40:24\n65,Laura,lvasquez1s@wiley.com,128.129.115.152,1997-10-23 12:04:56\n66,Juan,jdunn1t@state.gov,44.228.124.51,2004-11-10 05:07:35\n67,Judith,jholmes1u@wiley.com,40.227.179.115,1977-08-02 17:01:45\n68,Beverly,bbaker1v@wufoo.com,208.34.84.59,2016-03-06 20:07:23\n69,Lawrence,lcarr1w@flickr.com,59.158.212.223,1988-09-13 06:07:21\n70,Gloria,gwilliams1x@mtv.com,245.231.88.33,1995-03-18 22:32:46\n71,Steven,ssims1y@cbslocal.com,104.50.58.255,2001-08-05 21:26:20\n72,Betty,bmills1z@arstechnica.com,103.177.214.220,1981-12-14 21:26:54\n73,Mildred,mfuller20@prnewswire.com,151.158.8.130,2000-04-19 10:13:55\n74,Donald,dday21@icq.com,9.178.102.255,1972-12-03 00:58:24\n75,Eric,ethomas22@addtoany.com,85.2.241.227,1992-11-01 05:59:30\n76,Joyce,jarmstrong23@sitemeter.com,169.224.20.36,1985-10-24 06:50:01\n77,Maria,mmartinez24@amazonaws.com,143.189.167.135,2005-10-05 05:17:42\n78,Harry,hburton25@youtube.com,156.47.176.237,1978-03-26 05:53:33\n79,Kevin,klawrence26@hao123.com,79.136.183.83,1994-10-12 04:38:52\n80,David,dhall27@prweb.com,133.149.172.153,1976-12-15 16:24:24\n81,Kathy,kperry28@twitter.com,229.242.72.228,1979-03-04 02:58:56\n82,Adam,aprice29@elegantthemes.com,13.145.21.10,1982-11-07 11:46:59\n83,Brandon,bgriffin2a@va.gov,73.249.128.212,2013-10-30 05:30:36\n84,Henry,hnguyen2b@discovery.com,211.36.214.242,1985-01-09 06:37:27\n85,Eric,esanchez2c@edublogs.org,191.166.188.251,2004-05-01 23:21:42\n86,Jason,jlee2d@jimdo.com,193.92.16.182,1973-01-08 09:05:39\n87,Diana,drichards2e@istockphoto.com,19.130.175.245,1994-10-05 22:50:49\n88,Andrea,awelch2f@abc.net.au,94.155.233.96,2002-04-26 08:41:44\n89,Louis,lwagner2g@miitbeian.gov.cn,26.217.34.111,2003-08-25 07:56:39\n90,Jane,jsims2h@seesaa.net,43.4.220.135,1987-03-20 20:39:04\n91,Larry,lgrant2i@si.edu,97.126.79.34,2000-09-07 20:26:19\n92,Louis,ldean2j@prnewswire.com,37.148.40.127,2011-09-16 20:12:14\n93,Jennifer,jcampbell2k@xing.com,38.106.254.142,1988-07-15 05:06:49\n94,Wayne,wcunningham2l@google.com.hk,223.28.26.187,2009-12-15 06:16:54\n95,Lori,lstevens2m@icq.com,181.250.181.58,1984-10-28 03:29:19\n96,Judy,jsimpson2n@marriott.com,180.121.239.219,1986-02-07 15:18:10\n97,Phillip,phoward2o@usa.gov,255.247.0.175,2002-12-26 08:44:45\n98,Gloria,gwalker2p@usa.gov,156.140.7.128,1997-10-04 07:58:58\n99,Paul,pjohnson2q@umn.edu,183.59.198.197,1991-11-14 12:33:55\n100,Frank,fgreene2r@blogspot.com,150.143.68.121,2010-06-12 23:55:39\n101,Deborah,dknight2s@reverbnation.com,222.131.211.191,1970-07-08 08:54:23\n102,Sandra,sblack2t@tripadvisor.com,254.183.128.254,2000-04-12 02:39:36\n103,Edward,eburns2u@dailymotion.com,253.89.118.18,1993-10-10 10:54:01\n104,Anthony,ayoung2v@ustream.tv,118.4.193.176,1978-08-26 
17:07:29\n105,Donald,dlawrence2w@wp.com,139.200.159.227,2007-07-21 20:56:20\n106,Matthew,mfreeman2x@google.fr,205.26.239.92,2014-12-05 17:05:39\n107,Sean,ssanders2y@trellian.com,143.89.82.108,1993-07-14 21:45:02\n108,Sharon,srobinson2z@soundcloud.com,66.234.247.54,1977-04-06 19:07:03\n109,Jennifer,jwatson30@t-online.de,196.102.127.7,1998-03-07 05:12:23\n110,Clarence,cbrooks31@si.edu,218.93.234.73,2002-11-06 17:22:25\n111,Jose,jflores32@goo.gl,185.105.244.231,1995-01-05 06:32:21\n112,George,glee33@adobe.com,173.82.249.196,2015-01-04 02:47:46\n113,Larry,lhill34@linkedin.com,66.5.206.195,2010-11-02 10:21:17\n114,Marie,mmeyer35@mysql.com,151.152.88.107,1990-05-22 20:52:51\n115,Clarence,cwebb36@skype.com,130.198.55.217,1972-10-27 07:38:54\n116,Sarah,scarter37@answers.com,80.89.18.153,1971-08-24 19:29:30\n117,Henry,hhughes38@webeden.co.uk,152.60.114.174,1973-01-27 09:00:42\n118,Teresa,thenry39@hao123.com,32.187.239.106,2015-11-06 01:48:44\n119,Billy,bgutierrez3a@sun.com,52.37.70.134,2002-03-19 03:20:19\n120,Anthony,agibson3b@github.io,154.251.232.213,1991-04-19 01:08:15\n121,Sandra,sromero3c@wikia.com,44.124.171.2,1998-09-06 20:30:34\n122,Paula,pandrews3d@blogs.com,153.142.118.226,2003-06-24 16:31:24\n123,Terry,tbaker3e@csmonitor.com,99.120.45.219,1970-12-09 23:57:21\n124,Lois,lwilson3f@reuters.com,147.44.171.83,1971-01-09 22:28:51\n125,Sara,smorgan3g@nature.com,197.67.192.230,1992-01-28 20:33:24\n126,Charles,ctorres3h@china.com.cn,156.115.216.2,1993-10-02 19:36:34\n127,Richard,ralexander3i@marriott.com,248.235.180.59,1999-02-03 18:40:55\n128,Christina,charper3j@cocolog-nifty.com,152.114.116.129,1978-09-13 00:37:32\n129,Steve,sadams3k@economist.com,112.248.91.98,2004-03-21 09:07:43\n130,Katherine,krobertson3l@ow.ly,37.220.107.28,1977-03-18 19:28:50\n131,Donna,dgibson3m@state.gov,222.218.76.221,1999-02-01 06:46:16\n132,Christina,cwest3n@mlb.com,152.114.6.160,1979-12-24 15:30:35\n133,Sandra,swillis3o@meetup.com,180.71.49.34,1984-09-27 08:05:54\n134,Clarence,cedwards3p@smugmug.com,10.64.180.186,1979-04-16 16:52:10\n135,Ruby,rjames3q@wp.com,98.61.54.20,2007-01-13 14:25:52\n136,Sarah,smontgomery3r@tripod.com,91.45.164.172,2009-07-25 04:34:30\n137,Sarah,soliver3s@eventbrite.com,30.106.39.146,2012-05-09 22:12:33\n138,Deborah,dwheeler3t@biblegateway.com,59.105.213.173,1999-11-09 08:08:44\n139,Deborah,dray3u@i2i.jp,11.108.186.217,2014-02-04 03:15:19\n140,Paul,parmstrong3v@alexa.com,6.250.59.43,2009-12-21 10:08:53\n141,Aaron,abishop3w@opera.com,207.145.249.62,1996-04-25 23:20:23\n142,Henry,hsanders3x@google.ru,140.215.203.171,2012-01-29 11:52:32\n143,Anne,aanderson3y@1688.com,74.150.102.118,1982-04-03 13:46:17\n144,Victor,vmurphy3z@hugedomains.com,222.155.99.152,1987-11-03 19:58:41\n145,Evelyn,ereid40@pbs.org,249.122.33.117,1977-12-14 17:09:57\n146,Brian,bgonzalez41@wikia.com,246.254.235.141,1991-02-24 00:45:58\n147,Sandra,sgray42@squarespace.com,150.73.28.159,1972-07-28 17:26:32\n148,Alice,ajones43@a8.net,78.253.12.177,2002-12-05 16:57:46\n149,Jessica,jhanson44@mapquest.com,87.229.30.160,1994-01-30 11:40:04\n150,Louise,lbailey45@reuters.com,191.219.31.101,2011-09-07 21:11:45\n151,Christopher,cgonzalez46@printfriendly.com,83.137.213.239,1984-10-24 14:58:04\n152,Gregory,gcollins47@yandex.ru,28.176.10.115,1998-07-25 17:17:10\n153,Jane,jperkins48@usnews.com,46.53.164.159,1979-08-19 15:25:00\n154,Phyllis,plong49@yahoo.co.jp,208.140.88.2,1985-07-06 02:16:36\n155,Adam,acarter4a@scribd.com,78.48.148.204,2005-07-20 03:31:09\n156,Frank,fweaver4b@angelfire.com,199.180.255.224,2011-03-04 
23:07:54\n157,Ronald,rmurphy4c@cloudflare.com,73.42.97.231,1991-01-11 10:39:41\n158,Richard,rmorris4d@e-recht24.de,91.9.97.223,2009-01-17 21:05:15\n159,Rose,rfoster4e@woothemes.com,203.169.53.16,1991-04-21 02:09:38\n160,George,ggarrett4f@uiuc.edu,186.61.5.167,1989-11-11 11:29:42\n161,Victor,vhamilton4g@biblegateway.com,121.229.138.38,2012-06-22 18:01:23\n162,Mark,mbennett4h@businessinsider.com,209.184.29.203,1980-04-16 15:26:34\n163,Martin,mwells4i@ifeng.com,97.223.55.105,2010-05-26 14:08:18\n164,Diana,dstone4j@google.ru,90.155.52.47,2013-02-11 00:14:54\n165,Walter,wferguson4k@blogger.com,30.63.212.44,1986-02-20 17:46:46\n166,Denise,dcoleman4l@vistaprint.com,10.209.153.77,1992-05-13 20:14:14\n167,Philip,pknight4m@xing.com,15.28.135.167,2000-09-11 18:41:13\n168,Russell,rcarr4n@youtube.com,113.55.165.50,2008-07-10 17:49:27\n169,Donna,dburke4o@dion.ne.jp,70.0.105.111,1992-02-10 17:24:58\n170,Anne,along4p@squidoo.com,36.154.58.107,2012-08-19 23:35:31\n171,Clarence,cbanks4q@webeden.co.uk,94.57.53.114,1972-03-11 21:46:44\n172,Betty,bbowman4r@cyberchimps.com,178.115.209.69,2013-01-13 21:34:51\n173,Andrew,ahudson4s@nytimes.com,84.32.252.144,1998-09-15 14:20:04\n174,Keith,kgordon4t@cam.ac.uk,189.237.211.102,2009-01-22 05:34:38\n175,Patrick,pwheeler4u@mysql.com,47.22.117.226,1984-09-05 22:33:15\n176,Jesse,jfoster4v@mapquest.com,229.95.131.46,1990-01-20 12:19:15\n177,Arthur,afisher4w@jugem.jp,107.255.244.98,1983-10-13 11:08:46\n178,Nicole,nryan4x@wsj.com,243.211.33.221,1974-05-30 23:19:14\n179,Bruce,bjohnson4y@sfgate.com,17.41.200.101,1992-09-23 02:02:19\n180,Terry,tcox4z@reference.com,20.189.120.106,1982-02-13 12:43:14\n181,Ashley,astanley50@kickstarter.com,86.3.56.98,1976-05-09 01:27:16\n182,Michael,mrivera51@about.me,72.118.249.0,1971-11-11 17:28:37\n183,Steven,sgonzalez52@mozilla.org,169.112.247.47,2002-08-24 14:59:25\n184,Kathleen,kfuller53@bloglovin.com,80.93.59.30,2002-03-11 13:41:29\n185,Nicole,nhenderson54@usda.gov,39.253.60.30,1995-04-24 05:55:07\n186,Ralph,rharper55@purevolume.com,167.147.142.189,1980-02-10 18:35:45\n187,Heather,hcunningham56@photobucket.com,96.222.196.229,2007-06-15 05:37:50\n188,Nancy,nlittle57@cbc.ca,241.53.255.175,2007-07-12 23:42:48\n189,Juan,jramirez58@pinterest.com,190.128.84.27,1978-11-07 23:37:37\n190,Beverly,bfowler59@chronoengine.com,54.144.230.49,1979-03-31 23:27:28\n191,Shirley,sstevens5a@prlog.org,200.97.231.248,2011-12-06 07:08:50\n192,Annie,areyes5b@squidoo.com,223.32.182.101,2011-05-28 02:42:09\n193,Jack,jkelley5c@tiny.cc,47.34.118.150,1981-12-05 17:31:40\n194,Keith,krobinson5d@1und1.de,170.210.209.31,1999-03-09 11:05:43\n195,Joseph,jmiller5e@google.com.au,136.74.212.139,1984-10-08 13:18:20\n196,Annie,aday5f@blogspot.com,71.99.186.69,1986-02-18 12:27:34\n197,Nancy,nperez5g@liveinternet.ru,28.160.6.107,1983-10-20 17:51:20\n198,Tammy,tward5h@ucoz.ru,141.43.164.70,1980-03-31 04:45:29\n199,Doris,dryan5i@ted.com,239.117.202.188,1985-07-03 03:17:53\n200,Rose,rmendoza5j@photobucket.com,150.200.206.79,1973-04-21 21:36:40\n201,Cynthia,cbutler5k@hubpages.com,80.153.174.161,2001-01-20 01:42:26\n202,Samuel,soliver5l@people.com.cn,86.127.246.140,1970-09-02 02:19:00\n203,Carl,csanchez5m@mysql.com,50.149.237.107,1993-12-01 07:02:09\n204,Kathryn,kowens5n@geocities.jp,145.166.205.201,2004-07-06 18:39:33\n205,Nicholas,nnichols5o@parallels.com,190.240.66.170,2014-11-11 18:52:19\n206,Keith,kwillis5p@youtube.com,181.43.206.100,1998-06-13 06:30:51\n207,Justin,jwebb5q@intel.com,211.54.245.74,2000-11-04 16:58:26\n208,Gary,ghicks5r@wikipedia.org,196.154.213.104,1992-12-01 
19:48:28\n209,Martin,mpowell5s@flickr.com,153.67.12.241,1983-06-30 06:24:32\n210,Brenda,bkelley5t@xinhuanet.com,113.100.5.172,2005-01-08 20:50:22\n211,Edward,eray5u@a8.net,205.187.246.65,2011-09-26 08:04:44\n212,Steven,slawson5v@senate.gov,238.150.250.36,1978-11-22 02:48:09\n213,Robert,rthompson5w@furl.net,70.7.89.236,2001-09-12 08:52:07\n214,Jack,jporter5x@diigo.com,220.172.29.99,1976-07-26 14:29:21\n215,Lisa,ljenkins5y@oakley.com,150.151.170.180,2010-03-20 19:21:16\n216,Theresa,tbell5z@mayoclinic.com,247.25.53.173,2001-03-11 05:36:40\n217,Jimmy,jstephens60@weather.com,145.101.93.235,1983-04-12 09:35:30\n218,Louis,lhunt61@amazon.co.jp,78.137.6.253,1997-08-29 19:34:34\n219,Lawrence,lgilbert62@ted.com,243.132.8.78,2015-04-08 22:06:56\n220,David,dgardner63@4shared.com,204.40.46.136,1971-07-09 03:29:11\n221,Charles,ckennedy64@gmpg.org,211.83.233.2,2011-02-26 11:55:04\n222,Lillian,lbanks65@msu.edu,124.233.12.80,2010-05-16 20:29:02\n223,Ernest,enguyen66@baidu.com,82.45.128.148,1996-07-04 10:07:04\n224,Ryan,rrussell67@cloudflare.com,202.53.240.223,1983-08-05 12:36:29\n225,Donald,ddavis68@ustream.tv,47.39.218.137,1989-05-27 02:30:56\n226,Joe,jscott69@blogspot.com,140.23.131.75,1973-03-16 12:21:31\n227,Anne,amarshall6a@google.ca,113.162.200.197,1988-12-09 03:38:29\n228,Willie,wturner6b@constantcontact.com,85.83.182.249,1991-10-06 01:51:10\n229,Nicole,nwilson6c@sogou.com,30.223.51.135,1977-05-29 19:54:56\n230,Janet,jwheeler6d@stumbleupon.com,153.194.27.144,2011-03-13 12:48:47\n231,Lois,lcarr6e@statcounter.com,0.41.36.53,1993-02-06 04:52:01\n232,Shirley,scruz6f@tmall.com,37.156.39.223,2007-02-18 17:47:01\n233,Patrick,pford6g@reverbnation.com,36.198.200.89,1977-03-06 15:47:24\n234,Lisa,lhudson6h@usatoday.com,134.213.58.137,2014-10-28 01:56:56\n235,Pamela,pmartinez6i@opensource.org,5.151.127.202,1987-11-30 16:44:47\n236,Larry,lperez6j@infoseek.co.jp,235.122.96.148,1979-01-18 06:33:45\n237,Pamela,pramirez6k@census.gov,138.233.34.163,2012-01-29 10:35:20\n238,Daniel,dcarr6l@php.net,146.21.152.242,1984-11-17 08:22:59\n239,Patrick,psmith6m@indiegogo.com,136.222.199.36,2001-05-30 22:16:44\n240,Raymond,rhenderson6n@hc360.com,116.31.112.38,2000-01-05 20:35:41\n241,Teresa,treynolds6o@miitbeian.gov.cn,198.126.205.220,1996-11-08 01:27:31\n242,Johnny,jmason6p@flickr.com,192.8.232.114,2013-05-14 05:35:50\n243,Angela,akelly6q@guardian.co.uk,234.116.60.197,1977-08-20 02:05:17\n244,Douglas,dcole6r@cmu.edu,128.135.212.69,2016-10-26 17:40:36\n245,Frances,fcampbell6s@twitpic.com,94.22.243.235,1987-04-26 07:07:13\n246,Donna,dgreen6t@chron.com,227.116.46.107,2011-07-25 12:59:54\n247,Benjamin,bfranklin6u@redcross.org,89.141.142.89,1974-05-03 20:28:18\n248,Randy,rpalmer6v@rambler.ru,70.173.63.178,2011-12-20 17:40:18\n249,Melissa,mmurray6w@bbb.org,114.234.118.137,1991-02-26 12:45:44\n250,Jean,jlittle6x@epa.gov,141.21.163.254,1991-08-16 04:57:09\n251,Daniel,dolson6y@nature.com,125.75.104.97,2010-04-23 06:25:54\n252,Kathryn,kwells6z@eventbrite.com,225.104.28.249,2015-01-31 02:21:50\n253,Theresa,tgonzalez70@ox.ac.uk,91.93.156.26,1971-12-11 10:31:31\n254,Beverly,broberts71@bluehost.com,244.40.158.89,2013-09-21 13:02:31\n255,Pamela,pmurray72@netscape.com,218.54.95.216,1985-04-16 00:34:00\n256,Timothy,trichardson73@amazonaws.com,235.49.24.229,2000-11-11 09:48:28\n257,Mildred,mpalmer74@is.gd,234.125.95.132,1992-05-25 02:25:02\n258,Jessica,jcampbell75@google.it,55.98.30.140,2014-08-26 00:26:34\n259,Beverly,bthomas76@cpanel.net,48.78.228.176,1970-08-18 10:40:05\n260,Eugene,eward77@cargocollective.com,139.226.204.2,1996-12-04 
23:17:00\n261,Andrea,aallen78@webnode.com,160.31.214.38,2009-07-06 07:22:37\n262,Justin,jruiz79@merriam-webster.com,150.149.246.122,2005-06-06 11:44:19\n263,Kenneth,kedwards7a@networksolutions.com,98.82.193.128,2001-07-03 02:00:10\n264,Rachel,rday7b@miibeian.gov.cn,114.15.247.221,1994-08-18 19:45:40\n265,Russell,rmiller7c@instagram.com,184.130.152.253,1977-11-06 01:58:12\n266,Bonnie,bhudson7d@cornell.edu,235.180.186.206,1990-12-03 22:45:24\n267,Raymond,rknight7e@yandex.ru,161.2.44.252,1995-08-25 04:31:19\n268,Bonnie,brussell7f@elpais.com,199.237.57.207,1991-03-29 08:32:06\n269,Marie,mhenderson7g@elpais.com,52.203.131.144,2004-06-04 21:50:28\n270,Alan,acarr7h@trellian.com,147.51.205.72,2005-03-03 10:51:31\n271,Barbara,bturner7i@hugedomains.com,103.160.110.226,2004-08-04 13:42:40\n272,Christina,cdaniels7j@census.gov,0.238.61.251,1972-10-18 12:47:33\n273,Jeremy,jgomez7k@reuters.com,111.26.65.56,2013-01-13 10:41:35\n274,Laura,lwood7l@icio.us,149.153.38.205,2011-06-25 09:33:59\n275,Matthew,mbowman7m@auda.org.au,182.138.206.172,1999-03-05 03:25:36\n276,Denise,dparker7n@icq.com,0.213.88.138,2011-11-04 09:43:06\n277,Phillip,pparker7o@discuz.net,219.242.165.240,1973-10-19 04:22:29\n278,Joan,jpierce7p@salon.com,63.31.213.202,1989-04-09 22:06:24\n279,Irene,ibaker7q@cbc.ca,102.33.235.114,1992-09-04 13:00:57\n280,Betty,bbowman7r@ted.com,170.91.249.242,2015-09-28 08:14:22\n281,Teresa,truiz7s@boston.com,82.108.158.207,1999-07-18 05:17:09\n282,Helen,hbrooks7t@slideshare.net,102.87.162.187,2003-01-06 15:45:29\n283,Karen,kgriffin7u@wunderground.com,43.82.44.184,2010-05-28 01:56:37\n284,Lisa,lfernandez7v@mtv.com,200.238.218.220,1993-04-03 20:33:51\n285,Jesse,jlawrence7w@timesonline.co.uk,95.122.105.78,1990-01-05 17:28:43\n286,Terry,tross7x@macromedia.com,29.112.114.133,2009-08-29 21:32:17\n287,Angela,abradley7y@icq.com,177.44.27.72,1989-10-04 21:46:06\n288,Maria,mhart7z@dailymotion.com,55.27.55.202,1975-01-21 01:22:57\n289,Raymond,randrews80@pinterest.com,88.90.78.67,1992-03-16 21:37:40\n290,Kathy,krice81@bluehost.com,212.63.196.102,2000-12-14 03:06:44\n291,Cynthia,cramos82@nymag.com,107.89.190.6,2005-06-28 02:02:33\n292,Kimberly,kjones83@mysql.com,86.169.101.101,2007-06-13 22:56:49\n293,Timothy,thansen84@microsoft.com,108.100.254.90,2003-04-04 10:31:57\n294,Carol,cspencer85@berkeley.edu,75.118.144.187,1999-03-30 14:53:21\n295,Louis,lmedina86@latimes.com,141.147.163.24,1991-04-11 17:53:13\n296,Margaret,mcole87@google.fr,53.184.26.83,1991-12-19 01:54:10\n297,Mary,mgomez88@yellowpages.com,208.56.57.99,1976-05-21 18:05:08\n298,Amanda,aanderson89@geocities.com,147.73.15.252,1987-08-22 15:05:28\n299,Kathryn,kgarrett8a@nature.com,27.29.177.220,1976-07-15 04:25:04\n300,Dorothy,dmason8b@shareasale.com,106.210.99.193,1990-09-03 21:39:31\n301,Lois,lkennedy8c@amazon.de,194.169.29.187,2007-07-29 14:09:31\n302,Irene,iburton8d@washingtonpost.com,196.143.110.249,2013-09-05 11:32:46\n303,Betty,belliott8e@wired.com,183.105.222.199,1979-09-19 19:29:13\n304,Bobby,bmeyer8f@census.gov,36.13.161.145,2014-05-24 14:34:39\n305,Ann,amorrison8g@sfgate.com,72.154.54.137,1978-10-05 14:22:34\n306,Daniel,djackson8h@wunderground.com,144.95.32.34,1990-07-27 13:23:05\n307,Joe,jboyd8i@alibaba.com,187.105.86.178,2011-09-28 16:46:32\n308,Ralph,rdunn8j@fc2.com,3.19.87.255,1984-10-18 08:00:40\n309,Craig,ccarter8k@gizmodo.com,235.152.76.215,1998-07-04 12:15:21\n310,Paula,pdean8l@hhs.gov,161.100.173.197,1973-02-13 09:38:55\n311,Andrew,agarrett8m@behance.net,199.253.123.218,1991-02-14 13:36:32\n312,Janet,jhowell8n@alexa.com,39.189.139.79,2012-11-24 
20:17:33\n313,Keith,khansen8o@godaddy.com,116.186.223.196,1987-08-23 21:22:05\n314,Nicholas,nedwards8p@state.gov,142.175.142.11,1977-03-28 18:27:27\n315,Jacqueline,jallen8q@oaic.gov.au,189.66.135.192,1994-10-26 11:44:26\n316,Frank,fgardner8r@mapy.cz,154.77.119.169,1983-01-29 19:19:51\n317,Eric,eharrison8s@google.cn,245.139.65.123,1984-02-04 09:54:36\n318,Gregory,gcooper8t@go.com,171.147.0.221,2004-06-14 05:22:08\n319,Jean,jfreeman8u@rakuten.co.jp,67.243.121.5,1977-01-07 18:23:43\n320,Juan,jlewis8v@shinystat.com,216.181.171.189,2001-08-23 17:32:43\n321,Randy,rwilliams8w@shinystat.com,105.152.146.28,1983-02-17 00:05:50\n322,Stephen,shart8x@sciencedirect.com,196.131.205.148,2004-02-15 10:12:03\n323,Annie,ahunter8y@example.com,63.36.34.103,2003-07-23 21:15:25\n324,Melissa,mflores8z@cbc.ca,151.230.217.90,1983-11-02 14:53:56\n325,Jane,jweaver90@about.me,0.167.235.217,1987-07-29 00:13:44\n326,Anthony,asmith91@oracle.com,97.87.48.41,2001-05-31 18:44:11\n327,Terry,tdavis92@buzzfeed.com,46.20.12.51,2015-09-12 23:13:55\n328,Brandon,bmontgomery93@gravatar.com,252.101.48.186,2010-10-28 08:26:27\n329,Chris,cmurray94@bluehost.com,25.158.167.97,2004-05-05 16:10:31\n330,Denise,dfuller95@hugedomains.com,216.210.149.28,1979-04-20 08:57:24\n331,Arthur,amcdonald96@sakura.ne.jp,206.42.36.213,2009-08-15 03:26:16\n332,Jesse,jhoward97@google.cn,46.181.118.30,1974-04-18 14:08:41\n333,Frank,fsimpson98@domainmarket.com,163.220.211.87,2006-06-30 14:46:52\n334,Janice,jwoods99@pen.io,229.245.237.182,1988-04-06 11:52:58\n335,Rebecca,rroberts9a@huffingtonpost.com,148.96.15.80,1976-10-05 08:44:16\n336,Joshua,jray9b@opensource.org,192.253.12.198,1971-12-25 22:27:07\n337,Joyce,jcarpenter9c@statcounter.com,125.171.46.215,2001-12-31 22:08:13\n338,Andrea,awest9d@privacy.gov.au,79.101.180.201,1983-02-18 20:07:47\n339,Christine,chudson9e@yelp.com,64.198.43.56,1997-09-08 08:03:43\n340,Joe,jparker9f@earthlink.net,251.215.148.153,1973-11-04 05:08:18\n341,Thomas,tkim9g@answers.com,49.187.34.47,1991-08-07 21:13:48\n342,Janice,jdean9h@scientificamerican.com,4.197.117.16,2009-12-08 02:35:49\n343,James,jmitchell9i@umich.edu,43.121.18.147,2011-04-28 17:04:09\n344,Charles,cgardner9j@purevolume.com,197.78.240.240,1998-02-11 06:47:07\n345,Robert,rhenderson9k@friendfeed.com,215.84.180.88,2002-05-10 15:33:14\n346,Chris,cgray9l@4shared.com,249.70.192.240,1998-10-03 16:43:42\n347,Gloria,ghayes9m@hibu.com,81.103.138.26,1999-12-26 11:23:13\n348,Edward,eramirez9n@shareasale.com,38.136.90.136,2010-08-19 08:01:06\n349,Cheryl,cbutler9o@google.ca,172.180.78.172,1995-05-27 20:03:52\n350,Margaret,mwatkins9p@sfgate.com,3.20.198.6,2014-10-21 01:42:58\n351,Rebecca,rwelch9q@examiner.com,45.81.42.208,2001-02-08 12:19:06\n352,Joe,jpalmer9r@phpbb.com,163.202.92.190,1970-01-05 11:29:12\n353,Sandra,slewis9s@dyndns.org,77.215.201.236,1974-01-05 07:04:04\n354,Todd,tfranklin9t@g.co,167.125.181.82,2009-09-28 10:13:58\n355,Joseph,jlewis9u@webmd.com,244.204.6.11,1990-10-21 15:49:57\n356,Alan,aknight9v@nydailynews.com,152.197.95.83,1996-03-08 08:43:17\n357,Sharon,sdean9w@123-reg.co.uk,237.46.40.26,1985-11-30 12:09:24\n358,Annie,awright9x@cafepress.com,190.45.231.111,2000-08-24 11:56:06\n359,Diane,dhamilton9y@youtube.com,85.146.171.196,2015-02-24 02:03:57\n360,Antonio,alane9z@auda.org.au,61.63.146.203,2001-05-13 03:43:34\n361,Matthew,mallena0@hhs.gov,29.97.32.19,1973-02-19 23:43:32\n362,Bonnie,bfowlera1@soup.io,251.216.99.53,2013-08-01 15:35:41\n363,Margaret,mgraya2@examiner.com,69.255.151.79,1998-01-23 
22:24:59\n364,Joan,jwagnera3@printfriendly.com,192.166.120.61,1973-07-13 00:30:22\n365,Catherine,cperkinsa4@nytimes.com,58.21.24.214,2006-11-19 11:52:26\n366,Mark,mcartera5@cpanel.net,220.33.102.142,2007-09-09 09:43:27\n367,Paula,ppricea6@msn.com,36.182.238.124,2009-11-11 09:13:05\n368,Catherine,cgreena7@army.mil,228.203.58.19,2005-08-09 16:52:15\n369,Helen,hhamiltona8@symantec.com,155.56.194.99,2005-02-01 05:40:36\n370,Jane,jmeyera9@ezinearticles.com,133.244.113.213,2013-11-06 22:10:23\n371,Wanda,wevansaa@bloglovin.com,233.125.192.48,1994-12-26 23:43:42\n372,Mark,mmarshallab@tumblr.com,114.74.60.47,2016-09-29 18:03:01\n373,Andrew,amartinezac@google.cn,182.54.37.130,1976-06-06 17:04:17\n374,Helen,hmoralesad@e-recht24.de,42.45.4.123,1977-03-28 19:06:59\n375,Bonnie,bstoneae@php.net,196.149.79.137,1970-02-05 17:05:58\n376,Douglas,dfreemanaf@nasa.gov,215.65.124.218,2008-11-20 21:51:55\n377,Willie,wwestag@army.mil,35.189.92.118,1992-07-24 05:08:08\n378,Cheryl,cwagnerah@upenn.edu,228.239.222.141,2010-01-25 06:29:01\n379,Sandra,swardai@baidu.com,63.11.113.240,1985-05-23 08:07:37\n380,Julie,jrobinsonaj@jugem.jp,110.58.202.50,2015-03-05 09:42:07\n381,Larry,lwagnerak@shop-pro.jp,98.234.25.24,1975-07-22 22:22:02\n382,Juan,jcastilloal@yelp.com,24.174.74.202,2007-01-17 09:32:43\n383,Donna,dfrazieram@artisteer.com,205.26.147.45,1990-02-11 20:55:46\n384,Rachel,rfloresan@w3.org,109.60.216.162,1983-05-22 22:42:18\n385,Robert,rreynoldsao@theguardian.com,122.65.209.130,2009-05-01 18:02:51\n386,Donald,dbradleyap@etsy.com,42.54.35.126,1997-01-16 16:31:52\n387,Rachel,rfisheraq@nih.gov,160.243.250.45,2006-02-17 22:05:49\n388,Nicholas,nhamiltonar@princeton.edu,156.211.37.111,1976-06-21 03:36:29\n389,Timothy,twhiteas@ca.gov,36.128.23.70,1975-09-24 03:51:18\n390,Diana,dbradleyat@odnoklassniki.ru,44.102.120.184,1983-04-27 09:02:50\n391,Billy,bfowlerau@jimdo.com,91.200.68.196,1995-01-29 06:57:35\n392,Bruce,bandrewsav@ucoz.com,48.12.101.125,1992-10-27 04:31:39\n393,Linda,lromeroaw@usa.gov,100.71.233.19,1992-06-08 15:13:18\n394,Debra,dwatkinsax@ucoz.ru,52.160.233.193,2001-11-11 06:51:01\n395,Katherine,kburkeay@wix.com,151.156.242.141,2010-06-14 19:54:28\n396,Martha,mharrisonaz@youku.com,21.222.10.199,1989-10-16 14:17:55\n397,Dennis,dwellsb0@youtu.be,103.16.29.3,1985-12-21 06:05:51\n398,Gloria,grichardsb1@bloglines.com,90.147.120.234,1982-08-27 01:04:43\n399,Brenda,bfullerb2@t.co,33.253.63.90,2011-04-20 05:00:35\n400,Larry,lhendersonb3@disqus.com,88.95.132.128,1982-08-31 02:15:12\n401,Richard,rlarsonb4@wisc.edu,13.48.231.150,1979-04-15 14:08:09\n402,Terry,thuntb5@usa.gov,65.91.103.240,1998-05-15 11:50:49\n403,Harry,hburnsb6@nasa.gov,33.38.21.244,1981-04-12 14:02:20\n404,Diana,dellisb7@mlb.com,218.229.81.135,1997-01-29 00:17:25\n405,Jack,jburkeb8@tripadvisor.com,210.227.182.216,1984-03-09 17:24:03\n406,Julia,jlongb9@fotki.com,10.210.12.104,2005-10-26 03:54:13\n407,Lois,lscottba@msu.edu,188.79.136.138,1973-02-02 18:40:39\n408,Sandra,shendersonbb@shareasale.com,114.171.220.108,2012-06-09 18:22:26\n409,Irene,isanchezbc@cdbaby.com,109.255.50.119,1983-09-28 21:11:27\n410,Emily,ebrooksbd@bandcamp.com,227.81.93.79,1970-08-31 21:08:01\n411,Michelle,mdiazbe@businessweek.com,236.249.6.226,1993-05-22 08:07:07\n412,Tammy,tbennettbf@wisc.edu,145.253.239.152,1978-12-31 20:24:51\n413,Christine,cgreenebg@flickr.com,97.25.140.118,1978-07-17 12:55:30\n414,Patricia,pgarzabh@tuttocitta.it,139.246.192.211,1984-02-27 13:40:08\n415,Kimberly,kromerobi@aol.com,73.56.88.247,1976-09-16 
14:22:04\n416,George,gjohnstonbj@fda.gov,240.36.245.185,1979-07-24 14:36:02\n417,Eugene,efullerbk@sciencedaily.com,42.38.105.140,2012-09-12 01:56:41\n418,Andrea,astevensbl@goo.gl,31.152.207.204,1979-05-24 11:06:21\n419,Shirley,sreidbm@scientificamerican.com,103.60.31.241,1984-02-23 04:07:41\n420,Terry,tmorenobn@blinklist.com,92.161.34.42,1994-06-25 14:01:35\n421,Christopher,cmorenobo@go.com,158.86.176.82,1973-09-05 09:18:47\n422,Dennis,dhansonbp@ning.com,40.160.81.75,1982-01-20 10:19:41\n423,Beverly,brussellbq@de.vu,138.32.56.204,1997-11-06 07:20:19\n424,Howard,hparkerbr@163.com,103.171.134.171,2015-06-24 15:37:10\n425,Helen,hmccoybs@fema.gov,61.200.4.71,1995-06-20 08:59:10\n426,Ann,ahudsonbt@cafepress.com,239.187.71.125,1977-04-11 07:59:28\n427,Tina,twestbu@nhs.uk,80.213.117.74,1992-08-19 05:54:44\n428,Terry,tnguyenbv@noaa.gov,21.93.118.95,1991-09-19 23:22:55\n429,Ashley,aburtonbw@wix.com,233.176.205.109,2009-11-10 05:01:20\n430,Eric,emyersbx@1und1.de,168.91.212.67,1987-08-10 07:16:20\n431,Barbara,blittleby@lycos.com,242.14.189.239,2008-08-02 12:13:04\n432,Sean,sevansbz@instagram.com,14.39.177.13,2007-04-16 17:28:49\n433,Shirley,sburtonc0@newsvine.com,34.107.138.76,1980-12-10 02:19:29\n434,Patricia,pfreemanc1@so-net.ne.jp,219.213.142.117,1987-03-01 02:25:45\n435,Paula,pfosterc2@vkontakte.ru,227.14.138.141,1972-09-22 12:59:34\n436,Nicole,nstewartc3@1688.com,8.164.23.115,1998-10-27 00:10:17\n437,Earl,ekimc4@ovh.net,100.26.244.177,2013-01-22 10:05:46\n438,Beverly,breedc5@reuters.com,174.12.226.27,1974-09-22 07:29:36\n439,Lawrence,lbutlerc6@a8.net,105.164.42.164,1992-06-05 00:43:40\n440,Charles,cmoorec7@ucoz.com,252.197.131.69,1990-04-09 02:34:05\n441,Alice,alawsonc8@live.com,183.73.220.232,1989-02-28 09:11:04\n442,Dorothy,dcarpenterc9@arstechnica.com,241.47.200.14,2005-05-02 19:57:21\n443,Carolyn,cfowlerca@go.com,213.109.55.202,1978-09-10 20:18:20\n444,Anthony,alongcb@free.fr,169.221.158.204,1984-09-13 01:59:23\n445,Annie,amoorecc@e-recht24.de,50.34.148.61,2009-03-26 03:41:07\n446,Carlos,candrewscd@ihg.com,236.69.59.212,1972-03-29 22:42:48\n447,Beverly,bramosce@google.ca,164.250.184.49,1982-11-10 04:34:01\n448,Teresa,tlongcf@umich.edu,174.88.53.223,1987-05-17 12:48:00\n449,Roy,rboydcg@uol.com.br,91.58.243.215,1974-06-16 17:59:54\n450,Ashley,afieldsch@tamu.edu,130.138.11.126,1983-09-15 05:52:36\n451,Judith,jhawkinsci@cmu.edu,200.187.103.245,2003-10-22 12:24:03\n452,Rebecca,rwestcj@ocn.ne.jp,72.85.3.103,1980-11-13 11:01:26\n453,Raymond,rporterck@infoseek.co.jp,146.33.216.151,1982-05-17 23:58:03\n454,Janet,jmarshallcl@odnoklassniki.ru,52.46.193.166,1998-10-04 00:02:21\n455,Shirley,speterscm@salon.com,248.126.31.15,1987-01-30 06:04:59\n456,Annie,abowmancn@economist.com,222.213.248.59,2006-03-14 23:52:59\n457,Jean,jlarsonco@blogspot.com,71.41.25.195,2007-09-08 23:49:45\n458,Phillip,pmoralescp@stanford.edu,74.119.87.28,2011-03-14 20:25:40\n459,Norma,nrobinsoncq@economist.com,28.225.21.54,1989-10-21 01:22:43\n460,Kimberly,kclarkcr@dion.ne.jp,149.171.132.153,2008-06-27 02:27:30\n461,Ruby,rmorriscs@ucla.edu,177.85.163.249,2016-01-28 16:43:44\n462,Jonathan,jcastilloct@tripod.com,78.4.28.77,2000-05-24 17:33:06\n463,Edward,ebryantcu@jigsy.com,140.31.98.193,1992-12-17 08:32:47\n464,Chris,chamiltoncv@eepurl.com,195.171.234.206,1970-12-05 03:42:19\n465,Michael,mweavercw@reference.com,7.233.133.213,1987-03-29 02:30:54\n466,Howard,hlawrencecx@businessweek.com,113.225.124.224,1990-07-30 07:20:57\n467,Philip,phowardcy@comsenz.com,159.170.247.249,2010-10-15 
10:18:37\n468,Mary,mmarshallcz@xing.com,125.132.189.70,2007-07-19 13:48:47\n469,Scott,salvarezd0@theguardian.com,78.49.103.230,1987-10-31 06:10:44\n470,Wayne,wcarrolld1@blog.com,238.1.120.204,1980-11-19 03:26:10\n471,Jennifer,jwoodsd2@multiply.com,92.20.224.49,2010-05-06 22:17:04\n472,Raymond,rwelchd3@toplist.cz,176.158.35.240,2007-12-12 19:02:51\n473,Steven,sdixond4@wisc.edu,167.55.237.52,1984-05-05 11:44:37\n474,Ralph,rjamesd5@ameblo.jp,241.190.50.133,2000-07-06 08:44:37\n475,Jason,jrobinsond6@hexun.com,138.119.139.56,2006-02-03 05:27:45\n476,Doris,dwoodd7@fema.gov,180.220.156.190,1978-05-11 20:14:20\n477,Elizabeth,eberryd8@youtu.be,74.188.53.229,2006-11-18 08:29:06\n478,Irene,igilbertd9@privacy.gov.au,194.152.218.1,1985-09-17 02:46:52\n479,Jessica,jdeanda@ameblo.jp,178.103.93.118,1974-06-07 19:04:05\n480,Rachel,ralvarezdb@phoca.cz,17.22.223.174,1999-03-08 02:43:25\n481,Kenneth,kthompsondc@shinystat.com,229.119.91.234,2007-05-15 13:17:32\n482,Harold,hmurraydd@parallels.com,133.26.188.80,1993-11-15 03:42:07\n483,Paula,phowellde@samsung.com,34.215.28.216,1993-11-29 15:55:00\n484,Ruth,rpiercedf@tripadvisor.com,111.30.130.123,1986-08-17 10:19:38\n485,Phyllis,paustindg@vk.com,50.84.34.178,1994-04-13 03:05:24\n486,Laura,lfosterdh@usnews.com,37.8.101.33,2001-06-30 08:58:59\n487,Eric,etaylordi@com.com,103.183.253.45,2006-09-15 20:18:46\n488,Doris,driveradj@prweb.com,247.16.2.199,1989-05-08 09:27:09\n489,Ryan,rhughesdk@elegantthemes.com,103.234.153.232,1989-08-01 18:36:06\n490,Steve,smoralesdl@jigsy.com,3.76.84.207,2011-03-13 17:01:05\n491,Louis,lsullivandm@who.int,78.135.44.208,1975-11-26 16:01:23\n492,Catherine,ctuckerdn@seattletimes.com,93.137.106.21,1990-03-13 16:14:56\n493,Ann,adixondo@gmpg.org,191.136.222.111,2002-06-05 14:22:18\n494,Johnny,jhartdp@amazon.com,103.252.198.39,1988-07-30 23:54:49\n495,Susan,srichardsdq@skype.com,126.247.192.11,2005-01-09 12:08:14\n496,Brenda,bparkerdr@skype.com,63.232.216.86,1974-05-18 05:58:29\n497,Tammy,tmurphyds@constantcontact.com,56.56.37.112,2014-08-05 18:22:25\n498,Larry,lhayesdt@wordpress.com,162.146.13.46,1997-02-26 14:01:53\n499,,ethomasdu@hhs.gov,6.241.88.250,2007-09-14 13:03:34\n500,Paula,pshawdv@networksolutions.com,123.27.47.249,2003-10-30 21:19:20\n"
  },
  {
    "path": "tests/functional/adapter/simple_seed/seeds.py",
    "content": "trino_seeds__expected_sql_create_table = \"\"\"\ncreate table {schema}.seed_expected (\nseed_id INTEGER,\nfirst_name VARCHAR,\nemail VARCHAR,\nip_address VARCHAR,\nbirthday TIMESTAMP WITHOUT TIME ZONE\n)\n\"\"\"\n\ntrino_seeds__expected_sql_insert_into = \"\"\"\nINSERT INTO {schema}.seed_expected\n    (\"seed_id\",\"first_name\",\"email\",\"ip_address\",\"birthday\")\nVALUES\n    (1,'Larry','lking0@miitbeian.gov.cn','69.135.206.194',TIMESTAMP '2008-09-12 19:08:31'),\n    (2,'Larry','lperkins1@toplist.cz','64.210.133.162',TIMESTAMP '1978-05-09 04:15:14'),\n    (3,'Anna','amontgomery2@miitbeian.gov.cn','168.104.64.114',TIMESTAMP '2011-10-16 04:07:57'),\n    (4,'Sandra','sgeorge3@livejournal.com','229.235.252.98',TIMESTAMP '1973-07-19 10:52:43'),\n    (5,'Fred','fwoods4@google.cn','78.229.170.124',TIMESTAMP '2012-09-30 16:38:29'),\n    (6,'Stephen','shanson5@livejournal.com','182.227.157.105',TIMESTAMP '1995-11-07 21:40:50'),\n    (7,'William','wmartinez6@upenn.edu','135.139.249.50',TIMESTAMP '1982-09-05 03:11:59'),\n    (8,'Jessica','jlong7@hao123.com','203.62.178.210',TIMESTAMP '1991-10-16 11:03:15'),\n    (9,'Douglas','dwhite8@tamu.edu','178.187.247.1',TIMESTAMP '1979-10-01 09:49:48'),\n    (10,'Lisa','lcoleman9@nydailynews.com','168.234.128.249',TIMESTAMP '2011-05-26 07:45:49'),\n    (11,'Ralph','rfieldsa@home.pl','55.152.163.149',TIMESTAMP '1972-11-18 19:06:11'),\n    (12,'Louise','lnicholsb@samsung.com','141.116.153.154',TIMESTAMP '2014-11-25 20:56:14'),\n    (13,'Clarence','cduncanc@sfgate.com','81.171.31.133',TIMESTAMP '2011-11-17 07:02:36'),\n    (14,'Daniel','dfranklind@omniture.com','8.204.211.37',TIMESTAMP '1980-09-13 00:09:04'),\n    (15,'Katherine','klanee@auda.org.au','176.96.134.59',TIMESTAMP '1997-08-22 19:36:56'),\n    (16,'Billy','bwardf@wikia.com','214.108.78.85',TIMESTAMP '2003-10-19 02:14:47'),\n    (17,'Annie','agarzag@ocn.ne.jp','190.108.42.70',TIMESTAMP '1988-10-28 15:12:35'),\n    (18,'Shirley','scolemanh@fastcompany.com','109.251.164.84',TIMESTAMP '1988-08-24 10:50:57'),\n    (19,'Roger','rfrazieri@scribd.com','38.145.218.108',TIMESTAMP '1985-12-31 15:17:15'),\n    (20,'Lillian','lstanleyj@goodreads.com','47.57.236.17',TIMESTAMP '1970-06-08 02:09:05'),\n    (21,'Aaron','arodriguezk@nps.gov','205.245.118.221',TIMESTAMP '1985-10-11 23:07:49'),\n    (22,'Patrick','pparkerl@techcrunch.com','19.8.100.182',TIMESTAMP '2006-03-29 12:53:56'),\n    (23,'Phillip','pmorenom@intel.com','41.38.254.103',TIMESTAMP '2011-11-07 15:35:43'),\n    (24,'Henry','hgarcian@newsvine.com','1.191.216.252',TIMESTAMP '2008-08-28 08:30:44'),\n    (25,'Irene','iturnero@opera.com','50.17.60.190',TIMESTAMP '1994-04-01 07:15:02'),\n    (26,'Andrew','adunnp@pen.io','123.52.253.176',TIMESTAMP '2000-11-01 06:03:25'),\n    (27,'David','dgutierrezq@wp.com','238.23.203.42',TIMESTAMP '1988-01-25 07:29:18'),\n    (28,'Henry','hsanchezr@cyberchimps.com','248.102.2.185',TIMESTAMP '1983-01-01 13:36:37'),\n    (29,'Evelyn','epetersons@gizmodo.com','32.80.46.119',TIMESTAMP '1979-07-16 17:24:12'),\n    (30,'Tammy','tmitchellt@purevolume.com','249.246.167.88',TIMESTAMP '2001-04-03 10:00:23'),\n    (31,'Jacqueline','jlittleu@domainmarket.com','127.181.97.47',TIMESTAMP '1986-02-11 21:35:50'),\n    (32,'Earl','eortizv@opera.com','166.47.248.240',TIMESTAMP '1996-07-06 08:16:27'),\n    (33,'Juan','jgordonw@sciencedirect.com','71.77.2.200',TIMESTAMP '1987-01-31 03:46:44'),\n    (34,'Diane','dhowellx@nyu.edu','140.94.133.12',TIMESTAMP '1994-06-11 02:30:05'),\n    
(35,'Randy','rkennedyy@microsoft.com','73.255.34.196',TIMESTAMP '2005-05-26 20:28:39'),\n    (36,'Janice','jriveraz@time.com','22.214.227.32',TIMESTAMP '1990-02-09 04:16:52'),\n    (37,'Laura','lperry10@diigo.com','159.148.145.73',TIMESTAMP '2015-03-17 05:59:25'),\n    (38,'Gary','gray11@statcounter.com','40.193.124.56',TIMESTAMP '1970-01-27 10:04:51'),\n    (39,'Jesse','jmcdonald12@typepad.com','31.7.86.103',TIMESTAMP '2009-03-14 08:14:29'),\n    (40,'Sandra','sgonzalez13@goodreads.com','223.80.168.239',TIMESTAMP '1993-05-21 14:08:54'),\n    (41,'Scott','smoore14@archive.org','38.238.46.83',TIMESTAMP '1980-08-30 11:16:56'),\n    (42,'Phillip','pevans15@cisco.com','158.234.59.34',TIMESTAMP '2011-12-15 23:26:31'),\n    (43,'Steven','sriley16@google.ca','90.247.57.68',TIMESTAMP '2011-10-29 19:03:28'),\n    (44,'Deborah','dbrown17@hexun.com','179.125.143.240',TIMESTAMP '1995-04-10 14:36:07'),\n    (45,'Lori','lross18@ow.ly','64.80.162.180',TIMESTAMP '1980-12-27 16:49:15'),\n    (46,'Sean','sjackson19@tumblr.com','240.116.183.69',TIMESTAMP '1988-06-12 21:24:45'),\n    (47,'Terry','tbarnes1a@163.com','118.38.213.137',TIMESTAMP '1997-09-22 16:43:19'),\n    (48,'Dorothy','dross1b@ebay.com','116.81.76.49',TIMESTAMP '2005-02-28 13:33:24'),\n    (49,'Samuel','swashington1c@house.gov','38.191.253.40',TIMESTAMP '1989-01-19 21:15:48'),\n    (50,'Ralph','rcarter1d@tinyurl.com','104.84.60.174',TIMESTAMP '2007-08-11 10:21:49'),\n    (51,'Wayne','whudson1e@princeton.edu','90.61.24.102',TIMESTAMP '1983-07-03 16:58:12'),\n    (52,'Rose','rjames1f@plala.or.jp','240.83.81.10',TIMESTAMP '1995-06-08 11:46:23'),\n    (53,'Louise','lcox1g@theglobeandmail.com','105.11.82.145',TIMESTAMP '2016-09-19 14:45:51'),\n    (54,'Kenneth','kjohnson1h@independent.co.uk','139.5.45.94',TIMESTAMP '1976-08-17 11:26:19'),\n    (55,'Donna','dbrown1i@amazon.co.uk','19.45.169.45',TIMESTAMP '2006-05-27 16:51:40'),\n    (56,'Johnny','jvasquez1j@trellian.com','118.202.238.23',TIMESTAMP '1975-11-17 08:42:32'),\n    (57,'Patrick','pramirez1k@tamu.edu','231.25.153.198',TIMESTAMP '1997-08-06 11:51:09'),\n    (58,'Helen','hlarson1l@prweb.com','8.40.21.39',TIMESTAMP '1993-08-04 19:53:40'),\n    (59,'Patricia','pspencer1m@gmpg.org','212.198.40.15',TIMESTAMP '1977-08-03 16:37:27'),\n    (60,'Joseph','jspencer1n@marriott.com','13.15.63.238',TIMESTAMP '2005-07-23 20:22:06'),\n    (61,'Phillip','pschmidt1o@blogtalkradio.com','177.98.201.190',TIMESTAMP '1976-05-19 21:47:44'),\n    (62,'Joan','jwebb1p@google.ru','105.229.170.71',TIMESTAMP '1972-09-07 17:53:47'),\n    (63,'Phyllis','pkennedy1q@imgur.com','35.145.8.244',TIMESTAMP '2000-01-01 22:33:37'),\n    (64,'Katherine','khunter1r@smh.com.au','248.168.205.32',TIMESTAMP '1991-01-09 06:40:24'),\n    (65,'Laura','lvasquez1s@wiley.com','128.129.115.152',TIMESTAMP '1997-10-23 12:04:56'),\n    (66,'Juan','jdunn1t@state.gov','44.228.124.51',TIMESTAMP '2004-11-10 05:07:35'),\n    (67,'Judith','jholmes1u@wiley.com','40.227.179.115',TIMESTAMP '1977-08-02 17:01:45'),\n    (68,'Beverly','bbaker1v@wufoo.com','208.34.84.59',TIMESTAMP '2016-03-06 20:07:23'),\n    (69,'Lawrence','lcarr1w@flickr.com','59.158.212.223',TIMESTAMP '1988-09-13 06:07:21'),\n    (70,'Gloria','gwilliams1x@mtv.com','245.231.88.33',TIMESTAMP '1995-03-18 22:32:46'),\n    (71,'Steven','ssims1y@cbslocal.com','104.50.58.255',TIMESTAMP '2001-08-05 21:26:20'),\n    (72,'Betty','bmills1z@arstechnica.com','103.177.214.220',TIMESTAMP '1981-12-14 21:26:54'),\n    (73,'Mildred','mfuller20@prnewswire.com','151.158.8.130',TIMESTAMP '2000-04-19 
10:13:55'),\n    (74,'Donald','dday21@icq.com','9.178.102.255',TIMESTAMP '1972-12-03 00:58:24'),\n    (75,'Eric','ethomas22@addtoany.com','85.2.241.227',TIMESTAMP '1992-11-01 05:59:30'),\n    (76,'Joyce','jarmstrong23@sitemeter.com','169.224.20.36',TIMESTAMP '1985-10-24 06:50:01'),\n    (77,'Maria','mmartinez24@amazonaws.com','143.189.167.135',TIMESTAMP '2005-10-05 05:17:42'),\n    (78,'Harry','hburton25@youtube.com','156.47.176.237',TIMESTAMP '1978-03-26 05:53:33'),\n    (79,'Kevin','klawrence26@hao123.com','79.136.183.83',TIMESTAMP '1994-10-12 04:38:52'),\n    (80,'David','dhall27@prweb.com','133.149.172.153',TIMESTAMP '1976-12-15 16:24:24'),\n    (81,'Kathy','kperry28@twitter.com','229.242.72.228',TIMESTAMP '1979-03-04 02:58:56'),\n    (82,'Adam','aprice29@elegantthemes.com','13.145.21.10',TIMESTAMP '1982-11-07 11:46:59'),\n    (83,'Brandon','bgriffin2a@va.gov','73.249.128.212',TIMESTAMP '2013-10-30 05:30:36'),\n    (84,'Henry','hnguyen2b@discovery.com','211.36.214.242',TIMESTAMP '1985-01-09 06:37:27'),\n    (85,'Eric','esanchez2c@edublogs.org','191.166.188.251',TIMESTAMP '2004-05-01 23:21:42'),\n    (86,'Jason','jlee2d@jimdo.com','193.92.16.182',TIMESTAMP '1973-01-08 09:05:39'),\n    (87,'Diana','drichards2e@istockphoto.com','19.130.175.245',TIMESTAMP '1994-10-05 22:50:49'),\n    (88,'Andrea','awelch2f@abc.net.au','94.155.233.96',TIMESTAMP '2002-04-26 08:41:44'),\n    (89,'Louis','lwagner2g@miitbeian.gov.cn','26.217.34.111',TIMESTAMP '2003-08-25 07:56:39'),\n    (90,'Jane','jsims2h@seesaa.net','43.4.220.135',TIMESTAMP '1987-03-20 20:39:04'),\n    (91,'Larry','lgrant2i@si.edu','97.126.79.34',TIMESTAMP '2000-09-07 20:26:19'),\n    (92,'Louis','ldean2j@prnewswire.com','37.148.40.127',TIMESTAMP '2011-09-16 20:12:14'),\n    (93,'Jennifer','jcampbell2k@xing.com','38.106.254.142',TIMESTAMP '1988-07-15 05:06:49'),\n    (94,'Wayne','wcunningham2l@google.com.hk','223.28.26.187',TIMESTAMP '2009-12-15 06:16:54'),\n    (95,'Lori','lstevens2m@icq.com','181.250.181.58',TIMESTAMP '1984-10-28 03:29:19'),\n    (96,'Judy','jsimpson2n@marriott.com','180.121.239.219',TIMESTAMP '1986-02-07 15:18:10'),\n    (97,'Phillip','phoward2o@usa.gov','255.247.0.175',TIMESTAMP '2002-12-26 08:44:45'),\n    (98,'Gloria','gwalker2p@usa.gov','156.140.7.128',TIMESTAMP '1997-10-04 07:58:58'),\n    (99,'Paul','pjohnson2q@umn.edu','183.59.198.197',TIMESTAMP '1991-11-14 12:33:55'),\n    (100,'Frank','fgreene2r@blogspot.com','150.143.68.121',TIMESTAMP '2010-06-12 23:55:39'),\n    (101,'Deborah','dknight2s@reverbnation.com','222.131.211.191',TIMESTAMP '1970-07-08 08:54:23'),\n    (102,'Sandra','sblack2t@tripadvisor.com','254.183.128.254',TIMESTAMP '2000-04-12 02:39:36'),\n    (103,'Edward','eburns2u@dailymotion.com','253.89.118.18',TIMESTAMP '1993-10-10 10:54:01'),\n    (104,'Anthony','ayoung2v@ustream.tv','118.4.193.176',TIMESTAMP '1978-08-26 17:07:29'),\n    (105,'Donald','dlawrence2w@wp.com','139.200.159.227',TIMESTAMP '2007-07-21 20:56:20'),\n    (106,'Matthew','mfreeman2x@google.fr','205.26.239.92',TIMESTAMP '2014-12-05 17:05:39'),\n    (107,'Sean','ssanders2y@trellian.com','143.89.82.108',TIMESTAMP '1993-07-14 21:45:02'),\n    (108,'Sharon','srobinson2z@soundcloud.com','66.234.247.54',TIMESTAMP '1977-04-06 19:07:03'),\n    (109,'Jennifer','jwatson30@t-online.de','196.102.127.7',TIMESTAMP '1998-03-07 05:12:23'),\n    (110,'Clarence','cbrooks31@si.edu','218.93.234.73',TIMESTAMP '2002-11-06 17:22:25'),\n    (111,'Jose','jflores32@goo.gl','185.105.244.231',TIMESTAMP '1995-01-05 06:32:21'),\n    
(112,'George','glee33@adobe.com','173.82.249.196',TIMESTAMP '2015-01-04 02:47:46'),\n    (113,'Larry','lhill34@linkedin.com','66.5.206.195',TIMESTAMP '2010-11-02 10:21:17'),\n    (114,'Marie','mmeyer35@mysql.com','151.152.88.107',TIMESTAMP '1990-05-22 20:52:51'),\n    (115,'Clarence','cwebb36@skype.com','130.198.55.217',TIMESTAMP '1972-10-27 07:38:54'),\n    (116,'Sarah','scarter37@answers.com','80.89.18.153',TIMESTAMP '1971-08-24 19:29:30'),\n    (117,'Henry','hhughes38@webeden.co.uk','152.60.114.174',TIMESTAMP '1973-01-27 09:00:42'),\n    (118,'Teresa','thenry39@hao123.com','32.187.239.106',TIMESTAMP '2015-11-06 01:48:44'),\n    (119,'Billy','bgutierrez3a@sun.com','52.37.70.134',TIMESTAMP '2002-03-19 03:20:19'),\n    (120,'Anthony','agibson3b@github.io','154.251.232.213',TIMESTAMP '1991-04-19 01:08:15'),\n    (121,'Sandra','sromero3c@wikia.com','44.124.171.2',TIMESTAMP '1998-09-06 20:30:34'),\n    (122,'Paula','pandrews3d@blogs.com','153.142.118.226',TIMESTAMP '2003-06-24 16:31:24'),\n    (123,'Terry','tbaker3e@csmonitor.com','99.120.45.219',TIMESTAMP '1970-12-09 23:57:21'),\n    (124,'Lois','lwilson3f@reuters.com','147.44.171.83',TIMESTAMP '1971-01-09 22:28:51'),\n    (125,'Sara','smorgan3g@nature.com','197.67.192.230',TIMESTAMP '1992-01-28 20:33:24'),\n    (126,'Charles','ctorres3h@china.com.cn','156.115.216.2',TIMESTAMP '1993-10-02 19:36:34'),\n    (127,'Richard','ralexander3i@marriott.com','248.235.180.59',TIMESTAMP '1999-02-03 18:40:55'),\n    (128,'Christina','charper3j@cocolog-nifty.com','152.114.116.129',TIMESTAMP '1978-09-13 00:37:32'),\n    (129,'Steve','sadams3k@economist.com','112.248.91.98',TIMESTAMP '2004-03-21 09:07:43'),\n    (130,'Katherine','krobertson3l@ow.ly','37.220.107.28',TIMESTAMP '1977-03-18 19:28:50'),\n    (131,'Donna','dgibson3m@state.gov','222.218.76.221',TIMESTAMP '1999-02-01 06:46:16'),\n    (132,'Christina','cwest3n@mlb.com','152.114.6.160',TIMESTAMP '1979-12-24 15:30:35'),\n    (133,'Sandra','swillis3o@meetup.com','180.71.49.34',TIMESTAMP '1984-09-27 08:05:54'),\n    (134,'Clarence','cedwards3p@smugmug.com','10.64.180.186',TIMESTAMP '1979-04-16 16:52:10'),\n    (135,'Ruby','rjames3q@wp.com','98.61.54.20',TIMESTAMP '2007-01-13 14:25:52'),\n    (136,'Sarah','smontgomery3r@tripod.com','91.45.164.172',TIMESTAMP '2009-07-25 04:34:30'),\n    (137,'Sarah','soliver3s@eventbrite.com','30.106.39.146',TIMESTAMP '2012-05-09 22:12:33'),\n    (138,'Deborah','dwheeler3t@biblegateway.com','59.105.213.173',TIMESTAMP '1999-11-09 08:08:44'),\n    (139,'Deborah','dray3u@i2i.jp','11.108.186.217',TIMESTAMP '2014-02-04 03:15:19'),\n    (140,'Paul','parmstrong3v@alexa.com','6.250.59.43',TIMESTAMP '2009-12-21 10:08:53'),\n    (141,'Aaron','abishop3w@opera.com','207.145.249.62',TIMESTAMP '1996-04-25 23:20:23'),\n    (142,'Henry','hsanders3x@google.ru','140.215.203.171',TIMESTAMP '2012-01-29 11:52:32'),\n    (143,'Anne','aanderson3y@1688.com','74.150.102.118',TIMESTAMP '1982-04-03 13:46:17'),\n    (144,'Victor','vmurphy3z@hugedomains.com','222.155.99.152',TIMESTAMP '1987-11-03 19:58:41'),\n    (145,'Evelyn','ereid40@pbs.org','249.122.33.117',TIMESTAMP '1977-12-14 17:09:57'),\n    (146,'Brian','bgonzalez41@wikia.com','246.254.235.141',TIMESTAMP '1991-02-24 00:45:58'),\n    (147,'Sandra','sgray42@squarespace.com','150.73.28.159',TIMESTAMP '1972-07-28 17:26:32'),\n    (148,'Alice','ajones43@a8.net','78.253.12.177',TIMESTAMP '2002-12-05 16:57:46'),\n    (149,'Jessica','jhanson44@mapquest.com','87.229.30.160',TIMESTAMP '1994-01-30 11:40:04'),\n    
(150,'Louise','lbailey45@reuters.com','191.219.31.101',TIMESTAMP '2011-09-07 21:11:45'),\n    (151,'Christopher','cgonzalez46@printfriendly.com','83.137.213.239',TIMESTAMP '1984-10-24 14:58:04'),\n    (152,'Gregory','gcollins47@yandex.ru','28.176.10.115',TIMESTAMP '1998-07-25 17:17:10'),\n    (153,'Jane','jperkins48@usnews.com','46.53.164.159',TIMESTAMP '1979-08-19 15:25:00'),\n    (154,'Phyllis','plong49@yahoo.co.jp','208.140.88.2',TIMESTAMP '1985-07-06 02:16:36'),\n    (155,'Adam','acarter4a@scribd.com','78.48.148.204',TIMESTAMP '2005-07-20 03:31:09'),\n    (156,'Frank','fweaver4b@angelfire.com','199.180.255.224',TIMESTAMP '2011-03-04 23:07:54'),\n    (157,'Ronald','rmurphy4c@cloudflare.com','73.42.97.231',TIMESTAMP '1991-01-11 10:39:41'),\n    (158,'Richard','rmorris4d@e-recht24.de','91.9.97.223',TIMESTAMP '2009-01-17 21:05:15'),\n    (159,'Rose','rfoster4e@woothemes.com','203.169.53.16',TIMESTAMP '1991-04-21 02:09:38'),\n    (160,'George','ggarrett4f@uiuc.edu','186.61.5.167',TIMESTAMP '1989-11-11 11:29:42'),\n    (161,'Victor','vhamilton4g@biblegateway.com','121.229.138.38',TIMESTAMP '2012-06-22 18:01:23'),\n    (162,'Mark','mbennett4h@businessinsider.com','209.184.29.203',TIMESTAMP '1980-04-16 15:26:34'),\n    (163,'Martin','mwells4i@ifeng.com','97.223.55.105',TIMESTAMP '2010-05-26 14:08:18'),\n    (164,'Diana','dstone4j@google.ru','90.155.52.47',TIMESTAMP '2013-02-11 00:14:54'),\n    (165,'Walter','wferguson4k@blogger.com','30.63.212.44',TIMESTAMP '1986-02-20 17:46:46'),\n    (166,'Denise','dcoleman4l@vistaprint.com','10.209.153.77',TIMESTAMP '1992-05-13 20:14:14'),\n    (167,'Philip','pknight4m@xing.com','15.28.135.167',TIMESTAMP '2000-09-11 18:41:13'),\n    (168,'Russell','rcarr4n@youtube.com','113.55.165.50',TIMESTAMP '2008-07-10 17:49:27'),\n    (169,'Donna','dburke4o@dion.ne.jp','70.0.105.111',TIMESTAMP '1992-02-10 17:24:58'),\n    (170,'Anne','along4p@squidoo.com','36.154.58.107',TIMESTAMP '2012-08-19 23:35:31'),\n    (171,'Clarence','cbanks4q@webeden.co.uk','94.57.53.114',TIMESTAMP '1972-03-11 21:46:44'),\n    (172,'Betty','bbowman4r@cyberchimps.com','178.115.209.69',TIMESTAMP '2013-01-13 21:34:51'),\n    (173,'Andrew','ahudson4s@nytimes.com','84.32.252.144',TIMESTAMP '1998-09-15 14:20:04'),\n    (174,'Keith','kgordon4t@cam.ac.uk','189.237.211.102',TIMESTAMP '2009-01-22 05:34:38'),\n    (175,'Patrick','pwheeler4u@mysql.com','47.22.117.226',TIMESTAMP '1984-09-05 22:33:15'),\n    (176,'Jesse','jfoster4v@mapquest.com','229.95.131.46',TIMESTAMP '1990-01-20 12:19:15'),\n    (177,'Arthur','afisher4w@jugem.jp','107.255.244.98',TIMESTAMP '1983-10-13 11:08:46'),\n    (178,'Nicole','nryan4x@wsj.com','243.211.33.221',TIMESTAMP '1974-05-30 23:19:14'),\n    (179,'Bruce','bjohnson4y@sfgate.com','17.41.200.101',TIMESTAMP '1992-09-23 02:02:19'),\n    (180,'Terry','tcox4z@reference.com','20.189.120.106',TIMESTAMP '1982-02-13 12:43:14'),\n    (181,'Ashley','astanley50@kickstarter.com','86.3.56.98',TIMESTAMP '1976-05-09 01:27:16'),\n    (182,'Michael','mrivera51@about.me','72.118.249.0',TIMESTAMP '1971-11-11 17:28:37'),\n    (183,'Steven','sgonzalez52@mozilla.org','169.112.247.47',TIMESTAMP '2002-08-24 14:59:25'),\n    (184,'Kathleen','kfuller53@bloglovin.com','80.93.59.30',TIMESTAMP '2002-03-11 13:41:29'),\n    (185,'Nicole','nhenderson54@usda.gov','39.253.60.30',TIMESTAMP '1995-04-24 05:55:07'),\n    (186,'Ralph','rharper55@purevolume.com','167.147.142.189',TIMESTAMP '1980-02-10 18:35:45'),\n    (187,'Heather','hcunningham56@photobucket.com','96.222.196.229',TIMESTAMP '2007-06-15 05:37:50'),\n 
   (188,'Nancy','nlittle57@cbc.ca','241.53.255.175',TIMESTAMP '2007-07-12 23:42:48'),\n    (189,'Juan','jramirez58@pinterest.com','190.128.84.27',TIMESTAMP '1978-11-07 23:37:37'),\n    (190,'Beverly','bfowler59@chronoengine.com','54.144.230.49',TIMESTAMP '1979-03-31 23:27:28'),\n    (191,'Shirley','sstevens5a@prlog.org','200.97.231.248',TIMESTAMP '2011-12-06 07:08:50'),\n    (192,'Annie','areyes5b@squidoo.com','223.32.182.101',TIMESTAMP '2011-05-28 02:42:09'),\n    (193,'Jack','jkelley5c@tiny.cc','47.34.118.150',TIMESTAMP '1981-12-05 17:31:40'),\n    (194,'Keith','krobinson5d@1und1.de','170.210.209.31',TIMESTAMP '1999-03-09 11:05:43'),\n    (195,'Joseph','jmiller5e@google.com.au','136.74.212.139',TIMESTAMP '1984-10-08 13:18:20'),\n    (196,'Annie','aday5f@blogspot.com','71.99.186.69',TIMESTAMP '1986-02-18 12:27:34'),\n    (197,'Nancy','nperez5g@liveinternet.ru','28.160.6.107',TIMESTAMP '1983-10-20 17:51:20'),\n    (198,'Tammy','tward5h@ucoz.ru','141.43.164.70',TIMESTAMP '1980-03-31 04:45:29'),\n    (199,'Doris','dryan5i@ted.com','239.117.202.188',TIMESTAMP '1985-07-03 03:17:53'),\n    (200,'Rose','rmendoza5j@photobucket.com','150.200.206.79',TIMESTAMP '1973-04-21 21:36:40'),\n    (201,'Cynthia','cbutler5k@hubpages.com','80.153.174.161',TIMESTAMP '2001-01-20 01:42:26'),\n    (202,'Samuel','soliver5l@people.com.cn','86.127.246.140',TIMESTAMP '1970-09-02 02:19:00'),\n    (203,'Carl','csanchez5m@mysql.com','50.149.237.107',TIMESTAMP '1993-12-01 07:02:09'),\n    (204,'Kathryn','kowens5n@geocities.jp','145.166.205.201',TIMESTAMP '2004-07-06 18:39:33'),\n    (205,'Nicholas','nnichols5o@parallels.com','190.240.66.170',TIMESTAMP '2014-11-11 18:52:19'),\n    (206,'Keith','kwillis5p@youtube.com','181.43.206.100',TIMESTAMP '1998-06-13 06:30:51'),\n    (207,'Justin','jwebb5q@intel.com','211.54.245.74',TIMESTAMP '2000-11-04 16:58:26'),\n    (208,'Gary','ghicks5r@wikipedia.org','196.154.213.104',TIMESTAMP '1992-12-01 19:48:28'),\n    (209,'Martin','mpowell5s@flickr.com','153.67.12.241',TIMESTAMP '1983-06-30 06:24:32'),\n    (210,'Brenda','bkelley5t@xinhuanet.com','113.100.5.172',TIMESTAMP '2005-01-08 20:50:22'),\n    (211,'Edward','eray5u@a8.net','205.187.246.65',TIMESTAMP '2011-09-26 08:04:44'),\n    (212,'Steven','slawson5v@senate.gov','238.150.250.36',TIMESTAMP '1978-11-22 02:48:09'),\n    (213,'Robert','rthompson5w@furl.net','70.7.89.236',TIMESTAMP '2001-09-12 08:52:07'),\n    (214,'Jack','jporter5x@diigo.com','220.172.29.99',TIMESTAMP '1976-07-26 14:29:21'),\n    (215,'Lisa','ljenkins5y@oakley.com','150.151.170.180',TIMESTAMP '2010-03-20 19:21:16'),\n    (216,'Theresa','tbell5z@mayoclinic.com','247.25.53.173',TIMESTAMP '2001-03-11 05:36:40'),\n    (217,'Jimmy','jstephens60@weather.com','145.101.93.235',TIMESTAMP '1983-04-12 09:35:30'),\n    (218,'Louis','lhunt61@amazon.co.jp','78.137.6.253',TIMESTAMP '1997-08-29 19:34:34'),\n    (219,'Lawrence','lgilbert62@ted.com','243.132.8.78',TIMESTAMP '2015-04-08 22:06:56'),\n    (220,'David','dgardner63@4shared.com','204.40.46.136',TIMESTAMP '1971-07-09 03:29:11'),\n    (221,'Charles','ckennedy64@gmpg.org','211.83.233.2',TIMESTAMP '2011-02-26 11:55:04'),\n    (222,'Lillian','lbanks65@msu.edu','124.233.12.80',TIMESTAMP '2010-05-16 20:29:02'),\n    (223,'Ernest','enguyen66@baidu.com','82.45.128.148',TIMESTAMP '1996-07-04 10:07:04'),\n    (224,'Ryan','rrussell67@cloudflare.com','202.53.240.223',TIMESTAMP '1983-08-05 12:36:29'),\n    (225,'Donald','ddavis68@ustream.tv','47.39.218.137',TIMESTAMP '1989-05-27 02:30:56'),\n    
(226,'Joe','jscott69@blogspot.com','140.23.131.75',TIMESTAMP '1973-03-16 12:21:31'),\n    (227,'Anne','amarshall6a@google.ca','113.162.200.197',TIMESTAMP '1988-12-09 03:38:29'),\n    (228,'Willie','wturner6b@constantcontact.com','85.83.182.249',TIMESTAMP '1991-10-06 01:51:10'),\n    (229,'Nicole','nwilson6c@sogou.com','30.223.51.135',TIMESTAMP '1977-05-29 19:54:56'),\n    (230,'Janet','jwheeler6d@stumbleupon.com','153.194.27.144',TIMESTAMP '2011-03-13 12:48:47'),\n    (231,'Lois','lcarr6e@statcounter.com','0.41.36.53',TIMESTAMP '1993-02-06 04:52:01'),\n    (232,'Shirley','scruz6f@tmall.com','37.156.39.223',TIMESTAMP '2007-02-18 17:47:01'),\n    (233,'Patrick','pford6g@reverbnation.com','36.198.200.89',TIMESTAMP '1977-03-06 15:47:24'),\n    (234,'Lisa','lhudson6h@usatoday.com','134.213.58.137',TIMESTAMP '2014-10-28 01:56:56'),\n    (235,'Pamela','pmartinez6i@opensource.org','5.151.127.202',TIMESTAMP '1987-11-30 16:44:47'),\n    (236,'Larry','lperez6j@infoseek.co.jp','235.122.96.148',TIMESTAMP '1979-01-18 06:33:45'),\n    (237,'Pamela','pramirez6k@census.gov','138.233.34.163',TIMESTAMP '2012-01-29 10:35:20'),\n    (238,'Daniel','dcarr6l@php.net','146.21.152.242',TIMESTAMP '1984-11-17 08:22:59'),\n    (239,'Patrick','psmith6m@indiegogo.com','136.222.199.36',TIMESTAMP '2001-05-30 22:16:44'),\n    (240,'Raymond','rhenderson6n@hc360.com','116.31.112.38',TIMESTAMP '2000-01-05 20:35:41'),\n    (241,'Teresa','treynolds6o@miitbeian.gov.cn','198.126.205.220',TIMESTAMP '1996-11-08 01:27:31'),\n    (242,'Johnny','jmason6p@flickr.com','192.8.232.114',TIMESTAMP '2013-05-14 05:35:50'),\n    (243,'Angela','akelly6q@guardian.co.uk','234.116.60.197',TIMESTAMP '1977-08-20 02:05:17'),\n    (244,'Douglas','dcole6r@cmu.edu','128.135.212.69',TIMESTAMP '2016-10-26 17:40:36'),\n    (245,'Frances','fcampbell6s@twitpic.com','94.22.243.235',TIMESTAMP '1987-04-26 07:07:13'),\n    (246,'Donna','dgreen6t@chron.com','227.116.46.107',TIMESTAMP '2011-07-25 12:59:54'),\n    (247,'Benjamin','bfranklin6u@redcross.org','89.141.142.89',TIMESTAMP '1974-05-03 20:28:18'),\n    (248,'Randy','rpalmer6v@rambler.ru','70.173.63.178',TIMESTAMP '2011-12-20 17:40:18'),\n    (249,'Melissa','mmurray6w@bbb.org','114.234.118.137',TIMESTAMP '1991-02-26 12:45:44'),\n    (250,'Jean','jlittle6x@epa.gov','141.21.163.254',TIMESTAMP '1991-08-16 04:57:09'),\n    (251,'Daniel','dolson6y@nature.com','125.75.104.97',TIMESTAMP '2010-04-23 06:25:54'),\n    (252,'Kathryn','kwells6z@eventbrite.com','225.104.28.249',TIMESTAMP '2015-01-31 02:21:50'),\n    (253,'Theresa','tgonzalez70@ox.ac.uk','91.93.156.26',TIMESTAMP '1971-12-11 10:31:31'),\n    (254,'Beverly','broberts71@bluehost.com','244.40.158.89',TIMESTAMP '2013-09-21 13:02:31'),\n    (255,'Pamela','pmurray72@netscape.com','218.54.95.216',TIMESTAMP '1985-04-16 00:34:00'),\n    (256,'Timothy','trichardson73@amazonaws.com','235.49.24.229',TIMESTAMP '2000-11-11 09:48:28'),\n    (257,'Mildred','mpalmer74@is.gd','234.125.95.132',TIMESTAMP '1992-05-25 02:25:02'),\n    (258,'Jessica','jcampbell75@google.it','55.98.30.140',TIMESTAMP '2014-08-26 00:26:34'),\n    (259,'Beverly','bthomas76@cpanel.net','48.78.228.176',TIMESTAMP '1970-08-18 10:40:05'),\n    (260,'Eugene','eward77@cargocollective.com','139.226.204.2',TIMESTAMP '1996-12-04 23:17:00'),\n    (261,'Andrea','aallen78@webnode.com','160.31.214.38',TIMESTAMP '2009-07-06 07:22:37'),\n    (262,'Justin','jruiz79@merriam-webster.com','150.149.246.122',TIMESTAMP '2005-06-06 11:44:19'),\n    (263,'Kenneth','kedwards7a@networksolutions.com','98.82.193.128',TIMESTAMP 
'2001-07-03 02:00:10'),\n    (264,'Rachel','rday7b@miibeian.gov.cn','114.15.247.221',TIMESTAMP '1994-08-18 19:45:40'),\n    (265,'Russell','rmiller7c@instagram.com','184.130.152.253',TIMESTAMP '1977-11-06 01:58:12'),\n    (266,'Bonnie','bhudson7d@cornell.edu','235.180.186.206',TIMESTAMP '1990-12-03 22:45:24'),\n    (267,'Raymond','rknight7e@yandex.ru','161.2.44.252',TIMESTAMP '1995-08-25 04:31:19'),\n    (268,'Bonnie','brussell7f@elpais.com','199.237.57.207',TIMESTAMP '1991-03-29 08:32:06'),\n    (269,'Marie','mhenderson7g@elpais.com','52.203.131.144',TIMESTAMP '2004-06-04 21:50:28'),\n    (270,'Alan','acarr7h@trellian.com','147.51.205.72',TIMESTAMP '2005-03-03 10:51:31'),\n    (271,'Barbara','bturner7i@hugedomains.com','103.160.110.226',TIMESTAMP '2004-08-04 13:42:40'),\n    (272,'Christina','cdaniels7j@census.gov','0.238.61.251',TIMESTAMP '1972-10-18 12:47:33'),\n    (273,'Jeremy','jgomez7k@reuters.com','111.26.65.56',TIMESTAMP '2013-01-13 10:41:35'),\n    (274,'Laura','lwood7l@icio.us','149.153.38.205',TIMESTAMP '2011-06-25 09:33:59'),\n    (275,'Matthew','mbowman7m@auda.org.au','182.138.206.172',TIMESTAMP '1999-03-05 03:25:36'),\n    (276,'Denise','dparker7n@icq.com','0.213.88.138',TIMESTAMP '2011-11-04 09:43:06'),\n    (277,'Phillip','pparker7o@discuz.net','219.242.165.240',TIMESTAMP '1973-10-19 04:22:29'),\n    (278,'Joan','jpierce7p@salon.com','63.31.213.202',TIMESTAMP '1989-04-09 22:06:24'),\n    (279,'Irene','ibaker7q@cbc.ca','102.33.235.114',TIMESTAMP '1992-09-04 13:00:57'),\n    (280,'Betty','bbowman7r@ted.com','170.91.249.242',TIMESTAMP '2015-09-28 08:14:22'),\n    (281,'Teresa','truiz7s@boston.com','82.108.158.207',TIMESTAMP '1999-07-18 05:17:09'),\n    (282,'Helen','hbrooks7t@slideshare.net','102.87.162.187',TIMESTAMP '2003-01-06 15:45:29'),\n    (283,'Karen','kgriffin7u@wunderground.com','43.82.44.184',TIMESTAMP '2010-05-28 01:56:37'),\n    (284,'Lisa','lfernandez7v@mtv.com','200.238.218.220',TIMESTAMP '1993-04-03 20:33:51'),\n    (285,'Jesse','jlawrence7w@timesonline.co.uk','95.122.105.78',TIMESTAMP '1990-01-05 17:28:43'),\n    (286,'Terry','tross7x@macromedia.com','29.112.114.133',TIMESTAMP '2009-08-29 21:32:17'),\n    (287,'Angela','abradley7y@icq.com','177.44.27.72',TIMESTAMP '1989-10-04 21:46:06'),\n    (288,'Maria','mhart7z@dailymotion.com','55.27.55.202',TIMESTAMP '1975-01-21 01:22:57'),\n    (289,'Raymond','randrews80@pinterest.com','88.90.78.67',TIMESTAMP '1992-03-16 21:37:40'),\n    (290,'Kathy','krice81@bluehost.com','212.63.196.102',TIMESTAMP '2000-12-14 03:06:44'),\n    (291,'Cynthia','cramos82@nymag.com','107.89.190.6',TIMESTAMP '2005-06-28 02:02:33'),\n    (292,'Kimberly','kjones83@mysql.com','86.169.101.101',TIMESTAMP '2007-06-13 22:56:49'),\n    (293,'Timothy','thansen84@microsoft.com','108.100.254.90',TIMESTAMP '2003-04-04 10:31:57'),\n    (294,'Carol','cspencer85@berkeley.edu','75.118.144.187',TIMESTAMP '1999-03-30 14:53:21'),\n    (295,'Louis','lmedina86@latimes.com','141.147.163.24',TIMESTAMP '1991-04-11 17:53:13'),\n    (296,'Margaret','mcole87@google.fr','53.184.26.83',TIMESTAMP '1991-12-19 01:54:10'),\n    (297,'Mary','mgomez88@yellowpages.com','208.56.57.99',TIMESTAMP '1976-05-21 18:05:08'),\n    (298,'Amanda','aanderson89@geocities.com','147.73.15.252',TIMESTAMP '1987-08-22 15:05:28'),\n    (299,'Kathryn','kgarrett8a@nature.com','27.29.177.220',TIMESTAMP '1976-07-15 04:25:04'),\n    (300,'Dorothy','dmason8b@shareasale.com','106.210.99.193',TIMESTAMP '1990-09-03 21:39:31'),\n    (301,'Lois','lkennedy8c@amazon.de','194.169.29.187',TIMESTAMP 
'2007-07-29 14:09:31'),\n    (302,'Irene','iburton8d@washingtonpost.com','196.143.110.249',TIMESTAMP '2013-09-05 11:32:46'),\n    (303,'Betty','belliott8e@wired.com','183.105.222.199',TIMESTAMP '1979-09-19 19:29:13'),\n    (304,'Bobby','bmeyer8f@census.gov','36.13.161.145',TIMESTAMP '2014-05-24 14:34:39'),\n    (305,'Ann','amorrison8g@sfgate.com','72.154.54.137',TIMESTAMP '1978-10-05 14:22:34'),\n    (306,'Daniel','djackson8h@wunderground.com','144.95.32.34',TIMESTAMP '1990-07-27 13:23:05'),\n    (307,'Joe','jboyd8i@alibaba.com','187.105.86.178',TIMESTAMP '2011-09-28 16:46:32'),\n    (308,'Ralph','rdunn8j@fc2.com','3.19.87.255',TIMESTAMP '1984-10-18 08:00:40'),\n    (309,'Craig','ccarter8k@gizmodo.com','235.152.76.215',TIMESTAMP '1998-07-04 12:15:21'),\n    (310,'Paula','pdean8l@hhs.gov','161.100.173.197',TIMESTAMP '1973-02-13 09:38:55'),\n    (311,'Andrew','agarrett8m@behance.net','199.253.123.218',TIMESTAMP '1991-02-14 13:36:32'),\n    (312,'Janet','jhowell8n@alexa.com','39.189.139.79',TIMESTAMP '2012-11-24 20:17:33'),\n    (313,'Keith','khansen8o@godaddy.com','116.186.223.196',TIMESTAMP '1987-08-23 21:22:05'),\n    (314,'Nicholas','nedwards8p@state.gov','142.175.142.11',TIMESTAMP '1977-03-28 18:27:27'),\n    (315,'Jacqueline','jallen8q@oaic.gov.au','189.66.135.192',TIMESTAMP '1994-10-26 11:44:26'),\n    (316,'Frank','fgardner8r@mapy.cz','154.77.119.169',TIMESTAMP '1983-01-29 19:19:51'),\n    (317,'Eric','eharrison8s@google.cn','245.139.65.123',TIMESTAMP '1984-02-04 09:54:36'),\n    (318,'Gregory','gcooper8t@go.com','171.147.0.221',TIMESTAMP '2004-06-14 05:22:08'),\n    (319,'Jean','jfreeman8u@rakuten.co.jp','67.243.121.5',TIMESTAMP '1977-01-07 18:23:43'),\n    (320,'Juan','jlewis8v@shinystat.com','216.181.171.189',TIMESTAMP '2001-08-23 17:32:43'),\n    (321,'Randy','rwilliams8w@shinystat.com','105.152.146.28',TIMESTAMP '1983-02-17 00:05:50'),\n    (322,'Stephen','shart8x@sciencedirect.com','196.131.205.148',TIMESTAMP '2004-02-15 10:12:03'),\n    (323,'Annie','ahunter8y@example.com','63.36.34.103',TIMESTAMP '2003-07-23 21:15:25'),\n    (324,'Melissa','mflores8z@cbc.ca','151.230.217.90',TIMESTAMP '1983-11-02 14:53:56'),\n    (325,'Jane','jweaver90@about.me','0.167.235.217',TIMESTAMP '1987-07-29 00:13:44'),\n    (326,'Anthony','asmith91@oracle.com','97.87.48.41',TIMESTAMP '2001-05-31 18:44:11'),\n    (327,'Terry','tdavis92@buzzfeed.com','46.20.12.51',TIMESTAMP '2015-09-12 23:13:55'),\n    (328,'Brandon','bmontgomery93@gravatar.com','252.101.48.186',TIMESTAMP '2010-10-28 08:26:27'),\n    (329,'Chris','cmurray94@bluehost.com','25.158.167.97',TIMESTAMP '2004-05-05 16:10:31'),\n    (330,'Denise','dfuller95@hugedomains.com','216.210.149.28',TIMESTAMP '1979-04-20 08:57:24'),\n    (331,'Arthur','amcdonald96@sakura.ne.jp','206.42.36.213',TIMESTAMP '2009-08-15 03:26:16'),\n    (332,'Jesse','jhoward97@google.cn','46.181.118.30',TIMESTAMP '1974-04-18 14:08:41'),\n    (333,'Frank','fsimpson98@domainmarket.com','163.220.211.87',TIMESTAMP '2006-06-30 14:46:52'),\n    (334,'Janice','jwoods99@pen.io','229.245.237.182',TIMESTAMP '1988-04-06 11:52:58'),\n    (335,'Rebecca','rroberts9a@huffingtonpost.com','148.96.15.80',TIMESTAMP '1976-10-05 08:44:16'),\n    (336,'Joshua','jray9b@opensource.org','192.253.12.198',TIMESTAMP '1971-12-25 22:27:07'),\n    (337,'Joyce','jcarpenter9c@statcounter.com','125.171.46.215',TIMESTAMP '2001-12-31 22:08:13'),\n    (338,'Andrea','awest9d@privacy.gov.au','79.101.180.201',TIMESTAMP '1983-02-18 20:07:47'),\n    (339,'Christine','chudson9e@yelp.com','64.198.43.56',TIMESTAMP 
'1997-09-08 08:03:43'),\n    (340,'Joe','jparker9f@earthlink.net','251.215.148.153',TIMESTAMP '1973-11-04 05:08:18'),\n    (341,'Thomas','tkim9g@answers.com','49.187.34.47',TIMESTAMP '1991-08-07 21:13:48'),\n    (342,'Janice','jdean9h@scientificamerican.com','4.197.117.16',TIMESTAMP '2009-12-08 02:35:49'),\n    (343,'James','jmitchell9i@umich.edu','43.121.18.147',TIMESTAMP '2011-04-28 17:04:09'),\n    (344,'Charles','cgardner9j@purevolume.com','197.78.240.240',TIMESTAMP '1998-02-11 06:47:07'),\n    (345,'Robert','rhenderson9k@friendfeed.com','215.84.180.88',TIMESTAMP '2002-05-10 15:33:14'),\n    (346,'Chris','cgray9l@4shared.com','249.70.192.240',TIMESTAMP '1998-10-03 16:43:42'),\n    (347,'Gloria','ghayes9m@hibu.com','81.103.138.26',TIMESTAMP '1999-12-26 11:23:13'),\n    (348,'Edward','eramirez9n@shareasale.com','38.136.90.136',TIMESTAMP '2010-08-19 08:01:06'),\n    (349,'Cheryl','cbutler9o@google.ca','172.180.78.172',TIMESTAMP '1995-05-27 20:03:52'),\n    (350,'Margaret','mwatkins9p@sfgate.com','3.20.198.6',TIMESTAMP '2014-10-21 01:42:58'),\n    (351,'Rebecca','rwelch9q@examiner.com','45.81.42.208',TIMESTAMP '2001-02-08 12:19:06'),\n    (352,'Joe','jpalmer9r@phpbb.com','163.202.92.190',TIMESTAMP '1970-01-05 11:29:12'),\n    (353,'Sandra','slewis9s@dyndns.org','77.215.201.236',TIMESTAMP '1974-01-05 07:04:04'),\n    (354,'Todd','tfranklin9t@g.co','167.125.181.82',TIMESTAMP '2009-09-28 10:13:58'),\n    (355,'Joseph','jlewis9u@webmd.com','244.204.6.11',TIMESTAMP '1990-10-21 15:49:57'),\n    (356,'Alan','aknight9v@nydailynews.com','152.197.95.83',TIMESTAMP '1996-03-08 08:43:17'),\n    (357,'Sharon','sdean9w@123-reg.co.uk','237.46.40.26',TIMESTAMP '1985-11-30 12:09:24'),\n    (358,'Annie','awright9x@cafepress.com','190.45.231.111',TIMESTAMP '2000-08-24 11:56:06'),\n    (359,'Diane','dhamilton9y@youtube.com','85.146.171.196',TIMESTAMP '2015-02-24 02:03:57'),\n    (360,'Antonio','alane9z@auda.org.au','61.63.146.203',TIMESTAMP '2001-05-13 03:43:34'),\n    (361,'Matthew','mallena0@hhs.gov','29.97.32.19',TIMESTAMP '1973-02-19 23:43:32'),\n    (362,'Bonnie','bfowlera1@soup.io','251.216.99.53',TIMESTAMP '2013-08-01 15:35:41'),\n    (363,'Margaret','mgraya2@examiner.com','69.255.151.79',TIMESTAMP '1998-01-23 22:24:59'),\n    (364,'Joan','jwagnera3@printfriendly.com','192.166.120.61',TIMESTAMP '1973-07-13 00:30:22'),\n    (365,'Catherine','cperkinsa4@nytimes.com','58.21.24.214',TIMESTAMP '2006-11-19 11:52:26'),\n    (366,'Mark','mcartera5@cpanel.net','220.33.102.142',TIMESTAMP '2007-09-09 09:43:27'),\n    (367,'Paula','ppricea6@msn.com','36.182.238.124',TIMESTAMP '2009-11-11 09:13:05'),\n    (368,'Catherine','cgreena7@army.mil','228.203.58.19',TIMESTAMP '2005-08-09 16:52:15'),\n    (369,'Helen','hhamiltona8@symantec.com','155.56.194.99',TIMESTAMP '2005-02-01 05:40:36'),\n    (370,'Jane','jmeyera9@ezinearticles.com','133.244.113.213',TIMESTAMP '2013-11-06 22:10:23'),\n    (371,'Wanda','wevansaa@bloglovin.com','233.125.192.48',TIMESTAMP '1994-12-26 23:43:42'),\n    (372,'Mark','mmarshallab@tumblr.com','114.74.60.47',TIMESTAMP '2016-09-29 18:03:01'),\n    (373,'Andrew','amartinezac@google.cn','182.54.37.130',TIMESTAMP '1976-06-06 17:04:17'),\n    (374,'Helen','hmoralesad@e-recht24.de','42.45.4.123',TIMESTAMP '1977-03-28 19:06:59'),\n    (375,'Bonnie','bstoneae@php.net','196.149.79.137',TIMESTAMP '1970-02-05 17:05:58'),\n    (376,'Douglas','dfreemanaf@nasa.gov','215.65.124.218',TIMESTAMP '2008-11-20 21:51:55'),\n    (377,'Willie','wwestag@army.mil','35.189.92.118',TIMESTAMP '1992-07-24 05:08:08'),\n    
(378,'Cheryl','cwagnerah@upenn.edu','228.239.222.141',TIMESTAMP '2010-01-25 06:29:01'),\n    (379,'Sandra','swardai@baidu.com','63.11.113.240',TIMESTAMP '1985-05-23 08:07:37'),\n    (380,'Julie','jrobinsonaj@jugem.jp','110.58.202.50',TIMESTAMP '2015-03-05 09:42:07'),\n    (381,'Larry','lwagnerak@shop-pro.jp','98.234.25.24',TIMESTAMP '1975-07-22 22:22:02'),\n    (382,'Juan','jcastilloal@yelp.com','24.174.74.202',TIMESTAMP '2007-01-17 09:32:43'),\n    (383,'Donna','dfrazieram@artisteer.com','205.26.147.45',TIMESTAMP '1990-02-11 20:55:46'),\n    (384,'Rachel','rfloresan@w3.org','109.60.216.162',TIMESTAMP '1983-05-22 22:42:18'),\n    (385,'Robert','rreynoldsao@theguardian.com','122.65.209.130',TIMESTAMP '2009-05-01 18:02:51'),\n    (386,'Donald','dbradleyap@etsy.com','42.54.35.126',TIMESTAMP '1997-01-16 16:31:52'),\n    (387,'Rachel','rfisheraq@nih.gov','160.243.250.45',TIMESTAMP '2006-02-17 22:05:49'),\n    (388,'Nicholas','nhamiltonar@princeton.edu','156.211.37.111',TIMESTAMP '1976-06-21 03:36:29'),\n    (389,'Timothy','twhiteas@ca.gov','36.128.23.70',TIMESTAMP '1975-09-24 03:51:18'),\n    (390,'Diana','dbradleyat@odnoklassniki.ru','44.102.120.184',TIMESTAMP '1983-04-27 09:02:50'),\n    (391,'Billy','bfowlerau@jimdo.com','91.200.68.196',TIMESTAMP '1995-01-29 06:57:35'),\n    (392,'Bruce','bandrewsav@ucoz.com','48.12.101.125',TIMESTAMP '1992-10-27 04:31:39'),\n    (393,'Linda','lromeroaw@usa.gov','100.71.233.19',TIMESTAMP '1992-06-08 15:13:18'),\n    (394,'Debra','dwatkinsax@ucoz.ru','52.160.233.193',TIMESTAMP '2001-11-11 06:51:01'),\n    (395,'Katherine','kburkeay@wix.com','151.156.242.141',TIMESTAMP '2010-06-14 19:54:28'),\n    (396,'Martha','mharrisonaz@youku.com','21.222.10.199',TIMESTAMP '1989-10-16 14:17:55'),\n    (397,'Dennis','dwellsb0@youtu.be','103.16.29.3',TIMESTAMP '1985-12-21 06:05:51'),\n    (398,'Gloria','grichardsb1@bloglines.com','90.147.120.234',TIMESTAMP '1982-08-27 01:04:43'),\n    (399,'Brenda','bfullerb2@t.co','33.253.63.90',TIMESTAMP '2011-04-20 05:00:35'),\n    (400,'Larry','lhendersonb3@disqus.com','88.95.132.128',TIMESTAMP '1982-08-31 02:15:12'),\n    (401,'Richard','rlarsonb4@wisc.edu','13.48.231.150',TIMESTAMP '1979-04-15 14:08:09'),\n    (402,'Terry','thuntb5@usa.gov','65.91.103.240',TIMESTAMP '1998-05-15 11:50:49'),\n    (403,'Harry','hburnsb6@nasa.gov','33.38.21.244',TIMESTAMP '1981-04-12 14:02:20'),\n    (404,'Diana','dellisb7@mlb.com','218.229.81.135',TIMESTAMP '1997-01-29 00:17:25'),\n    (405,'Jack','jburkeb8@tripadvisor.com','210.227.182.216',TIMESTAMP '1984-03-09 17:24:03'),\n    (406,'Julia','jlongb9@fotki.com','10.210.12.104',TIMESTAMP '2005-10-26 03:54:13'),\n    (407,'Lois','lscottba@msu.edu','188.79.136.138',TIMESTAMP '1973-02-02 18:40:39'),\n    (408,'Sandra','shendersonbb@shareasale.com','114.171.220.108',TIMESTAMP '2012-06-09 18:22:26'),\n    (409,'Irene','isanchezbc@cdbaby.com','109.255.50.119',TIMESTAMP '1983-09-28 21:11:27'),\n    (410,'Emily','ebrooksbd@bandcamp.com','227.81.93.79',TIMESTAMP '1970-08-31 21:08:01'),\n    (411,'Michelle','mdiazbe@businessweek.com','236.249.6.226',TIMESTAMP '1993-05-22 08:07:07'),\n    (412,'Tammy','tbennettbf@wisc.edu','145.253.239.152',TIMESTAMP '1978-12-31 20:24:51'),\n    (413,'Christine','cgreenebg@flickr.com','97.25.140.118',TIMESTAMP '1978-07-17 12:55:30'),\n    (414,'Patricia','pgarzabh@tuttocitta.it','139.246.192.211',TIMESTAMP '1984-02-27 13:40:08'),\n    (415,'Kimberly','kromerobi@aol.com','73.56.88.247',TIMESTAMP '1976-09-16 14:22:04'),\n    
(416,'George','gjohnstonbj@fda.gov','240.36.245.185',TIMESTAMP '1979-07-24 14:36:02'),\n    (417,'Eugene','efullerbk@sciencedaily.com','42.38.105.140',TIMESTAMP '2012-09-12 01:56:41'),\n    (418,'Andrea','astevensbl@goo.gl','31.152.207.204',TIMESTAMP '1979-05-24 11:06:21'),\n    (419,'Shirley','sreidbm@scientificamerican.com','103.60.31.241',TIMESTAMP '1984-02-23 04:07:41'),\n    (420,'Terry','tmorenobn@blinklist.com','92.161.34.42',TIMESTAMP '1994-06-25 14:01:35'),\n    (421,'Christopher','cmorenobo@go.com','158.86.176.82',TIMESTAMP '1973-09-05 09:18:47'),\n    (422,'Dennis','dhansonbp@ning.com','40.160.81.75',TIMESTAMP '1982-01-20 10:19:41'),\n    (423,'Beverly','brussellbq@de.vu','138.32.56.204',TIMESTAMP '1997-11-06 07:20:19'),\n    (424,'Howard','hparkerbr@163.com','103.171.134.171',TIMESTAMP '2015-06-24 15:37:10'),\n    (425,'Helen','hmccoybs@fema.gov','61.200.4.71',TIMESTAMP '1995-06-20 08:59:10'),\n    (426,'Ann','ahudsonbt@cafepress.com','239.187.71.125',TIMESTAMP '1977-04-11 07:59:28'),\n    (427,'Tina','twestbu@nhs.uk','80.213.117.74',TIMESTAMP '1992-08-19 05:54:44'),\n    (428,'Terry','tnguyenbv@noaa.gov','21.93.118.95',TIMESTAMP '1991-09-19 23:22:55'),\n    (429,'Ashley','aburtonbw@wix.com','233.176.205.109',TIMESTAMP '2009-11-10 05:01:20'),\n    (430,'Eric','emyersbx@1und1.de','168.91.212.67',TIMESTAMP '1987-08-10 07:16:20'),\n    (431,'Barbara','blittleby@lycos.com','242.14.189.239',TIMESTAMP '2008-08-02 12:13:04'),\n    (432,'Sean','sevansbz@instagram.com','14.39.177.13',TIMESTAMP '2007-04-16 17:28:49'),\n    (433,'Shirley','sburtonc0@newsvine.com','34.107.138.76',TIMESTAMP '1980-12-10 02:19:29'),\n    (434,'Patricia','pfreemanc1@so-net.ne.jp','219.213.142.117',TIMESTAMP '1987-03-01 02:25:45'),\n    (435,'Paula','pfosterc2@vkontakte.ru','227.14.138.141',TIMESTAMP '1972-09-22 12:59:34'),\n    (436,'Nicole','nstewartc3@1688.com','8.164.23.115',TIMESTAMP '1998-10-27 00:10:17'),\n    (437,'Earl','ekimc4@ovh.net','100.26.244.177',TIMESTAMP '2013-01-22 10:05:46'),\n    (438,'Beverly','breedc5@reuters.com','174.12.226.27',TIMESTAMP '1974-09-22 07:29:36'),\n    (439,'Lawrence','lbutlerc6@a8.net','105.164.42.164',TIMESTAMP '1992-06-05 00:43:40'),\n    (440,'Charles','cmoorec7@ucoz.com','252.197.131.69',TIMESTAMP '1990-04-09 02:34:05'),\n    (441,'Alice','alawsonc8@live.com','183.73.220.232',TIMESTAMP '1989-02-28 09:11:04'),\n    (442,'Dorothy','dcarpenterc9@arstechnica.com','241.47.200.14',TIMESTAMP '2005-05-02 19:57:21'),\n    (443,'Carolyn','cfowlerca@go.com','213.109.55.202',TIMESTAMP '1978-09-10 20:18:20'),\n    (444,'Anthony','alongcb@free.fr','169.221.158.204',TIMESTAMP '1984-09-13 01:59:23'),\n    (445,'Annie','amoorecc@e-recht24.de','50.34.148.61',TIMESTAMP '2009-03-26 03:41:07'),\n    (446,'Carlos','candrewscd@ihg.com','236.69.59.212',TIMESTAMP '1972-03-29 22:42:48'),\n    (447,'Beverly','bramosce@google.ca','164.250.184.49',TIMESTAMP '1982-11-10 04:34:01'),\n    (448,'Teresa','tlongcf@umich.edu','174.88.53.223',TIMESTAMP '1987-05-17 12:48:00'),\n    (449,'Roy','rboydcg@uol.com.br','91.58.243.215',TIMESTAMP '1974-06-16 17:59:54'),\n    (450,'Ashley','afieldsch@tamu.edu','130.138.11.126',TIMESTAMP '1983-09-15 05:52:36'),\n    (451,'Judith','jhawkinsci@cmu.edu','200.187.103.245',TIMESTAMP '2003-10-22 12:24:03'),\n    (452,'Rebecca','rwestcj@ocn.ne.jp','72.85.3.103',TIMESTAMP '1980-11-13 11:01:26'),\n    (453,'Raymond','rporterck@infoseek.co.jp','146.33.216.151',TIMESTAMP '1982-05-17 23:58:03'),\n    (454,'Janet','jmarshallcl@odnoklassniki.ru','52.46.193.166',TIMESTAMP 
'1998-10-04 00:02:21'),\n    (455,'Shirley','speterscm@salon.com','248.126.31.15',TIMESTAMP '1987-01-30 06:04:59'),\n    (456,'Annie','abowmancn@economist.com','222.213.248.59',TIMESTAMP '2006-03-14 23:52:59'),\n    (457,'Jean','jlarsonco@blogspot.com','71.41.25.195',TIMESTAMP '2007-09-08 23:49:45'),\n    (458,'Phillip','pmoralescp@stanford.edu','74.119.87.28',TIMESTAMP '2011-03-14 20:25:40'),\n    (459,'Norma','nrobinsoncq@economist.com','28.225.21.54',TIMESTAMP '1989-10-21 01:22:43'),\n    (460,'Kimberly','kclarkcr@dion.ne.jp','149.171.132.153',TIMESTAMP '2008-06-27 02:27:30'),\n    (461,'Ruby','rmorriscs@ucla.edu','177.85.163.249',TIMESTAMP '2016-01-28 16:43:44'),\n    (462,'Jonathan','jcastilloct@tripod.com','78.4.28.77',TIMESTAMP '2000-05-24 17:33:06'),\n    (463,'Edward','ebryantcu@jigsy.com','140.31.98.193',TIMESTAMP '1992-12-17 08:32:47'),\n    (464,'Chris','chamiltoncv@eepurl.com','195.171.234.206',TIMESTAMP '1970-12-05 03:42:19'),\n    (465,'Michael','mweavercw@reference.com','7.233.133.213',TIMESTAMP '1987-03-29 02:30:54'),\n    (466,'Howard','hlawrencecx@businessweek.com','113.225.124.224',TIMESTAMP '1990-07-30 07:20:57'),\n    (467,'Philip','phowardcy@comsenz.com','159.170.247.249',TIMESTAMP '2010-10-15 10:18:37'),\n    (468,'Mary','mmarshallcz@xing.com','125.132.189.70',TIMESTAMP '2007-07-19 13:48:47'),\n    (469,'Scott','salvarezd0@theguardian.com','78.49.103.230',TIMESTAMP '1987-10-31 06:10:44'),\n    (470,'Wayne','wcarrolld1@blog.com','238.1.120.204',TIMESTAMP '1980-11-19 03:26:10'),\n    (471,'Jennifer','jwoodsd2@multiply.com','92.20.224.49',TIMESTAMP '2010-05-06 22:17:04'),\n    (472,'Raymond','rwelchd3@toplist.cz','176.158.35.240',TIMESTAMP '2007-12-12 19:02:51'),\n    (473,'Steven','sdixond4@wisc.edu','167.55.237.52',TIMESTAMP '1984-05-05 11:44:37'),\n    (474,'Ralph','rjamesd5@ameblo.jp','241.190.50.133',TIMESTAMP '2000-07-06 08:44:37'),\n    (475,'Jason','jrobinsond6@hexun.com','138.119.139.56',TIMESTAMP '2006-02-03 05:27:45'),\n    (476,'Doris','dwoodd7@fema.gov','180.220.156.190',TIMESTAMP '1978-05-11 20:14:20'),\n    (477,'Elizabeth','eberryd8@youtu.be','74.188.53.229',TIMESTAMP '2006-11-18 08:29:06'),\n    (478,'Irene','igilbertd9@privacy.gov.au','194.152.218.1',TIMESTAMP '1985-09-17 02:46:52'),\n    (479,'Jessica','jdeanda@ameblo.jp','178.103.93.118',TIMESTAMP '1974-06-07 19:04:05'),\n    (480,'Rachel','ralvarezdb@phoca.cz','17.22.223.174',TIMESTAMP '1999-03-08 02:43:25'),\n    (481,'Kenneth','kthompsondc@shinystat.com','229.119.91.234',TIMESTAMP '2007-05-15 13:17:32'),\n    (482,'Harold','hmurraydd@parallels.com','133.26.188.80',TIMESTAMP '1993-11-15 03:42:07'),\n    (483,'Paula','phowellde@samsung.com','34.215.28.216',TIMESTAMP '1993-11-29 15:55:00'),\n    (484,'Ruth','rpiercedf@tripadvisor.com','111.30.130.123',TIMESTAMP '1986-08-17 10:19:38'),\n    (485,'Phyllis','paustindg@vk.com','50.84.34.178',TIMESTAMP '1994-04-13 03:05:24'),\n    (486,'Laura','lfosterdh@usnews.com','37.8.101.33',TIMESTAMP '2001-06-30 08:58:59'),\n    (487,'Eric','etaylordi@com.com','103.183.253.45',TIMESTAMP '2006-09-15 20:18:46'),\n    (488,'Doris','driveradj@prweb.com','247.16.2.199',TIMESTAMP '1989-05-08 09:27:09'),\n    (489,'Ryan','rhughesdk@elegantthemes.com','103.234.153.232',TIMESTAMP '1989-08-01 18:36:06'),\n    (490,'Steve','smoralesdl@jigsy.com','3.76.84.207',TIMESTAMP '2011-03-13 17:01:05'),\n    (491,'Louis','lsullivandm@who.int','78.135.44.208',TIMESTAMP '1975-11-26 16:01:23'),\n    (492,'Catherine','ctuckerdn@seattletimes.com','93.137.106.21',TIMESTAMP '1990-03-13 
16:14:56'),\n    (493,'Ann','adixondo@gmpg.org','191.136.222.111',TIMESTAMP '2002-06-05 14:22:18'),\n    (494,'Johnny','jhartdp@amazon.com','103.252.198.39',TIMESTAMP '1988-07-30 23:54:49'),\n    (495,'Susan','srichardsdq@skype.com','126.247.192.11',TIMESTAMP '2005-01-09 12:08:14'),\n    (496,'Brenda','bparkerdr@skype.com','63.232.216.86',TIMESTAMP '1974-05-18 05:58:29'),\n    (497,'Tammy','tmurphyds@constantcontact.com','56.56.37.112',TIMESTAMP '2014-08-05 18:22:25'),\n    (498,'Larry','lhayesdt@wordpress.com','162.146.13.46',TIMESTAMP '1997-02-26 14:01:53'),\n    (499,NULL,'ethomasdu@hhs.gov','6.241.88.250',TIMESTAMP '2007-09-14 13:03:34'),\n    (500,'Paula','pshawdv@networksolutions.com','123.27.47.249',TIMESTAMP '2003-10-30 21:19:20')\n\"\"\"\n"
  },
  {
    "path": "tests/functional/adapter/simple_seed/test_seed.py",
    "content": "from pathlib import Path\n\nimport pytest\nfrom dbt.tests.adapter.simple_seed.test_seed import (\n    TestBasicSeedTests as CoreTestBasicSeedTests,\n)\nfrom dbt.tests.adapter.simple_seed.test_seed import (\n    TestSeedConfigFullRefreshOff as CoreTestSeedConfigFullRefreshOff,\n)\nfrom dbt.tests.adapter.simple_seed.test_seed import (\n    TestSeedConfigFullRefreshOn as CoreTestSeedConfigFullRefreshOn,\n)\nfrom dbt.tests.adapter.simple_seed.test_seed import (\n    TestSeedCustomSchema as CoreTestSeedCustomSchema,\n)\nfrom dbt.tests.adapter.simple_seed.test_seed import (\n    TestSeedParsing as CoreTestSeedParsing,\n)\nfrom dbt.tests.adapter.simple_seed.test_seed import (\n    TestSeedSpecificFormats as CoreTestSeedSpecificFormats,\n)\nfrom dbt.tests.adapter.simple_seed.test_seed import (\n    TestSeedWithEmptyDelimiter as CoreTestSeedWithEmptyDelimiter,\n)\nfrom dbt.tests.adapter.simple_seed.test_seed import (\n    TestSeedWithUniqueDelimiter as CoreTestSeedWithUniqueDelimiter,\n)\nfrom dbt.tests.adapter.simple_seed.test_seed import (\n    TestSeedWithWrongDelimiter as CoreTestSeedWithWrongDelimiter,\n)\nfrom dbt.tests.adapter.simple_seed.test_seed import (\n    TestSimpleSeedEnabledViaConfig as CoreTestSimpleSeedEnabledViaConfig,\n)\nfrom dbt.tests.adapter.simple_seed.test_seed import (\n    TestSimpleSeedWithBOM as CoreTestSimpleSeedWithBOM,\n)\nfrom dbt.tests.util import copy_file, run_dbt\n\nfrom tests.functional.adapter.simple_seed.seeds import (\n    trino_seeds__expected_sql_create_table,\n    trino_seeds__expected_sql_insert_into,\n)\n\n\nclass TrinoSetUpFixture:\n    @pytest.fixture(scope=\"class\", autouse=True)\n    def setUp(self, project):\n        \"\"\"Create table for ensuring seeds and models used in tests build correctly\"\"\"\n        project.run_sql(trino_seeds__expected_sql_create_table)\n        project.run_sql(trino_seeds__expected_sql_insert_into)\n\n\nclass TestTrinoBasicSeedTests(TrinoSetUpFixture, CoreTestBasicSeedTests):\n    # TODO Trino currently does not support DROP TABLE CASCADE.\n    #  Dropping seed won't drop downstream models automatically.\n    @pytest.mark.skip\n    def test_simple_seed_full_refresh_flag(self, project):\n        pass\n\n\n# TODO Trino currently does not support DROP TABLE CASCADE.\n#  Dropping seed won't drop downstream models automatically.\n@pytest.mark.skip\nclass TestTrinoSeedConfigFullRefreshOn(TrinoSetUpFixture, CoreTestSeedConfigFullRefreshOn):\n    pass\n\n\nclass TestTrinoSeedConfigFullRefreshOff(TrinoSetUpFixture, CoreTestSeedConfigFullRefreshOff):\n    pass\n\n\nclass TestTrinoSeedCustomSchema(TrinoSetUpFixture, CoreTestSeedCustomSchema):\n    pass\n\n\nclass TestTrinoSeedWithUniqueDelimiter(TrinoSetUpFixture, CoreTestSeedWithUniqueDelimiter):\n    pass\n\n\nclass TestTrinoSeedWithWrongDelimiter(TrinoSetUpFixture, CoreTestSeedWithWrongDelimiter):\n    def test_seed_with_wrong_delimiter(self, project):\n        \"\"\"Testing failure of running dbt seed with a wrongly configured delimiter\"\"\"\n        seed_result = run_dbt([\"seed\"], expect_pass=False)\n        assert \"syntax_error\" in seed_result.results[0].message.lower()\n\n\nclass TestTrinoSeedWithEmptyDelimiter(TrinoSetUpFixture, CoreTestSeedWithEmptyDelimiter):\n    pass\n\n\nclass TestTrinoSimpleSeedEnabledViaConfig(CoreTestSimpleSeedEnabledViaConfig):\n    pass\n\n\nclass TestTrinoSeedParsing(TrinoSetUpFixture, CoreTestSeedParsing):\n    pass\n\n\nclass TestTrinoSimpleSeedWithBOM(CoreTestSimpleSeedWithBOM):\n    @pytest.fixture(scope=\"class\", 
autouse=True)\n    def setUp(self, project):\n        \"\"\"Create table for ensuring seeds and models used in tests build correctly\"\"\"\n        project.run_sql(trino_seeds__expected_sql_create_table)\n        project.run_sql(trino_seeds__expected_sql_insert_into)\n        copy_file(\n            project.test_dir,\n            \"seed_bom.csv\",\n            project.project_root / Path(\"seeds\") / \"seed_bom.csv\",\n            \"\",\n        )\n\n\nclass TestTrinoSeedSpecificFormats(CoreTestSeedSpecificFormats):\n    pass\n"
  },
  {
    "path": "tests/functional/adapter/store_failures/fixtures.py",
    "content": "seed_csv = \"\"\"\nid,value\n1,1\n2,2\n3,3\n4,4\n\"\"\".lstrip()\n\ntable_model = \"\"\"\nselect * from {{ ref('seed') }}\n\"\"\"\n\ntable_profile_yml = \"\"\"\nversion: 2\nmodels:\n  - name: table_model\n    columns:\n      - name: id\n        tests:\n          - unique\n          - not_null\n      - name: value\n        quote: true\n        tests:\n          - not_null\n          - accepted_values:\n              values:\n                - 1\n                - 2\n                - 3\n                - 4\n              quote: false\n\nseeds:\n  - name: seed\n    columns:\n      - name: id\n      - name: value\n        tests:\n          - not_null\n\"\"\"\n"
  },
  {
    "path": "tests/functional/adapter/store_failures/test_store_failures.py",
    "content": "import pytest\nfrom dbt.tests.adapter.store_test_failures_tests import basic\nfrom dbt.tests.adapter.store_test_failures_tests.test_store_test_failures import (\n    TestStoreTestFailures,\n)\nfrom dbt.tests.util import run_dbt\n\nfrom tests.functional.adapter.store_failures.fixtures import (\n    seed_csv,\n    table_model,\n    table_profile_yml,\n)\n\n\nclass TestStoreFailuresTable:\n    @property\n    def schema(self):\n        return \"default\"\n\n    # everything that goes in the \"seeds\" directory\n    @pytest.fixture(scope=\"class\")\n    def seeds(self):\n        return {\n            \"seed.csv\": seed_csv,\n        }\n\n    @pytest.fixture(scope=\"class\")\n    def project_config_update(self):\n        return {\n            \"name\": \"store_failures_tests\",\n            \"quoting\": {\n                \"database\": False,\n                \"schema\": False,\n                \"identifier\": True,\n            },\n            \"models\": {\n                \"+materialized\": \"table\",\n            },\n            \"tests\": {\n                \"+store_failures\": True,\n            },\n        }\n\n    # everything that goes in the \"models\" directory\n    @pytest.fixture(scope=\"class\")\n    def models(self):\n        return {\n            \"table_model.sql\": table_model,\n            \"table_store_failures.yml\": table_profile_yml,\n        }\n\n    @pytest.fixture(autouse=True)\n    def teardown_method(self, project):\n        yield\n        with project.adapter.connection_named(\"__test\"):\n            relation = project.adapter.Relation.create(\n                database=project.database, schema=f\"{project.test_schema}_dbt_test__audit\"\n            )\n            project.adapter.drop_schema(relation)\n\n    def test_run_seed_test(self, project):\n        # seed seeds\n        results = run_dbt([\"seed\"], expect_pass=True)\n        assert len(results) == 1\n        results = run_dbt([\"run\"], expect_pass=True)\n        assert len(results) == 1\n        # test tests\n        results = run_dbt([\"test\"], expect_pass=True)\n        assert len(results) == 5\n        # test tests 2nd times\n        results = run_dbt([\"test\"], expect_pass=True)\n        assert len(results) == 5\n\n\nclass TestTrinoTestStoreTestFailures(TestStoreTestFailures):\n    pass\n\n\nclass TestStoreTestFailuresAsInteractions(basic.StoreTestFailuresAsInteractions):\n    pass\n\n\nclass TestStoreTestFailuresAsProjectLevelOff(basic.StoreTestFailuresAsProjectLevelOff):\n    pass\n\n\nclass TestStoreTestFailuresAsProjectLevelView(basic.StoreTestFailuresAsProjectLevelView):\n    pass\n\n\nclass TestStoreTestFailuresAsGeneric(basic.StoreTestFailuresAsGeneric):\n    pass\n\n\nclass TestStoreTestFailuresAsProjectLevelEphemeral(basic.StoreTestFailuresAsProjectLevelEphemeral):\n    pass\n\n\nclass TestStoreTestFailuresAsExceptions(basic.StoreTestFailuresAsExceptions):\n    pass\n"
  },
  {
    "path": "tests/functional/adapter/test_basic.py",
    "content": "import pytest\nfrom dbt.tests.adapter.basic.expected_catalog import base_expected_catalog, no_stats\nfrom dbt.tests.adapter.basic.files import generic_test_seed_yml\nfrom dbt.tests.adapter.basic.test_adapter_methods import BaseAdapterMethod\nfrom dbt.tests.adapter.basic.test_base import BaseSimpleMaterializations\nfrom dbt.tests.adapter.basic.test_docs_generate import BaseDocsGenerate\nfrom dbt.tests.adapter.basic.test_empty import BaseEmpty\nfrom dbt.tests.adapter.basic.test_ephemeral import BaseEphemeral\nfrom dbt.tests.adapter.basic.test_generic_tests import BaseGenericTests\nfrom dbt.tests.adapter.basic.test_incremental import (\n    BaseIncremental,\n    BaseIncrementalNotSchemaChange,\n)\nfrom dbt.tests.adapter.basic.test_singular_tests import BaseSingularTests\nfrom dbt.tests.adapter.basic.test_singular_tests_ephemeral import (\n    BaseSingularTestsEphemeral,\n)\nfrom dbt.tests.adapter.basic.test_validate_connection import BaseValidateConnection\nfrom dbt.tests.util import run_dbt\n\nseeds_base_csv = \"\"\"\nid,name,some_date\n1,Easton,1981-05-20 06:46:51\n2,Lillian,1978-09-03 18:10:33\n3,Jeremiah,1982-03-11 03:59:51\n4,Nolan,1976-05-06 20:21:35\n5,Hannah,1982-06-23 05:41:26\n6,Eleanor,1991-08-10 23:12:21\n7,Lily,1971-03-29 14:58:02\n8,Jonathan,1988-02-26 02:55:24\n9,Adrian,1994-02-09 13:14:23\n10,Nora,1976-03-01 16:51:39\n\"\"\".lstrip()\n\n\nseeds_added_csv = (\n    seeds_base_csv\n    + \"\"\"\n11,Mateo,2014-09-07 17:04:27\n12,Julian,2000-02-04 11:48:30\n13,Gabriel,2001-07-10 07:32:52\n14,Isaac,2002-11-24 03:22:28\n15,Levi,2009-11-15 11:57:15\n16,Elizabeth,2005-04-09 03:50:11\n17,Grayson,2019-08-06 19:28:17\n18,Dylan,2014-03-01 11:50:41\n19,Jayden,2009-06-06 07:12:49\n20,Luke,2003-12-05 21:42:18\n\"\"\".lstrip()\n)\n\n\nseed__schema_yml = \"\"\"\nversion: 2\nseeds:\n  - name: seed\n    description: \"The test seed\"\n    columns:\n      - name: id\n        description: The user ID number\n      - name: first_name\n        description: The user's first name\n      - name: email\n        description: The user's email\n      - name: ip_address\n        description: The user's IP address\n      - name: updated_at\n        description: The last time this user's email was updated\n\"\"\"\n\nseed__seed_csv = \"\"\"id,first_name,email,ip_address,updated_at\n1,Larry,lking0@miitbeian.gov.cn,69.135.206.194,2008-09-12 19:08:31\n\"\"\"\n\nincremental_not_schema_change_sql = \"\"\"\n{{ config(materialized=\"incremental\", unique_key=\"user_id_current_time\",on_schema_change=\"sync_all_columns\") }}\nselect\n    '1' || '-' || cast(current_timestamp as varchar) as user_id_current_time,\n    {% if is_incremental() %}\n        'thisis18characters' as platform\n    {% else %}\n        'okthisis20characters' as platform\n    {% endif %}\n\"\"\"\n\n\nclass TestAdapterMethods(BaseAdapterMethod):\n    pass\n\n\n# TODO Internal Galaxy issue: type=INTERNAL_ERROR, name=GENERIC_INTERNAL_ERROR,\n# message=\"Unexpected response status (Internal Server Error) performing operation: entity created\n@pytest.mark.skip_profile(\"starburst_galaxy\")\nclass TestSimpleMaterializationsTrino(BaseSimpleMaterializations):\n    @pytest.fixture(scope=\"class\")\n    def project_config_update(self):\n        return {\n            \"name\": \"base\",\n            \"seeds\": {\n                \"+column_types\": {\"some_date\": \"timestamp(6)\"},\n            },\n        }\n\n    @pytest.fixture(scope=\"class\")\n    def seeds(self):\n        return {\n            \"base.csv\": seeds_base_csv,\n        
}\n\n\nclass TestSingularTestsTrino(BaseSingularTests):\n    pass\n\n\nclass TestSingularTestsEphemeralTrino(BaseSingularTestsEphemeral):\n    @pytest.fixture(scope=\"class\")\n    def project_config_update(self):\n        return {\n            \"name\": \"singular_tests_ephemeral\",\n            \"seeds\": {\n                \"+column_types\": {\"some_date\": \"timestamp(6)\"},\n            },\n        }\n\n    @pytest.fixture(scope=\"class\")\n    def seeds(self):\n        return {\n            \"base.csv\": seeds_base_csv,\n        }\n\n\nclass TestEmptyTrino(BaseEmpty):\n    pass\n\n\nclass TestEphemeralTrino(BaseEphemeral):\n    @pytest.fixture(scope=\"class\")\n    def project_config_update(self):\n        return {\n            \"name\": \"ephemeral\",\n            \"seeds\": {\n                \"+column_types\": {\"some_date\": \"timestamp(6)\"},\n            },\n        }\n\n    @pytest.fixture(scope=\"class\")\n    def seeds(self):\n        return {\n            \"base.csv\": seeds_base_csv,\n        }\n\n\nclass TestIncrementalTrino(BaseIncremental):\n    @pytest.fixture(scope=\"class\")\n    def project_config_update(self):\n        return {\n            \"name\": \"incremental\",\n            \"seeds\": {\n                \"+column_types\": {\"some_date\": \"timestamp(6)\"},\n            },\n        }\n\n    @pytest.fixture(scope=\"class\")\n    def seeds(self):\n        return {\"base.csv\": seeds_base_csv, \"added.csv\": seeds_added_csv}\n\n\nclass TestIncrementalFullRefreshTrino(TestIncrementalTrino):\n    def test_incremental(self, project):\n        super().test_incremental(project)\n        results = run_dbt([\"run\", \"--vars\", \"seed_name: base\", \"--full-refresh\"])\n        assert len(results) == 1\n\n\nclass TestIncrementalNotSchemaChangeTrino(BaseIncrementalNotSchemaChange):\n    @pytest.fixture(scope=\"class\")\n    def models(self):\n        return {\"incremental_not_schema_change.sql\": incremental_not_schema_change_sql}\n\n\nclass TestGenericTestsTrino(BaseGenericTests):\n    @pytest.fixture(scope=\"class\")\n    def project_config_update(self):\n        return {\n            \"name\": \"generic_tests\",\n            \"seeds\": {\n                \"+column_types\": {\"some_date\": \"timestamp(6)\"},\n            },\n        }\n\n    @pytest.fixture(scope=\"class\")\n    def seeds(self):\n        return {\"base.csv\": seeds_base_csv, \"schema.yml\": generic_test_seed_yml}\n\n\nclass TestTrinoValidateConnection(BaseValidateConnection):\n    pass\n\n\nclass TestDocsGenerateTrino(BaseDocsGenerate):\n    @pytest.fixture(scope=\"class\")\n    def project_config_update(self, unique_schema):\n        alternate_schema = unique_schema + \"_test\"\n        return {\n            \"asset-paths\": [\"assets\", \"invalid-asset-paths\"],\n            \"vars\": {\n                \"test_schema\": unique_schema,\n                \"alternate_schema\": alternate_schema,\n            },\n            \"seeds\": {\n                \"quote_columns\": True,\n                \"+column_types\": {\"updated_at\": \"timestamp(6)\"},\n            },\n            \"quoting\": {\"identifier\": False},\n        }\n\n    @pytest.fixture(scope=\"class\")\n    def seeds(self):\n        return {\"schema.yml\": seed__schema_yml, \"seed.csv\": seed__seed_csv}\n\n    @pytest.fixture(scope=\"class\")\n    def expected_catalog(self, project, profile_user):\n        return base_expected_catalog(\n            project,\n            role=None,\n            id_type=\"integer\",\n            
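# Trino type names expected in the generated catalog\n            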
text_type=\"varchar\",\n            time_type=\"timestamp(6)\",\n            view_type=\"VIEW\",\n            table_type=\"BASE TABLE\",\n            model_stats=no_stats(),\n        )\n"
  },
  {
    "path": "tests/functional/adapter/test_caching.py",
    "content": "from dbt.tests.adapter.caching.test_caching import (\n    BaseCachingLowercaseModel,\n    BaseCachingSelectedSchemaOnly,\n    BaseCachingUppercaseModel,\n)\n\n\nclass TestCachingLowerCaseModel(BaseCachingLowercaseModel):\n    pass\n\n\nclass TestCachingUppercaseModel(BaseCachingUppercaseModel):\n    pass\n\n\nclass TestCachingSelectedSchemaOnly(BaseCachingSelectedSchemaOnly):\n    pass\n"
  },
  {
    "path": "tests/functional/adapter/test_changing_relation_type.py",
    "content": "from dbt.tests.adapter.relations.test_changing_relation_type import (\n    BaseChangeRelationTypeValidator,\n)\n\n\nclass TestTrinoChangeRelationTypes(BaseChangeRelationTypeValidator):\n    pass\n"
  },
  {
    "path": "tests/functional/adapter/test_concurrency.py",
    "content": "from dbt.tests.adapter.concurrency.test_concurrency import (\n    BaseConcurrency,\n    seeds__update_csv,\n)\nfrom dbt.tests.util import check_relations_equal, rm_file, run_dbt, write_file\n\n\nclass TestConcurrencyTrino(BaseConcurrency):\n    def test_concurrency(self, project):\n        run_dbt([\"seed\", \"--select\", \"seed\"])\n        results = run_dbt([\"run\"], expect_pass=False)\n        assert len(results) == 7\n        check_relations_equal(project.adapter, [\"SEED\", \"VIEW_MODEL\"])\n        check_relations_equal(project.adapter, [\"SEED\", \"DEP\"])\n        check_relations_equal(project.adapter, [\"SEED\", \"TABLE_A\"])\n        check_relations_equal(project.adapter, [\"SEED\", \"TABLE_B\"])\n\n        rm_file(project.project_root, \"seeds\", \"seed.csv\")\n        write_file(seeds__update_csv, project.project_root + \"/seeds\", \"seed.csv\")\n        results = run_dbt([\"run\"], expect_pass=False)\n        assert len(results) == 7\n        check_relations_equal(project.adapter, [\"SEED\", \"VIEW_MODEL\"])\n        check_relations_equal(project.adapter, [\"SEED\", \"DEP\"])\n        check_relations_equal(project.adapter, [\"SEED\", \"TABLE_A\"])\n        check_relations_equal(project.adapter, [\"SEED\", \"TABLE_B\"])\n"
  },
  {
    "path": "tests/functional/adapter/test_custom_schema.py",
    "content": "from abc import ABC, abstractmethod\n\nimport pytest\nfrom dbt.tests.util import run_dbt, run_sql_with_adapter\n\nseed_csv = \"\"\"\nid,name,date\n1,Easton,1981-05-20 06:46:51\n2,Lillian,1978-09-03 18:10:33\n3,Jeremiah,1982-03-11 03:59:51\n4,Nolan,1976-05-06 20:21:35\n\"\"\".lstrip()\n\n\nclass CustomSchemaBase(ABC):\n    \"\"\"\n    This test is meant to ensure that Trino table, view, incremental materialization\n    works as expected for custom schemas\n    \"\"\"\n\n    # set custom schema name\n    custom_schema_name = \"very_custom_schema_name\"\n\n    @property\n    @abstractmethod\n    def table_type(self):\n        pass\n\n    @property\n    @abstractmethod\n    def materialization(self):\n        pass\n\n    # define model\n    def custom_schema_model(self, materialization):\n        return f\"\"\"\n                    {{{{\n                        config(\n                        materialized=\"{materialization}\",\n                        schema=\"{self.custom_schema_name}\"\n                        )\n                    }}}}\n                    select * from {{{{ ref('seed') }}}}\n                \"\"\"\n\n    @pytest.fixture(scope=\"class\")\n    def project_config_update(self):\n        return {\n            \"seeds\": {\n                \"+column_types\": {\"date\": \"timestamp(6)\"},\n            },\n        }\n\n    # everything that goes in the \"seeds\" directory\n    @pytest.fixture(scope=\"class\")\n    def seeds(self):\n        return {\n            \"seed.csv\": seed_csv,\n        }\n\n    # everything that goes in the \"models\" directory\n    @pytest.fixture(scope=\"class\")\n    def models(self):\n        return {\n            f\"custom_schema_{self.materialization()}_model.sql\": self.custom_schema_model(\n                self.materialization()\n            )\n        }\n\n    @pytest.fixture(scope=\"function\", autouse=True)\n    def teardown_method(self, project):\n        yield\n        relation = project.adapter.Relation.create(\n            database=project.database, schema=f\"{project.test_schema}_{self.custom_schema_name}\"\n        )\n        project.adapter.drop_schema(relation)\n\n    def test_custom_schema_trino(self, project):\n        # Seed seeds, run models.\n        results = run_dbt([\"seed\"], expect_pass=True)\n        assert len(results) == 1\n        results = run_dbt([\"run\"], expect_pass=True)\n        assert len(results) == 1\n\n        # Fetch info from information_schema about just created table/view.\n        sql = f\"\"\"\n            select * from {project.adapter.config.credentials.database}.information_schema.tables\n            where table_catalog = '{project.adapter.config.credentials.database}'\n            and table_schema = '{project.adapter.config.credentials.schema}_{self.custom_schema_name}'\n        \"\"\".strip()\n        results = run_sql_with_adapter(project.adapter, sql, fetch=\"all\")\n\n        # Check if fetched info is as expected to be.\n        assert len(results) == 1\n        assert results[0][0] == project.adapter.config.credentials.database\n        assert (\n            results[0][1]\n            == f\"{project.adapter.config.credentials.schema}_{self.custom_schema_name}\"\n        )\n        assert results[0][2] == f\"custom_schema_{self.materialization()}_model\"\n        assert results[0][3] == self.table_type()\n\n\nclass TestCustomSchemaTable(CustomSchemaBase):\n    def materialization(self):\n        return \"table\"\n\n    def table_type(self):\n        return \"BASE 
TABLE\"\n\n\nclass TestCustomSchemaView(CustomSchemaBase):\n    def materialization(self):\n        return \"view\"\n\n    def table_type(self):\n        return \"VIEW\"\n\n\nclass TestCustomSchemaIncremental(CustomSchemaBase):\n    def materialization(self):\n        return \"incremental\"\n\n    def table_type(self):\n        return \"BASE TABLE\"\n"
  },
  {
    "path": "tests/functional/adapter/test_ephemeral.py",
    "content": "from dbt.tests.adapter.ephemeral.test_ephemeral import (\n    BaseEphemeralErrorHandling,\n    BaseEphemeralMulti,\n    BaseEphemeralNested,\n)\nfrom dbt.tests.util import check_relations_equal, run_dbt\n\n\nclass TestEphemeralMultiTrino(BaseEphemeralMulti):\n    def test_ephemeral_multi(self, project):\n        run_dbt([\"seed\"])\n        results = run_dbt([\"run\"])\n        assert len(results) == 3\n        check_relations_equal(\n            project.adapter, [\"SEED\", \"DEPENDENT\", \"DOUBLE_DEPENDENT\", \"SUPER_DEPENDENT\"]\n        )\n\n\nclass TestEphemeralNestedTrino(BaseEphemeralNested):\n    def test_ephemeral_nested(self, project):\n        results = run_dbt([\"run\"])\n        assert len(results) == 2\n\n\nclass TestEphemeralErrorHandlingTrino(BaseEphemeralErrorHandling):\n    pass\n"
  },
  {
    "path": "tests/functional/adapter/test_get_incremental_tmp_relation_type_macro.py",
    "content": "from abc import ABC, abstractmethod\n\nimport pytest\nfrom dbt.tests.util import run_dbt, run_sql_with_adapter\n\n\nclass CustomSchemaBase(ABC):\n    \"\"\"\n    This test is meant to ensure that get_incremental_tmp_relation_type macro\n    is returning expected values on certain inputs.\n    \"\"\"\n\n    @property\n    @abstractmethod\n    def expected_types(self):\n        # Expected table/view type returned from created model.\n        # Order based on columns' order in model definition.\n        return [\"table\", \"view\", \"view\", \"view\", \"table\", \"view\", \"table\"]\n\n    # define model\n    def incremental_model(self):\n        return \"\"\"\n                    select\n                    '{{ get_incremental_tmp_relation_type('delete+insert', 'foo', 'sql') }}' AS delete_plus_insert_strategy,\n                    '{{ get_incremental_tmp_relation_type('append', 'foo', 'sql') }}' AS append_strategy,\n                    '{{ get_incremental_tmp_relation_type('default', 'foo', 'sql') }}' AS default_strategy,\n                    '{{ get_incremental_tmp_relation_type('merge', 'foo', 'sql') }}' AS merge_strategy,\n                    '{{ get_incremental_tmp_relation_type('foo', 'some_unique_key', 'sql') }}' AS unique_key,\n                    '{{ get_incremental_tmp_relation_type('foo', None, 'sql') }}' AS no_unique_key,\n                    '{{ get_incremental_tmp_relation_type('default', 'foo', 'python') }}' AS python_model\n                \"\"\"\n\n    @pytest.fixture(scope=\"class\")\n    @abstractmethod\n    def project_config_update(self):\n        pass\n\n    # everything that goes in the \"models\" directory\n    @pytest.fixture(scope=\"class\")\n    def models(self):\n        return {\"test_get_incremental_tmp_relation_type.sql\": self.incremental_model()}\n\n    def test_get_incremental_tmp_relation_type(self, project):\n        # Run models.\n        results = run_dbt([\"run\"], expect_pass=True)\n        assert len(results) == 1\n\n        # Fetch info from get_incremental_tmp_relation_type macro output.\n        sql = f\"\"\"\n            select * from {project.adapter.config.credentials.database}.{project.adapter.config.credentials.schema}.test_get_incremental_tmp_relation_type\n        \"\"\".strip()\n        results = run_sql_with_adapter(project.adapter, sql, fetch=\"all\")\n\n        # Check if fetched info is as expected to be.\n        assert len(results) == 1\n        assert results[0] == self.expected_types\n\n\nclass TestViewsEnabled(CustomSchemaBase):\n    @property\n    def expected_types(self):\n        return super().expected_types\n\n    @pytest.fixture(scope=\"class\")\n    def project_config_update(self):\n        # Not specifying views_enabled config,\n        # as it is 'True' by default\n        pass\n\n\nclass TestViewsNotEnabled(CustomSchemaBase):\n    @property\n    def expected_types(self):\n        # Expected type is 'table' for every config,\n        # as views_enabled is set to 'False'.\n        return [\"table\" for _ in super().expected_types]\n\n    @pytest.fixture(scope=\"class\")\n    def project_config_update(self):\n        return {\n            \"models\": {\"+views_enabled\": False},\n        }\n"
  },
  {
    "path": "tests/functional/adapter/test_grants.py",
    "content": "import pytest\nfrom dbt.context.base import BaseContext  # diff_of_two_dicts only\nfrom dbt.tests.adapter.grants.test_invalid_grants import BaseInvalidGrants\nfrom dbt.tests.adapter.grants.test_model_grants import BaseModelGrants\n\n\n@pytest.mark.hive\n# TODO: setup Galaxy and Starbust tests\n#   See https://github.com/starburstdata/dbt-trino/issues/147\n#   and also https://github.com/starburstdata/dbt-trino/issues/146\n@pytest.mark.skip_profile(\"starburst_galaxy\")\n# To run this test locally add following env vars:\n# DBT_TEST_USER_1=user1\n# DBT_TEST_USER_2=user2\n# DBT_TEST_USER_3=user3\nclass TestModelGrantsTrino(BaseModelGrants):\n    def assert_expected_grants_match_actual(self, project, relation_name, expected_grants):\n        actual_grants = self.get_grants_on_relation(project, relation_name)\n        # Remove the creation user\n        try:\n            for privilege in [\"delete\", \"update\", \"insert\", \"select\"]:\n                if privilege in actual_grants:\n                    actual_grants[privilege].remove(\"admin\")\n                    if len(actual_grants[privilege]) == 0:\n                        del actual_grants[privilege]\n        except ValueError:\n            pass\n\n        # need a case-insensitive comparison\n        # so just a simple \"assert expected == actual_grants\" won't work\n        diff_a = BaseContext.diff_of_two_dicts(actual_grants, expected_grants)\n        diff_b = BaseContext.diff_of_two_dicts(expected_grants, actual_grants)\n        assert diff_a == diff_b == {}\n\n\n@pytest.mark.hive\n# TODO: setup Galaxy and Starbust tests, might need separate tests\n#   See https://github.com/starburstdata/dbt-trino/issues/147\n#   and also https://github.com/starburstdata/dbt-trino/issues/146\n@pytest.mark.skip(reason=\"Hive doesn't raise errors on invalid roles\")\nclass TestInvalidGrantsTrino(BaseInvalidGrants):\n    pass\n"
  },
  {
    "path": "tests/functional/adapter/test_query_comments.py",
    "content": "from dbt.tests.adapter.query_comment.test_query_comment import (\n    BaseEmptyQueryComments,\n    BaseMacroArgsQueryComments,\n    BaseMacroInvalidQueryComments,\n    BaseMacroQueryComments,\n    BaseNullQueryComments,\n    BaseQueryComments,\n)\n\n\nclass TestQueryCommentsTrino(BaseQueryComments):\n    pass\n\n\nclass TestMacroQueryCommentsTrino(BaseMacroQueryComments):\n    pass\n\n\nclass TestMacroArgsQueryCommentsTrino(BaseMacroArgsQueryComments):\n    pass\n\n\nclass TestMacroInvalidQueryCommentsTrino(BaseMacroInvalidQueryComments):\n    pass\n\n\nclass TestNullQueryCommentsTrino(BaseNullQueryComments):\n    pass\n\n\nclass TestEmptyQueryCommentsTrino(BaseEmptyQueryComments):\n    pass\n"
  },
  {
    "path": "tests/functional/adapter/test_quote_policy.py",
    "content": "import pytest\n\nfrom tests.functional.adapter.test_basic import TestIncrementalTrino\n\n\n@pytest.fixture(scope=\"class\")\ndef unique_schema(request, prefix) -> str:\n    return \"sChEmAWiThMiXeDCaSe\"\n\n\nclass TestTrinoQuotePolicy(TestIncrementalTrino):\n    pass\n"
  },
  {
    "path": "tests/functional/adapter/test_sample_mode.py",
    "content": "from dbt.tests.adapter.sample_mode.test_sample_mode import BaseSampleModeTest\n\n\nclass TestTrinoSampleMode(BaseSampleModeTest):\n    pass\n"
  },
  {
    "path": "tests/functional/adapter/test_seeds_column_types_overrides.py",
    "content": "import pytest\nfrom dbt.tests.util import get_connection, relation_from_name, run_dbt\n\nboolean_type = \"\"\"\nboolean_example\ntrue\n\"\"\".lstrip()\n\ndatetime_type = \"\"\"\ndate_example,time_example,time_p_example,time_tz_example,timestamp_example,timestamp_p_example,timestamp_tz_example,timestamp_p_tz_example,interval_ym_example,interval_ds_example\n2018-01-05,01:02:03.456,01:02:03.456789,01:02:03.456 -08:00,2020-06-10 15:55:23.383,2020-06-10 15:55:23.383345,2001-08-22 03:04:05.321-08:00,2001-08-22 03:04:05.321456-08:00,'3' MONTH,'2' DAY\n,,,,,,,,,\n\"\"\".lstrip()\n\nnumber_type = \"\"\"\ninteger_example,tinyint_example,smallint_example,bigint_example,real_example,double_example,decimal_example,decimal_p_example\n1,2,3,4,10.3e0,10.3e0,1.1,1.23\n,,,,,,\n\"\"\".lstrip()\n\nstring_type = \"\"\"varchar_example,varchar_n_example,char_example,char_n_example,varbinary_example,json_example\ntest,abc,d,ghi,65683F,\"{\"\"k1\"\":1,\"\"k2\"\":23,\"\"k3\"\":456}\"\n,,,,,\n\"\"\".lstrip()\n\nseed_types = {\n    \"boolean_type\": {\n        \"boolean_example\": \"boolean\",\n    },\n    \"datetime_type\": {\n        \"date_example\": \"date\",\n        \"time_example\": \"time\",\n        \"time_p_example\": \"time(6)\",\n        \"time_tz_example\": \"time with time zone\",\n        \"timestamp_example\": \"timestamp\",\n        \"timestamp_p_example\": \"timestamp(6)\",\n        \"timestamp_tz_example\": \"timestamp with time zone\",\n        \"timestamp_p_tz_example\": \"timestamp(6) with time zone\",\n        \"interval_ym_example\": \"interval year to month\",\n        \"interval_ds_example\": \"interval day to second\",\n    },\n    \"number_type\": {\n        \"integer_example\": \"integer\",\n        \"tinyint_example\": \"tinyint\",\n        \"smallint_example\": \"smallint\",\n        \"bigint_example\": \"bigint\",\n        \"real_example\": \"real\",\n        \"double_example\": \"double\",\n        \"decimal_example\": \"decimal\",\n        \"decimal_p_example\": \"decimal(3,2)\",\n    },\n    \"string_type\": {\n        \"varchar_example\": \"varchar\",\n        \"varchar_n_example\": \"varchar(10)\",\n        \"char_example\": \"char\",\n        \"char_n_example\": \"char(10)\",\n        \"varbinary_example\": \"varbinary\",\n        \"json_example\": \"json\",\n    },\n}\n\n\n# function copied from dbt.tests.util. 
Original function doesn't return numeric_precision and numeric_scale.\ndef get_relation_columns(adapter, name):\n    relation = relation_from_name(adapter, name)\n    with get_connection(adapter):\n        columns = adapter.get_columns_in_relation(relation)\n        return sorted(\n            (\n                (c.name, c.dtype, c.char_size, c.numeric_precision, c.numeric_scale)\n                for c in columns\n            ),\n            key=lambda x: x[0],\n        )\n\n\n@pytest.mark.skip_profile(\"starburst_galaxy\")\nclass TestSeedsColumnTypesOverrides:\n    @pytest.fixture(scope=\"class\")\n    def project_config_update(self):\n        return {\n            \"seeds\": {\n                \"test\": {\n                    \"boolean_type\": {\"+column_types\": seed_types[\"boolean_type\"]},\n                    \"datetime_type\": {\"+column_types\": seed_types[\"datetime_type\"]},\n                    \"number_type\": {\"+column_types\": seed_types[\"number_type\"]},\n                    \"string_type\": {\"+column_types\": seed_types[\"string_type\"]},\n                }\n            }\n        }\n\n    # everything that goes in the \"seeds\" directory\n    @pytest.fixture(scope=\"class\")\n    def seeds(self):\n        return {\n            \"boolean_type.csv\": boolean_type,\n            \"datetime_type.csv\": datetime_type,\n            \"number_type.csv\": number_type,\n            \"string_type.csv\": string_type,\n        }\n\n    def test_seeds_column_overrides(self, project):\n        # seed seeds\n        results = run_dbt([\"seed\"], expect_pass=True)\n        assert len(results) == 4\n\n        actual_columns = {}\n        for seed_name, seed_columns in seed_types.items():\n            # retrieve information about columns from trino\n            actual_columns[seed_name] = get_relation_columns(project.adapter, seed_name)\n\n        assert actual_columns == {\n            \"boolean_type\": [\n                (\"boolean_example\", \"boolean\", None, None, None),\n            ],\n            \"datetime_type\": [\n                (\"date_example\", \"date\", None, None, None),\n                (\"interval_ds_example\", \"interval day to second\", None, None, None),\n                (\"interval_ym_example\", \"interval year to month\", None, None, None),\n                (\"time_example\", \"time(3)\", None, None, None),\n                (\"time_p_example\", \"time(6)\", None, None, None),\n                (\"time_tz_example\", \"time(3) with time zone\", None, None, None),\n                (\"timestamp_example\", \"timestamp(3)\", None, None, None),\n                (\"timestamp_p_example\", \"timestamp(6)\", None, None, None),\n                (\"timestamp_p_tz_example\", \"timestamp(6) with time zone\", None, None, None),\n                (\"timestamp_tz_example\", \"timestamp(3) with time zone\", None, None, None),\n            ],\n            \"number_type\": [\n                (\"bigint_example\", \"bigint\", None, None, None),\n                (\"decimal_example\", \"decimal\", None, 38, 0),\n                (\"decimal_p_example\", \"decimal\", None, 3, 2),\n                (\"double_example\", \"double\", None, None, None),\n                (\"integer_example\", \"integer\", None, None, None),\n                (\"real_example\", \"real\", None, None, None),\n                (\"smallint_example\", \"smallint\", None, None, None),\n                (\"tinyint_example\", \"tinyint\", None, None, None),\n            ],\n            \"string_type\": [\n                
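# parametrized char/varchar lengths surface in char_size rather than in the dtype string\n                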
(\"char_example\", \"char\", 1, None, None),\n                (\"char_n_example\", \"char\", 10, None, None),\n                (\"json_example\", \"json\", None, None, None),\n                (\"varbinary_example\", \"varbinary\", None, None, None),\n                (\"varchar_example\", \"varchar\", None, None, None),\n                (\"varchar_n_example\", \"varchar\", 10, None, None),\n            ],\n        }\n"
  },
  {
    "path": "tests/functional/adapter/test_session_property.py",
    "content": "import pytest\nfrom dbt.tests.util import run_dbt\n\nset_session_property = \"set session query_max_run_time='20s'\"\n\n\nclass TestSessionProperty:\n    \"\"\"\n    This test is ensuring that setting session properties through pre_hook is working as expected.\n    Test is asserting, that session property passed in 'pre_hook' config in model definition\n    matches pre_hook value extracted from RunExecutionResult object.\n    \"\"\"\n\n    @property\n    def schema(self):\n        return \"default\"\n\n    def session_property_model(self, prehook):\n        return f\"\"\"\n                    {{{{\n                        config(\n                            pre_hook=\"{prehook}\"\n                        )\n                    }}}}\n                    select 'OK' as status\n                \"\"\"\n\n    # everything that goes in the \"models\" directory\n    @pytest.fixture(scope=\"class\")\n    def models(self):\n        return {\"session_property_model.sql\": self.session_property_model(set_session_property)}\n\n    def test_custom_schema_trino(self, project):\n        # Run models.\n        results = run_dbt([\"run\"], expect_pass=True)\n        assert len(results) == 1\n        assert set_session_property == results.results[0].node.config.pre_hook[0].sql\n"
  },
  {
    "path": "tests/functional/adapter/test_simple_copy.py",
    "content": "import pytest\nfrom dbt.tests.adapter.simple_copy.test_simple_copy import (\n    EmptyModelsArentRunBase,\n    SimpleCopyBase,\n)\nfrom dbt.tests.util import run_dbt\n\n\n@pytest.mark.iceberg\nclass TestSimpleCopyBase(SimpleCopyBase):\n    def test_simple_copy_with_materialized_views(self, project):\n        project.run_sql(f\"create table {project.test_schema}.unrelated_table (id int)\")\n        sql = f\"\"\"\n            create materialized view {project.test_schema}.unrelated_materialized_view as (\n                select * from {project.test_schema}.unrelated_table\n            )\n        \"\"\"\n        project.run_sql(sql)\n        sql = f\"\"\"\n            create view {project.test_schema}.unrelated_view as (\n                select * from {project.test_schema}.unrelated_materialized_view\n            )\n        \"\"\"\n        project.run_sql(sql)\n        results = run_dbt([\"seed\"])\n        assert len(results) == 1\n        results = run_dbt()\n        assert len(results) == 7\n\n        # clean up\n        # TODO: check if this clean-up is still needed\n        #  after implementing CASCADE in iceberg, delta, hive connectors\n        #  if not, entire method could be deleted\n        project.run_sql(\"drop view unrelated_view\")\n        project.run_sql(\"drop materialized view unrelated_materialized_view\")\n        project.run_sql(\"drop table unrelated_table\")\n\n\n# Trino implementation of dbt.tests.fixtures.project.TestProjInfo.get_tables_in_schema\n# which use `like` instead of `ilike`\ndef trino_get_tables_in_schema(prj):\n    sql = \"\"\"\n            select table_name,\n                    case when table_type = 'BASE TABLE' then 'table'\n                         when table_type = 'VIEW' then 'view'\n                         else table_type\n                    end as materialization\n            from information_schema.tables\n            where {}\n            order by table_name\n            \"\"\"\n    sql = sql.format(\"lower({}) like lower('{}')\".format(\"table_schema\", prj.test_schema))\n    result = prj.run_sql(sql, fetch=\"all\")\n    return {model_name: materialization for (model_name, materialization) in result}\n\n\nclass TestEmptyModelsArentRun(EmptyModelsArentRunBase):\n    def test_dbt_doesnt_run_empty_models(self, project):\n        results = run_dbt([\"seed\"])\n        assert len(results) == 1\n        results = run_dbt()\n        assert len(results) == 7\n\n        tables = trino_get_tables_in_schema(project)\n\n        assert \"empty\" not in tables.keys()\n        assert \"disabled\" not in tables.keys()\n"
  },
  {
    "path": "tests/functional/adapter/test_simple_snapshot.py",
    "content": "import pytest\nfrom dbt.tests.adapter.simple_snapshot.test_snapshot import (\n    BaseSimpleSnapshot,\n    BaseSnapshotCheck,\n)\nfrom dbt.tests.util import run_dbt\n\niceberg_macro_override_sql = \"\"\"\n{% macro trino__current_timestamp() -%}\n    current_timestamp(6)\n{%- endmacro %}\n\"\"\"\n\n\nclass TrinoSimpleSnapshot(BaseSimpleSnapshot):\n    def test_updates_are_captured_by_snapshot(self, project):\n        \"\"\"\n        Update the last 5 records. Show that all ids are current, but the last 5 reflect updates.\n        \"\"\"\n        self.update_fact_records(\n            {\"updated_at\": \"updated_at + interval '1' day\"}, \"id between 16 and 20\"\n        )\n        run_dbt([\"snapshot\"])\n        self._assert_results(\n            ids_with_current_snapshot_records=range(1, 21),\n            ids_with_closed_out_snapshot_records=range(16, 21),\n        )\n\n    def test_new_column_captured_by_snapshot(self, project):\n        \"\"\"\n        Add a column to `fact` and populate the last 10 records with a non-null value.\n        Show that all ids are current, but the last 10 reflect updates and the first 10 don't\n        i.e. if the column is added, but not updated, the record doesn't reflect that it's updated\n        \"\"\"\n        self.add_fact_column(\"full_name\", \"varchar(200)\")\n        self.update_fact_records(\n            {\n                \"full_name\": \"first_name || ' ' || last_name\",\n                \"updated_at\": \"updated_at + interval '1' day\",\n            },\n            \"id between 11 and 20\",\n        )\n        run_dbt([\"snapshot\"])\n        self._assert_results(\n            ids_with_current_snapshot_records=range(1, 21),\n            ids_with_closed_out_snapshot_records=range(11, 21),\n        )\n\n\nclass TrinoSnapshotCheck(BaseSnapshotCheck):\n    def test_column_selection_is_reflected_in_snapshot(self, project):\n        \"\"\"\n        Update the first 10 records on a non-tracked column.\n        Update the middle 10 records on a tracked column. (hence records 6-10 are updated on both)\n        Show that all ids are current, and only the tracked column updates are reflected in `snapshot`.\n        \"\"\"\n        self.update_fact_records(\n            {\"last_name\": \"substring(last_name, 1, 3)\"}, \"id between 1 and 10\"\n        )  # not tracked\n        self.update_fact_records(\n            {\"email\": \"substring(email, 1, 3)\"}, \"id between 6 and 15\"\n        )  # tracked\n        run_dbt([\"snapshot\"])\n        self._assert_results(\n            ids_with_current_snapshot_records=range(1, 21),\n            ids_with_closed_out_snapshot_records=range(6, 16),\n        )\n\n\n@pytest.mark.iceberg\nclass TestIcebergSimpleSnapshot(TrinoSimpleSnapshot):\n    @pytest.fixture(scope=\"class\")\n    def project_config_update(self):\n        return {\n            \"seeds\": {\n                \"+column_types\": {\"updated_at\": \"timestamp(6)\"},\n            },\n        }\n\n\n@pytest.mark.delta\nclass TestDeltaSimpleSnapshot(TrinoSimpleSnapshot):\n    pass\n\n\n@pytest.mark.iceberg\nclass TestIcebergSnapshotCheck(TrinoSnapshotCheck):\n    @pytest.fixture(scope=\"class\")\n    def macros(self):\n        return {\"iceberg.sql\": iceberg_macro_override_sql}\n\n\n@pytest.mark.delta\nclass TestDeltaSnapshotCheck(TrinoSnapshotCheck):\n    pass\n"
  },
  {
    "path": "tests/functional/adapter/test_sql_status_output.py",
    "content": "import pytest\nfrom dbt.tests.util import run_dbt, run_dbt_and_capture\n\nseed_csv = \"\"\"\nid,name,some_date\n1,Easton,1981-05-20 06:46:51\n2,Lillian,1978-09-03 18:10:33\n3,Jeremiah,1982-03-11 03:59:51\n4,Nolan,1976-05-06 20:21:35\n\"\"\".lstrip()\n\nmodel_sql = \"\"\"\nselect * from {{ ref('seed') }}\n\"\"\"\n\n\nclass TestSqlStatusOutput:\n    \"\"\"\n    Testing if SQL status output contains update_type and rowcount\n    \"\"\"\n\n    @pytest.fixture(scope=\"class\")\n    def seeds(self):\n        return {\n            \"seed.csv\": seed_csv,\n        }\n\n    @pytest.fixture(scope=\"class\")\n    def models(self):\n        return {\n            \"materialization_table.sql\": model_sql,\n            \"materialization_view.sql\": model_sql,\n        }\n\n    @pytest.fixture(scope=\"class\")\n    def project_config_update(self):\n        return {\n            \"name\": \"sql_status_output\",\n            \"models\": {\n                \"sql_status_output\": {\n                    \"materialization_table\": {\"+materialized\": \"table\"},\n                    \"materialization_view\": {\"+materialized\": \"view\"},\n                }\n            },\n        }\n\n    def test_run_seed_test(self, project):\n        results = run_dbt([\"seed\"], expect_pass=True)\n        assert len(results) == 1\n\n        results, logs = run_dbt_and_capture([\"--no-use-colors\", \"run\"], expect_pass=True)\n        assert len(results) == 2\n        assert (\n            f\" of 2 OK created sql table model {project.test_schema}.materialization_table  [CREATE TABLE (4 rows) in \"\n            in logs\n        )\n        assert (\n            f\" of 2 OK created sql view model {project.test_schema}.materialization_view  [CREATE VIEW in \"\n            in logs\n        )\n"
  },
  {
    "path": "tests/functional/adapter/test_table_properties.py",
    "content": "import pytest\nfrom dbt.tests.util import run_dbt, run_dbt_and_capture\n\nfrom tests.functional.adapter.materialization.fixtures import model_sql, seed_csv\n\n\nclass BaseTableProperties:\n    # Everything that goes in the \"seeds\" directory\n    @pytest.fixture(scope=\"class\")\n    def seeds(self):\n        return {\n            \"seed.csv\": seed_csv,\n        }\n\n    # Everything that goes in the \"models\" directory\n    @pytest.fixture(scope=\"class\")\n    def models(self):\n        return {\n            \"model.sql\": model_sql,\n        }\n\n\n@pytest.mark.iceberg\nclass TestTableProperties(BaseTableProperties):\n    # Configuration in dbt_project.yml\n    @pytest.fixture(scope=\"class\")\n    def project_config_update(self):\n        return {\n            \"name\": \"properties_test\",\n            \"models\": {\n                \"+materialized\": \"table\",\n                \"+properties\": {\n                    \"format\": \"'PARQUET'\",\n                    \"format_version\": \"2\",\n                },\n            },\n        }\n\n    def test_table_properties(self, project):\n        # Seed seed\n        results = run_dbt([\"seed\"], expect_pass=True)\n        assert len(results) == 1\n\n        # Create model with properties\n        results, logs = run_dbt_and_capture([\"--debug\", \"run\"], expect_pass=True)\n        assert len(results) == 1\n        assert \"WITH (\" in logs\n        assert \"format = 'PARQUET'\" in logs\n        assert \"format_version = 2\" in logs\n\n\n@pytest.mark.iceberg\nclass TestFileFormatConfig(BaseTableProperties):\n    # Configuration in dbt_project.yml\n    @pytest.fixture(scope=\"class\")\n    def project_config_update(self):\n        return {\n            \"name\": \"properties_test\",\n            \"models\": {\n                \"+materialized\": \"table\",\n                \"file_format\": \"parquet\",\n            },\n        }\n\n    def test_table_properties(self, project):\n        # Seed seed\n        results = run_dbt([\"seed\"], expect_pass=True)\n        assert len(results) == 1\n\n        # Create model with properties\n        results, logs = run_dbt_and_capture([\"--debug\", \"run\"], expect_pass=True)\n        assert len(results) == 1\n        assert \"WITH (\" in logs\n        assert \"format = 'parquet'\" in logs\n\n\n@pytest.mark.iceberg\nclass TestFileFormatConfigAndFormatTablePropertyFail(BaseTableProperties):\n    # Configuration in dbt_project.yml\n    @pytest.fixture(scope=\"class\")\n    def project_config_update(self):\n        return {\n            \"name\": \"properties_test\",\n            \"models\": {\n                \"+materialized\": \"table\",\n                \"+properties\": {\n                    \"format\": \"'PARQUET'\",\n                },\n                \"file_format\": \"orc\",\n            },\n        }\n\n    def test_table_properties(self, project):\n        # Seed seed\n        results = run_dbt([\"seed\"], expect_pass=True)\n        assert len(results) == 1\n\n        # Create model with properties\n        results, logs = run_dbt_and_capture([\"--debug\", \"run\"], expect_pass=False)\n        assert len(results) == 1\n        assert (\n            \"You can specify either 'file_format' or 'properties.format' configurations, but not both.\"\n            in logs\n        )\n\n\n@pytest.mark.hive\n# Setting `type` property is available only in Starburst Galaxy\n# 
https://docs.starburst.io/starburst-galaxy/data-engineering/working-with-data-lakes/table-formats/gl-iceberg.html\n@pytest.mark.skip_profile(\"trino_starburst\")\nclass TestTableFormatConfig(BaseTableProperties):\n    # Configuration in dbt_project.yml\n    @pytest.fixture(scope=\"class\")\n    def project_config_update(self):\n        return {\n            \"name\": \"properties_test\",\n            \"models\": {\n                \"+materialized\": \"table\",\n                \"table_format\": \"iceberg\",\n            },\n        }\n\n    def test_table_properties(self, project):\n        # Seed seed\n        results = run_dbt([\"seed\"], expect_pass=True)\n        assert len(results) == 1\n\n        # Create model with properties\n        results, logs = run_dbt_and_capture([\"--debug\", \"run\"], expect_pass=True)\n        assert len(results) == 1\n        assert \"WITH (\" in logs\n        assert \"type = 'iceberg'\" in logs\n\n\n@pytest.mark.hive\n# Setting `type` property is available only in Starburst Galaxy\n# https://docs.starburst.io/starburst-galaxy/data-engineering/working-with-data-lakes/table-formats/gl-iceberg.html\n@pytest.mark.skip_profile(\"trino_starburst\")\nclass TestTableFormatConfigAndTypeTablePropertyFail(BaseTableProperties):\n    # Configuration in dbt_project.yml\n    @pytest.fixture(scope=\"class\")\n    def project_config_update(self):\n        return {\n            \"name\": \"properties_test\",\n            \"models\": {\n                \"+materialized\": \"table\",\n                \"+properties\": {\n                    \"type\": \"'iceberg'\",\n                },\n                \"table_format\": \"iceberg\",\n            },\n        }\n\n    def test_table_properties(self, project):\n        # Seed seed\n        results = run_dbt([\"seed\"], expect_pass=True)\n        assert len(results) == 1\n\n        # Create model with properties\n        results, logs = run_dbt_and_capture([\"--debug\", \"run\"], expect_pass=False)\n        assert len(results) == 1\n        assert (\n            \"You can specify either 'table_format' or 'properties.type' configurations, but not both.\"\n            in logs\n        )\n"
  },
  {
    "path": "tests/functional/adapter/unit_testing/test_unit_testing.py",
    "content": "import pytest\nfrom dbt.tests.adapter.unit_testing.test_case_insensitivity import (\n    BaseUnitTestCaseInsensivity,\n)\nfrom dbt.tests.adapter.unit_testing.test_invalid_input import BaseUnitTestInvalidInput\nfrom dbt.tests.adapter.unit_testing.test_types import BaseUnitTestingTypes\n\n\n@pytest.mark.skip_profile(\"starburst_galaxy\")\nclass TestTrinoUnitTestingTypesTrinoStarburst(BaseUnitTestingTypes):\n    @pytest.fixture\n    def data_types(self):\n        # sql_value, yaml_value\n        return [\n            [\"1\", \"1\"],\n            [\"'1'\", \"1\"],\n            [\"true\", \"true\"],\n            [\"DATE '2020-01-02'\", \"2020-01-02\"],\n            [\"TIMESTAMP '2013-11-03 00:00:00'\", \"2013-11-03 00:00:00\"],\n            [\"TIMESTAMP '2013-11-03 00:00:00-0'\", \"2013-11-03 00:00:00-0\"],\n            [\"DECIMAL '1'\", \"1\"],\n            [\n                \"\"\"JSON '{\"bar\": \"baz\", \"balance\": 7.77, \"active\": false}'\"\"\",\n                \"\"\"'{\"bar\": \"baz\", \"balance\": 7.77, \"active\": false}'\"\"\",\n            ],\n        ]\n\n\n# JSON type is not supported on object storage connectors\n@pytest.mark.skip_profile(\"trino_starburst\")\nclass TestTrinoUnitTestingTypesGalaxy(BaseUnitTestingTypes):\n    @pytest.fixture\n    def data_types(self):\n        # sql_value, yaml_value\n        return [\n            [\"1\", \"1\"],\n            [\"'1'\", \"1\"],\n            [\"true\", \"true\"],\n            [\"DATE '2020-01-02'\", \"2020-01-02\"],\n            [\"TIMESTAMP '2013-11-03 00:00:00'\", \"2013-11-03 00:00:00\"],\n            [\"TIMESTAMP '2013-11-03 00:00:00-0'\", \"2013-11-03 00:00:00-0\"],\n            [\"DECIMAL '1'\", \"1\"],\n        ]\n\n\nclass TestTrinoUnitTestCaseInsensitivity(BaseUnitTestCaseInsensivity):\n    pass\n\n\nclass TestTrinoUnitTestInvalidInput(BaseUnitTestInvalidInput):\n    pass\n"
  },
  {
    "path": "tests/functional/adapter/utils/fixture_date_spine.py",
    "content": "# If date_spine works properly, there should be no `null` values in the resulting model\n\nmodels__trino_test_date_spine_sql = \"\"\"\nwith generated_dates as (\n    {{ date_spine(\"day\", \"'2023-09-01'\", \"'2023-09-10'\") }}\n), expected_dates as (\n    select cast('2023-09-01' as date) as expected\n    union all\n    select cast('2023-09-02' as date) as expected\n    union all\n    select cast('2023-09-03' as date) as expected\n    union all\n    select cast('2023-09-04' as date) as expected\n    union all\n    select cast('2023-09-05' as date) as expected\n    union all\n    select cast('2023-09-06' as date) as expected\n    union all\n    select cast('2023-09-07' as date) as expected\n    union all\n    select cast('2023-09-08' as date) as expected\n    union all\n    select cast('2023-09-09' as date) as expected\n), joined as (\n    select\n        generated_dates.date_day,\n        expected_dates.expected\n    from generated_dates\n    left join expected_dates on generated_dates.date_day = expected_dates.expected\n)\n\nSELECT * from joined\n\"\"\"\n"
  },
  {
    "path": "tests/functional/adapter/utils/fixture_get_intervals_between.py",
    "content": "models__trino_test_get_intervals_between_sql = \"\"\"\nSELECT\n  {{ get_intervals_between(\"'2023-09-01'\", \"'2023-09-12'\", \"day\") }} as intervals,\n  11 as expected\n\n\"\"\"\n"
  },
  {
    "path": "tests/functional/adapter/utils/test_data_types.py",
    "content": "import pytest\nfrom dbt.tests.adapter.utils.data_types.test_type_bigint import BaseTypeBigInt\nfrom dbt.tests.adapter.utils.data_types.test_type_boolean import BaseTypeBoolean\nfrom dbt.tests.adapter.utils.data_types.test_type_float import BaseTypeFloat\nfrom dbt.tests.adapter.utils.data_types.test_type_int import BaseTypeInt\nfrom dbt.tests.adapter.utils.data_types.test_type_numeric import BaseTypeNumeric\nfrom dbt.tests.adapter.utils.data_types.test_type_string import BaseTypeString\nfrom dbt.tests.adapter.utils.data_types.test_type_timestamp import BaseTypeTimestamp\n\n\nclass TestTypeBigInt(BaseTypeBigInt):\n    pass\n\n\nclass TestTypeFloat(BaseTypeFloat):\n    pass\n\n\nclass TestTypeInt(BaseTypeInt):\n    pass\n\n\nclass TestTypeNumeric(BaseTypeNumeric):\n    def numeric_fixture_type(self):\n        return \"decimal(28,6)\"\n\n\nclass TestTypeString(BaseTypeString):\n    pass\n\n\n# TODO: Re-enable when https://github.com/trinodb/trino/pull/13981 is merged\n@pytest.mark.skip_profile(\"starburst_galaxy\")\nclass TestTypeTimestamp(BaseTypeTimestamp):\n    pass\n\n\nclass TestTypeBoolean(BaseTypeBoolean):\n    pass\n"
  },
  {
    "path": "tests/functional/adapter/utils/test_date_spine.py",
    "content": "import pytest\nfrom dbt.tests.adapter.utils.base_utils import BaseUtils\nfrom dbt.tests.adapter.utils.fixture_date_spine import models__test_date_spine_yml\n\nfrom tests.functional.adapter.utils.fixture_date_spine import (\n    models__trino_test_date_spine_sql,\n)\n\n\nclass BaseDateSpine(BaseUtils):\n    @pytest.fixture(scope=\"class\")\n    def models(self):\n        return {\n            \"test_date_spine.yml\": models__test_date_spine_yml,\n            \"test_date_spine.sql\": self.interpolate_macro_namespace(\n                models__trino_test_date_spine_sql, \"date_spine\"\n            ),\n        }\n\n\nclass TestDateSpine(BaseDateSpine):\n    pass\n"
  },
  {
    "path": "tests/functional/adapter/utils/test_get_intervals_between.py",
    "content": "import pytest\nfrom dbt.tests.adapter.utils.base_utils import BaseUtils\nfrom dbt.tests.adapter.utils.fixture_get_intervals_between import (\n    models__test_get_intervals_between_yml,\n)\n\nfrom tests.functional.adapter.utils.fixture_get_intervals_between import (\n    models__trino_test_get_intervals_between_sql,\n)\n\n\nclass BaseGetIntervalsBetween(BaseUtils):\n    @pytest.fixture(scope=\"class\")\n    def models(self):\n        return {\n            \"test_get_intervals_between.yml\": models__test_get_intervals_between_yml,\n            \"test_get_intervals_between.sql\": self.interpolate_macro_namespace(\n                models__trino_test_get_intervals_between_sql, \"get_intervals_between\"\n            ),\n        }\n\n\nclass TestGetIntervalsBetween(BaseGetIntervalsBetween):\n    pass\n"
  },
  {
    "path": "tests/functional/adapter/utils/test_timestamps.py",
    "content": "import pytest\nfrom dbt.tests.adapter.utils.test_timestamps import BaseCurrentTimestamps\n\n\nclass TestCurrentTimestampTrino(BaseCurrentTimestamps):\n    @pytest.fixture(scope=\"class\")\n    def models(self):\n        return {\n            \"get_current_timestamp.sql\": 'select {{ current_timestamp() }} as \"current_timestamp\"'\n        }\n\n    @pytest.fixture(scope=\"class\")\n    def expected_schema(self):\n        return {\"current_timestamp\": \"timestamp(3) with time zone\"}\n\n    @pytest.fixture(scope=\"class\")\n    def expected_sql(self):\n        return 'select current_timestamp as \"current_timestamp\"'\n"
  },
  {
    "path": "tests/functional/adapter/utils/test_utils.py",
    "content": "import pytest\nfrom dbt.tests.adapter.utils.fixture_datediff import models__test_datediff_yml\nfrom dbt.tests.adapter.utils.test_any_value import BaseAnyValue\nfrom dbt.tests.adapter.utils.test_array_append import BaseArrayAppend\nfrom dbt.tests.adapter.utils.test_array_concat import BaseArrayConcat\nfrom dbt.tests.adapter.utils.test_array_construct import BaseArrayConstruct\nfrom dbt.tests.adapter.utils.test_bool_or import BaseBoolOr\nfrom dbt.tests.adapter.utils.test_cast_bool_to_text import BaseCastBoolToText\nfrom dbt.tests.adapter.utils.test_concat import BaseConcat\nfrom dbt.tests.adapter.utils.test_current_timestamp import BaseCurrentTimestampAware\nfrom dbt.tests.adapter.utils.test_date_trunc import BaseDateTrunc\nfrom dbt.tests.adapter.utils.test_dateadd import BaseDateAdd\nfrom dbt.tests.adapter.utils.test_datediff import BaseDateDiff\nfrom dbt.tests.adapter.utils.test_equals import BaseEquals\nfrom dbt.tests.adapter.utils.test_escape_single_quotes import (\n    BaseEscapeSingleQuotesQuote,\n)\nfrom dbt.tests.adapter.utils.test_except import BaseExcept\nfrom dbt.tests.adapter.utils.test_generate_series import BaseGenerateSeries\nfrom dbt.tests.adapter.utils.test_get_powers_of_two import BaseGetPowersOfTwo\nfrom dbt.tests.adapter.utils.test_hash import BaseHash\nfrom dbt.tests.adapter.utils.test_intersect import BaseIntersect\nfrom dbt.tests.adapter.utils.test_last_day import BaseLastDay\nfrom dbt.tests.adapter.utils.test_length import BaseLength\nfrom dbt.tests.adapter.utils.test_listagg import BaseListagg\nfrom dbt.tests.adapter.utils.test_position import BasePosition\nfrom dbt.tests.adapter.utils.test_replace import BaseReplace\nfrom dbt.tests.adapter.utils.test_right import BaseRight\nfrom dbt.tests.adapter.utils.test_safe_cast import BaseSafeCast\nfrom dbt.tests.adapter.utils.test_split_part import BaseSplitPart\nfrom dbt.tests.adapter.utils.test_string_literal import BaseStringLiteral\nfrom dbt.tests.adapter.utils.test_validate_sql import BaseValidateSqlMethod\n\nfrom tests.functional.adapter.fixture_datediff import (\n    models__test_datediff_sql,\n    seeds__data_datediff_csv,\n)\n\nmodels__array_append_expected_sql = \"\"\"\nselect 1 as id, {{ array_construct([1,2,3,4]) }} as array_col\n\"\"\"\n\n\nmodels__array_append_actual_sql = \"\"\"\nselect 1 as id, {{ array_append(array_construct([1,2,3]), 4) }} as array_col\n\"\"\"\n\nmodels__array_concat_expected_sql = \"\"\"\nselect 1 as id, {{ array_construct([1,2,3,4,5,6]) }} as array_col\n\"\"\"\n\n\nmodels__array_concat_actual_sql = \"\"\"\nselect 1 as id, {{ array_concat(array_construct([1,2,3]), array_construct([4,5,6])) }} as array_col\n\"\"\"\n\n\nclass TestAnyValue(BaseAnyValue):\n    pass\n\n\n# Only partially because of https://github.com/trinodb/trino/issues/13\n# No way to concat an array with null or empty array\nclass TestArrayAppend(BaseArrayAppend):\n    @pytest.fixture(scope=\"class\")\n    def models(self):\n        return {\n            \"actual.sql\": models__array_append_actual_sql,\n            \"expected.sql\": models__array_append_expected_sql,\n        }\n\n\n# Only partially because of https://github.com/trinodb/trino/issues/13\n# No way to concat an array with null or empty array\nclass TestArrayConcat(BaseArrayConcat):\n    @pytest.fixture(scope=\"class\")\n    def models(self):\n        return {\n            \"actual.sql\": models__array_concat_actual_sql,\n            \"expected.sql\": models__array_concat_expected_sql,\n        }\n\n\nclass 
TestArrayConstruct(BaseArrayConstruct):\n    pass\n\n\nclass TestBoolOr(BaseBoolOr):\n    pass\n\n\nclass TestCastBoolToText(BaseCastBoolToText):\n    pass\n\n\nclass TestConcat(BaseConcat):\n    pass\n\n\nclass TestCurrentTimestamp(BaseCurrentTimestampAware):\n    pass\n\n\nclass TestDateAdd(BaseDateAdd):\n    @pytest.fixture(scope=\"class\")\n    def project_config_update(self):\n        return {\n            \"name\": \"test_date_add\",\n            \"seeds\": {\n                \"+column_types\": {\n                    \"from_time\": \"timestamp(6)\",\n                    \"result\": \"timestamp(6)\",\n                },\n            },\n        }\n\n\nclass TestDateDiff(BaseDateDiff):\n    @pytest.fixture(scope=\"class\")\n    def project_config_update(self):\n        return {\n            \"name\": \"test_date_diff\",\n            \"seeds\": {\n                \"+column_types\": {\"first_date\": \"timestamp(6)\", \"second_date\": \"timestamp(6)\"},\n            },\n        }\n\n    @pytest.fixture(scope=\"class\")\n    def seeds(self):\n        return {\"data_datediff.csv\": seeds__data_datediff_csv}\n\n    @pytest.fixture(scope=\"class\")\n    def models(self):\n        return {\n            \"test_datediff.yml\": models__test_datediff_yml,\n            \"test_datediff.sql\": self.interpolate_macro_namespace(\n                models__test_datediff_sql, \"datediff\"\n            ),\n        }\n\n\nclass TestDateTrunc(BaseDateTrunc):\n    @pytest.fixture(scope=\"class\")\n    def project_config_update(self):\n        return {\n            \"name\": \"test_date_diff\",\n            \"seeds\": {\n                \"+column_types\": {\"updated_at\": \"timestamp(6)\"},\n            },\n        }\n\n\nclass TestEquals(BaseEquals):\n    pass\n\n\nclass TestEscapeSingleQuotes(BaseEscapeSingleQuotesQuote):\n    pass\n\n\nclass TestExcept(BaseExcept):\n    pass\n\n\nclass TestGenerateSeries(BaseGenerateSeries):\n    pass\n\n\nclass TestGetPowersOfTwo(BaseGetPowersOfTwo):\n    pass\n\n\nclass TestHash(BaseHash):\n    pass\n\n\nclass TestIntersect(BaseIntersect):\n    pass\n\n\nclass TestLastDay(BaseLastDay):\n    pass\n\n\nclass TestLength(BaseLength):\n    pass\n\n\nclass TestListagg(BaseListagg):\n    pass\n\n\nclass TestPosition(BasePosition):\n    pass\n\n\nclass TestReplace(BaseReplace):\n    pass\n\n\nclass TestRight(BaseRight):\n    pass\n\n\nclass TestSafeCast(BaseSafeCast):\n    pass\n\n\nclass TestSplitPart(BaseSplitPart):\n    pass\n\n\nclass TestStringLiteral(BaseStringLiteral):\n    pass\n\n\nclass TestValidateSqlMethod(BaseValidateSqlMethod):\n    pass\n"
  },
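The fixture overrides in the file above are the extension point of dbt's shared adapter test suite: each `Base*` class ships its own models and seeds, and an adapter repo specializes them by redefining the class-scoped pytest fixtures rather than the test methods. A minimal sketch of the same pattern follows; the class name below is hypothetical, while the `timestamp(6)` column pinning is copied from the overrides above, where Trino seeds need an explicit precision for timestamp columns.

```python
import pytest
from dbt.tests.adapter.utils.test_dateadd import BaseDateAdd


# Hypothetical illustration: any Base* utility test is specialized the same
# way, by overriding its class-scoped fixtures.
class TestDateAddExplicitPrecision(BaseDateAdd):
    @pytest.fixture(scope="class")
    def project_config_update(self):
        # Pin seed column types so Trino gets an explicit timestamp
        # precision (same override as in the file above).
        return {
            "seeds": {
                "+column_types": {
                    "from_time": "timestamp(6)",
                    "result": "timestamp(6)",
                },
            },
        }
```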
  {
    "path": "tests/unit/__init__.py",
    "content": ""
  },
  {
    "path": "tests/unit/test_adapter.py",
    "content": "import string\nimport unittest\nfrom multiprocessing import get_context\nfrom unittest import TestCase\nfrom unittest.mock import MagicMock, Mock, patch\n\nimport agate\nimport dbt.flags as flags\nimport trino\nfrom dbt.adapters.exceptions.connection import FailedToConnectError\nfrom dbt_common.clients import agate_helper\nfrom dbt_common.exceptions import DbtDatabaseError, DbtRuntimeError\n\nfrom dbt.adapters.trino import TrinoAdapter\nfrom dbt.adapters.trino.column import TRINO_VARCHAR_MAX_LENGTH, TrinoColumn\nfrom dbt.adapters.trino.connections import (\n    HttpScheme,\n    TrinoCertificateCredentials,\n    TrinoJwtCredentials,\n    TrinoKerberosCredentials,\n    TrinoLdapCredentials,\n    TrinoNoneCredentials,\n    TrinoOauthConsoleCredentials,\n    TrinoOauthCredentials,\n)\n\nfrom .utils import config_from_parts_or_dicts, mock_connection\n\n\nclass TestTrinoAdapter(unittest.TestCase):\n    def setUp(self):\n        flags.STRICT_MODE = True\n\n        profile_cfg = {\n            \"outputs\": {\n                \"test\": {\n                    \"type\": \"trino\",\n                    \"catalog\": \"trinodb\",\n                    \"host\": \"database\",\n                    \"port\": 5439,\n                    \"schema\": \"dbt_test_schema\",\n                    \"method\": \"none\",\n                    \"user\": \"trino_user\",\n                    \"cert\": \"/path/to/cert\",\n                    \"http_headers\": {\"X-Trino-Client-Info\": \"dbt-trino\"},\n                    \"http_scheme\": \"http\",\n                    \"session_properties\": {\n                        \"query_max_run_time\": \"4h\",\n                        \"exchange_compression\": True,\n                    },\n                }\n            },\n            \"target\": \"test\",\n        }\n\n        project_cfg = {\n            \"name\": \"X\",\n            \"version\": \"0.1\",\n            \"profile\": \"test\",\n            \"project-root\": \"/tmp/dbt/does-not-exist\",\n            \"quoting\": {\n                \"identifier\": False,\n                \"schema\": True,\n            },\n            \"query-comment\": \"dbt\",\n            \"config-version\": 2,\n        }\n\n        self.config = config_from_parts_or_dicts(project_cfg, profile_cfg)\n        self.assertEqual(self.config.query_comment.comment, \"dbt\")\n        self.assertEqual(self.config.query_comment.append, None)\n\n    @property\n    def adapter(self):\n        self._adapter = TrinoAdapter(self.config, get_context(\"spawn\"))\n        return self._adapter\n\n    def test_acquire_connection(self):\n        connection = self.adapter.acquire_connection(\"dummy\")\n        connection.handle\n\n        self.assertEqual(connection.state, \"open\")\n        self.assertIsNotNone(connection.handle)\n\n    def test_cancel_open_connections_empty(self):\n        self.assertEqual(len(list(self.adapter.cancel_open_connections())), 0)\n\n    def test_cancel_open_connections_master(self):\n        key = self.adapter.connections.get_thread_identifier()\n        self.adapter.connections.thread_connections[key] = mock_connection(\"master\")\n        self.assertEqual(len(list(self.adapter.cancel_open_connections())), 0)\n\n    @patch(\"dbt.adapters.trino.TrinoAdapter.ConnectionManager.get_thread_connection\")\n    def test_database_exception(self, get_thread_connection):\n        self._setup_mock_exception(\n            get_thread_connection, trino.exceptions.ProgrammingError(\"Syntax error\")\n        )\n        with 
self.assertRaises(DbtDatabaseError):\n            self.adapter.execute(\"select 1\")\n\n    @patch(\"dbt.adapters.trino.TrinoAdapter.ConnectionManager.get_thread_connection\")\n    def test_failed_to_connect_exception(self, get_thread_connection):\n        self._setup_mock_exception(\n            get_thread_connection,\n            trino.exceptions.OperationalError(\"Failed to establish a new connection\"),\n        )\n        with self.assertRaises(FailedToConnectError):\n            self.adapter.execute(\"select 1\")\n\n    @patch(\"dbt.adapters.trino.TrinoAdapter.ConnectionManager.get_thread_connection\")\n    def test_dbt_exception(self, get_thread_connection):\n        self._setup_mock_exception(get_thread_connection, Exception(\"Unexpected error\"))\n        with self.assertRaises(DbtRuntimeError):\n            self.adapter.execute(\"select 1\")\n\n    def _setup_mock_exception(self, get_thread_connection, exception):\n        connection = mock_connection(\"master\")\n        connection.handle = MagicMock()\n        cursor = MagicMock()\n        cursor.execute = Mock(side_effect=exception)\n        connection.handle.cursor = MagicMock(return_value=cursor)\n        get_thread_connection.return_value = connection\n\n\nclass TestTrinoAdapterAuthenticationMethods(unittest.TestCase):\n    def setUp(self):\n        flags.STRICT_MODE = True\n\n    def acquire_connection_with_profile(self, profile):\n        profile_cfg = {\n            \"outputs\": {\"test\": profile},\n            \"target\": \"test\",\n        }\n\n        project_cfg = {\n            \"name\": \"X\",\n            \"version\": \"0.1\",\n            \"profile\": \"test\",\n            \"project-root\": \"/tmp/dbt/does-not-exist\",\n            \"quoting\": {\n                \"identifier\": False,\n                \"schema\": True,\n            },\n            \"config-version\": 2,\n        }\n\n        config = config_from_parts_or_dicts(project_cfg, profile_cfg)\n\n        return TrinoAdapter(config, get_context(\"spawn\")).acquire_connection(\"dummy\")\n\n    def assert_default_connection_credentials(self, credentials):\n        self.assertEqual(credentials.type, \"trino\")\n        self.assertEqual(credentials.database, \"trinodb\")\n        self.assertEqual(credentials.host, \"database\")\n        self.assertEqual(credentials.port, 5439)\n        self.assertEqual(credentials.schema, \"dbt_test_schema\")\n        self.assertEqual(credentials.http_headers, {\"X-Trino-Client-Info\": \"dbt-trino\"})\n        self.assertEqual(\n            credentials.session_properties,\n            {\"query_max_run_time\": \"4h\", \"exchange_compression\": True},\n        )\n        self.assertEqual(credentials.prepared_statements_enabled, True)\n        self.assertEqual(credentials.retries, trino.constants.DEFAULT_MAX_ATTEMPTS)\n\n    def test_none_authentication(self):\n        connection = self.acquire_connection_with_profile(\n            {\n                \"type\": \"trino\",\n                \"catalog\": \"trinodb\",\n                \"host\": \"database\",\n                \"port\": 5439,\n                \"schema\": \"dbt_test_schema\",\n                \"user\": \"trino_user\",\n                \"cert\": \"/path/to/cert\",\n                \"client_tags\": [\"dev\", \"none\"],\n                \"http_headers\": {\"X-Trino-Client-Info\": \"dbt-trino\"},\n                \"http_scheme\": \"https\",\n                \"session_properties\": {\n                    \"query_max_run_time\": \"4h\",\n                    
\"exchange_compression\": True,\n                },\n                \"timezone\": \"UTC\",\n                \"suppress_cert_warning\": False,\n            }\n        )\n        credentials = connection.credentials\n        self.assert_default_connection_credentials(credentials)\n        self.assertIsInstance(credentials, TrinoNoneCredentials)\n        self.assertEqual(credentials.http_scheme, HttpScheme.HTTPS)\n        self.assertEqual(credentials.cert, \"/path/to/cert\")\n        self.assertEqual(credentials.client_tags, [\"dev\", \"none\"])\n        self.assertEqual(credentials.timezone, \"UTC\")\n        self.assertEqual(credentials.suppress_cert_warning, False)\n\n    def test_none_authentication_with_method(self):\n        connection = self.acquire_connection_with_profile(\n            {\n                \"type\": \"trino\",\n                \"catalog\": \"trinodb\",\n                \"host\": \"database\",\n                \"port\": 5439,\n                \"method\": \"none\",\n                \"schema\": \"dbt_test_schema\",\n                \"user\": \"trino_user\",\n                \"cert\": \"/path/to/cert\",\n                \"client_tags\": [\"dev\", \"none_with_method\"],\n                \"http_headers\": {\"X-Trino-Client-Info\": \"dbt-trino\"},\n                \"http_scheme\": \"https\",\n                \"session_properties\": {\n                    \"query_max_run_time\": \"4h\",\n                    \"exchange_compression\": True,\n                },\n                \"timezone\": \"UTC\",\n                \"suppress_cert_warning\": False,\n            }\n        )\n        credentials = connection.credentials\n        self.assert_default_connection_credentials(credentials)\n        self.assertIsInstance(credentials, TrinoNoneCredentials)\n        self.assertEqual(credentials.http_scheme, HttpScheme.HTTPS)\n        self.assertEqual(credentials.cert, \"/path/to/cert\")\n        self.assertEqual(credentials.client_tags, [\"dev\", \"none_with_method\"])\n        self.assertEqual(credentials.timezone, \"UTC\")\n        self.assertEqual(credentials.suppress_cert_warning, False)\n\n    def test_none_authentication_without_http_scheme(self):\n        connection = self.acquire_connection_with_profile(\n            {\n                \"type\": \"trino\",\n                \"catalog\": \"trinodb\",\n                \"host\": \"database\",\n                \"port\": 5439,\n                \"method\": \"none\",\n                \"schema\": \"dbt_test_schema\",\n                \"user\": \"trino_user\",\n                \"cert\": True,\n                \"client_tags\": [\"dev\", \"without_http_scheme\"],\n                \"http_headers\": {\"X-Trino-Client-Info\": \"dbt-trino\"},\n                \"session_properties\": {\n                    \"query_max_run_time\": \"4h\",\n                    \"exchange_compression\": True,\n                },\n                \"timezone\": \"UTC\",\n                \"suppress_cert_warning\": False,\n            }\n        )\n        credentials = connection.credentials\n        self.assert_default_connection_credentials(credentials)\n        self.assertIsInstance(credentials, TrinoNoneCredentials)\n        self.assertEqual(credentials.http_scheme, HttpScheme.HTTP)\n        self.assertEqual(credentials.cert, True)\n        self.assertEqual(credentials.client_tags, [\"dev\", \"without_http_scheme\"])\n        self.assertEqual(credentials.timezone, \"UTC\")\n        self.assertEqual(credentials.suppress_cert_warning, False)\n\n    def 
test_ldap_authentication(self):\n        test_cases = [(False, \"trino_user\"), (True, \"impersonated_user\")]\n        for is_impersonation, expected_user in test_cases:\n            connection = self.acquire_connection_with_profile(\n                {\n                    \"type\": \"trino\",\n                    \"catalog\": \"trinodb\",\n                    \"host\": \"database\",\n                    \"port\": 5439,\n                    \"method\": \"ldap\",\n                    \"schema\": \"dbt_test_schema\",\n                    \"user\": \"trino_user\",\n                    \"impersonation_user\": \"impersonated_user\" if is_impersonation else None,\n                    \"password\": \"trino_password\",\n                    \"cert\": False,\n                    \"client_tags\": [\"dev\", \"ldap\"],\n                    \"http_headers\": {\"X-Trino-Client-Info\": \"dbt-trino\"},\n                    \"session_properties\": {\n                        \"query_max_run_time\": \"4h\",\n                        \"exchange_compression\": True,\n                    },\n                    \"timezone\": \"UTC\",\n                    \"suppress_cert_warning\": True,\n                }\n            )\n            credentials = connection.credentials\n            connection.handle\n            self.assertIsInstance(credentials, TrinoLdapCredentials)\n            self.assert_default_connection_credentials(credentials)\n            self.assertEqual(credentials.http_scheme, HttpScheme.HTTPS)\n            self.assertEqual(credentials.cert, False)\n            self.assertEqual(connection.handle.handle.user, expected_user)\n            self.assertEqual(credentials.client_tags, [\"dev\", \"ldap\"])\n            self.assertEqual(credentials.timezone, \"UTC\")\n            self.assertEqual(credentials.suppress_cert_warning, True)\n\n    def test_kerberos_authentication(self):\n        connection = self.acquire_connection_with_profile(\n            {\n                \"type\": \"trino\",\n                \"catalog\": \"trinodb\",\n                \"host\": \"database\",\n                \"port\": 5439,\n                \"method\": \"kerberos\",\n                \"schema\": \"dbt_test_schema\",\n                \"user\": \"trino_user\",\n                \"password\": \"trino_password\",\n                \"cert\": \"/path/to/cert\",\n                \"client_tags\": [\"dev\", \"kerberos\"],\n                \"http_headers\": {\"X-Trino-Client-Info\": \"dbt-trino\"},\n                \"session_properties\": {\n                    \"query_max_run_time\": \"4h\",\n                    \"exchange_compression\": True,\n                },\n                \"timezone\": \"UTC\",\n                \"suppress_cert_warning\": False,\n            }\n        )\n        credentials = connection.credentials\n        self.assertIsInstance(credentials, TrinoKerberosCredentials)\n        self.assert_default_connection_credentials(credentials)\n        self.assertEqual(credentials.http_scheme, HttpScheme.HTTPS)\n        self.assertEqual(credentials.cert, \"/path/to/cert\")\n        self.assertEqual(credentials.client_tags, [\"dev\", \"kerberos\"])\n        self.assertEqual(credentials.timezone, \"UTC\")\n        self.assertEqual(credentials.suppress_cert_warning, False)\n\n    def test_certificate_authentication(self):\n        connection = self.acquire_connection_with_profile(\n            {\n                \"type\": \"trino\",\n                \"catalog\": \"trinodb\",\n                \"host\": \"database\",\n            
    \"port\": 5439,\n                \"method\": \"certificate\",\n                \"schema\": \"dbt_test_schema\",\n                \"cert\": \"/path/to/cert\",\n                \"client_tags\": [\"dev\", \"certificate\"],\n                \"http_headers\": {\"X-Trino-Client-Info\": \"dbt-trino\"},\n                \"client_certificate\": \"/path/to/client_cert\",\n                \"client_private_key\": \"password\",\n                \"session_properties\": {\n                    \"query_max_run_time\": \"4h\",\n                    \"exchange_compression\": True,\n                },\n                \"timezone\": \"UTC\",\n                \"suppress_cert_warning\": False,\n            }\n        )\n        credentials = connection.credentials\n        self.assertIsInstance(credentials, TrinoCertificateCredentials)\n        self.assertIsInstance(credentials.trino_auth(), trino.auth.CertificateAuthentication)\n        self.assertEqual(\n            credentials.trino_auth(),\n            trino.auth.CertificateAuthentication(\"/path/to/client_cert\", \"password\"),\n        )\n        self.assert_default_connection_credentials(credentials)\n        self.assertEqual(credentials.http_scheme, HttpScheme.HTTPS)\n        self.assertEqual(credentials.cert, \"/path/to/cert\")\n        self.assertEqual(credentials.client_tags, [\"dev\", \"certificate\"])\n        self.assertEqual(credentials.timezone, \"UTC\")\n        self.assertEqual(credentials.suppress_cert_warning, False)\n\n    def test_jwt_authentication(self):\n        connection = self.acquire_connection_with_profile(\n            {\n                \"type\": \"trino\",\n                \"catalog\": \"trinodb\",\n                \"host\": \"database\",\n                \"port\": 5439,\n                \"method\": \"jwt\",\n                \"schema\": \"dbt_test_schema\",\n                \"cert\": \"/path/to/cert\",\n                \"jwt_token\": \"aabbccddeeff\",\n                \"client_tags\": [\"dev\", \"jwt\"],\n                \"http_headers\": {\"X-Trino-Client-Info\": \"dbt-trino\"},\n                \"session_properties\": {\n                    \"query_max_run_time\": \"4h\",\n                    \"exchange_compression\": True,\n                },\n                \"timezone\": \"UTC\",\n                \"suppress_cert_warning\": False,\n            }\n        )\n        credentials = connection.credentials\n        self.assertIsInstance(credentials, TrinoJwtCredentials)\n        self.assert_default_connection_credentials(credentials)\n        self.assertEqual(credentials.http_scheme, HttpScheme.HTTPS)\n        self.assertEqual(credentials.cert, \"/path/to/cert\")\n        self.assertEqual(credentials.client_tags, [\"dev\", \"jwt\"])\n        self.assertEqual(credentials.timezone, \"UTC\")\n        self.assertEqual(credentials.suppress_cert_warning, False)\n\n    def test_oauth_authentication(self):\n        connection = self.acquire_connection_with_profile(\n            {\n                \"type\": \"trino\",\n                \"catalog\": \"trinodb\",\n                \"host\": \"database\",\n                \"port\": 5439,\n                \"method\": \"oauth\",\n                \"schema\": \"dbt_test_schema\",\n                \"cert\": \"/path/to/cert\",\n                \"client_tags\": [\"dev\", \"oauth\"],\n                \"http_headers\": {\"X-Trino-Client-Info\": \"dbt-trino\"},\n                \"session_properties\": {\n                    \"query_max_run_time\": \"4h\",\n                    \"exchange_compression\": 
True,\n                },\n                \"timezone\": \"UTC\",\n                \"suppress_cert_warning\": False,\n            }\n        )\n        credentials = connection.credentials\n        self.assertIsInstance(credentials, TrinoOauthCredentials)\n        self.assert_default_connection_credentials(credentials)\n        self.assertEqual(credentials.http_scheme, HttpScheme.HTTPS)\n        self.assertEqual(credentials.cert, \"/path/to/cert\")\n        self.assertEqual(connection.credentials.prepared_statements_enabled, True)\n        self.assertEqual(credentials.client_tags, [\"dev\", \"oauth\"])\n        self.assertEqual(credentials.timezone, \"UTC\")\n        self.assertEqual(credentials.suppress_cert_warning, False)\n\n    def test_oauth_console_authentication(self):\n        connection = self.acquire_connection_with_profile(\n            {\n                \"type\": \"trino\",\n                \"catalog\": \"trinodb\",\n                \"host\": \"database\",\n                \"port\": 5439,\n                \"method\": \"oauth_console\",\n                \"schema\": \"dbt_test_schema\",\n                \"cert\": \"/path/to/cert\",\n                \"client_tags\": [\"dev\", \"oauth_console\"],\n                \"http_headers\": {\"X-Trino-Client-Info\": \"dbt-trino\"},\n                \"session_properties\": {\n                    \"query_max_run_time\": \"4h\",\n                    \"exchange_compression\": True,\n                },\n                \"timezone\": \"UTC\",\n                \"suppress_cert_warning\": False,\n            }\n        )\n        credentials = connection.credentials\n        self.assertIsInstance(credentials, TrinoOauthConsoleCredentials)\n        self.assert_default_connection_credentials(credentials)\n        self.assertEqual(credentials.http_scheme, HttpScheme.HTTPS)\n        self.assertEqual(credentials.cert, \"/path/to/cert\")\n        self.assertEqual(connection.credentials.prepared_statements_enabled, True)\n        self.assertEqual(credentials.client_tags, [\"dev\", \"oauth_console\"])\n        self.assertEqual(credentials.timezone, \"UTC\")\n        self.assertEqual(credentials.suppress_cert_warning, False)\n\n\nclass TestPreparedStatementsEnabled(TestCase):\n    def setup_profile(self, credentials):\n        profile_cfg = {\n            \"outputs\": {\"test\": credentials},\n            \"target\": \"test\",\n        }\n\n        project_cfg = {\n            \"name\": \"X\",\n            \"version\": \"0.1\",\n            \"profile\": \"test\",\n            \"project-root\": \"/tmp/dbt/does-not-exist\",\n            \"quoting\": {\n                \"identifier\": False,\n                \"schema\": True,\n            },\n            \"config-version\": 2,\n        }\n\n        config = config_from_parts_or_dicts(project_cfg, profile_cfg)\n        adapter = TrinoAdapter(config, get_context(\"spawn\"))\n        connection = adapter.acquire_connection(\"dummy\")\n        return connection\n\n    def test_default(self):\n        connection = self.setup_profile(\n            {\n                \"type\": \"trino\",\n                \"catalog\": \"trinodb\",\n                \"host\": \"database\",\n                \"port\": 5439,\n                \"schema\": \"dbt_test_schema\",\n                \"method\": \"none\",\n                \"user\": \"trino_user\",\n                \"http_scheme\": \"http\",\n            }\n        )\n        self.assertEqual(connection.credentials.prepared_statements_enabled, True)\n\n    def test_false(self):\n      
  connection = self.setup_profile(\n            {\n                \"type\": \"trino\",\n                \"catalog\": \"trinodb\",\n                \"host\": \"database\",\n                \"port\": 5439,\n                \"schema\": \"dbt_test_schema\",\n                \"method\": \"none\",\n                \"user\": \"trino_user\",\n                \"http_scheme\": \"http\",\n                \"prepared_statements_enabled\": False,\n            }\n        )\n        self.assertEqual(connection.credentials.prepared_statements_enabled, False)\n\n    def test_true(self):\n        connection = self.setup_profile(\n            {\n                \"type\": \"trino\",\n                \"catalog\": \"trinodb\",\n                \"host\": \"database\",\n                \"port\": 5439,\n                \"schema\": \"dbt_test_schema\",\n                \"method\": \"none\",\n                \"user\": \"trino_user\",\n                \"http_scheme\": \"http\",\n                \"prepared_statements_enabled\": True,\n            }\n        )\n        self.assertEqual(connection.credentials.prepared_statements_enabled, True)\n\n\nclass TestAdapterConversions(TestCase):\n    def _get_tester_for(self, column_type):\n        if column_type is agate.TimeDelta:  # dbt never makes this!\n            return agate.TimeDelta()\n\n        for instance in agate_helper.DEFAULT_TYPE_TESTER._possible_types:\n            if isinstance(instance, column_type):\n                return instance\n\n        raise ValueError(f\"no tester for {column_type}\")\n\n    def _make_table_of(self, rows, column_types):\n        column_names = list(string.ascii_letters[: len(rows[0])])\n        if isinstance(column_types, type):\n            column_types = [self._get_tester_for(column_types) for _ in column_names]\n        else:\n            column_types = [self._get_tester_for(typ) for typ in column_types]\n        table = agate.Table(rows, column_names=column_names, column_types=column_types)\n        return table\n\n\nclass TestTrinoAdapterConversions(TestAdapterConversions):\n    def test_convert_text_type(self):\n        rows = [\n            [\"\", \"a1\", \"stringval1\"],\n            [\"\", \"a2\", \"stringvalasdfasdfasdfa\"],\n            [\"\", \"a3\", \"stringval3\"],\n        ]\n        agate_table = self._make_table_of(rows, agate.Text)\n        expected = [\"VARCHAR\", \"VARCHAR\", \"VARCHAR\"]\n        for col_idx, expect in enumerate(expected):\n            assert TrinoAdapter.convert_text_type(agate_table, col_idx) == expect\n\n    def test_convert_number_type(self):\n        rows = [\n            [\"\", \"23.98\", \"-1\"],\n            [\"\", \"12.78\", \"-2\"],\n            [\"\", \"79.41\", \"-3\"],\n        ]\n        agate_table = self._make_table_of(rows, agate.Number)\n        expected = [\"INTEGER\", \"DOUBLE\", \"INTEGER\"]\n        for col_idx, expect in enumerate(expected):\n            assert TrinoAdapter.convert_number_type(agate_table, col_idx) == expect\n\n    def test_convert_boolean_type(self):\n        rows = [\n            [\"\", \"false\", \"true\"],\n            [\"\", \"false\", \"false\"],\n            [\"\", \"false\", \"true\"],\n        ]\n        agate_table = self._make_table_of(rows, agate.Boolean)\n        expected = [\"boolean\", \"boolean\", \"boolean\"]\n        for col_idx, expect in enumerate(expected):\n            assert TrinoAdapter.convert_boolean_type(agate_table, col_idx) == expect\n\n    def test_convert_datetime_type(self):\n        rows = [\n            [\"\", 
\"20190101T01:01:01Z\", \"2019-01-01 01:01:01\"],\n            [\"\", \"20190102T01:01:01Z\", \"2019-01-01 01:01:01\"],\n            [\"\", \"20190103T01:01:01Z\", \"2019-01-01 01:01:01\"],\n        ]\n        agate_table = self._make_table_of(\n            rows, [agate.DateTime, agate_helper.ISODateTime, agate.DateTime]\n        )\n        expected = [\"TIMESTAMP\", \"TIMESTAMP\", \"TIMESTAMP\"]\n        for col_idx, expect in enumerate(expected):\n            assert TrinoAdapter.convert_datetime_type(agate_table, col_idx) == expect\n\n    def test_convert_date_type(self):\n        rows = [\n            [\"\", \"2019-01-01\", \"2019-01-04\"],\n            [\"\", \"2019-01-02\", \"2019-01-04\"],\n            [\"\", \"2019-01-03\", \"2019-01-04\"],\n        ]\n        agate_table = self._make_table_of(rows, agate.Date)\n        expected = [\"DATE\", \"DATE\", \"DATE\"]\n        for col_idx, expect in enumerate(expected):\n            assert TrinoAdapter.convert_date_type(agate_table, col_idx) == expect\n\n\nclass TestTrinoColumn(unittest.TestCase):\n    def test_bound_varchar(self):\n        col = TrinoColumn.from_description(\"my_col\", \"VARCHAR(100)\")\n        assert col.column == \"my_col\"\n        assert col.dtype == \"VARCHAR\"\n        assert col.char_size == 100\n        # bounded varchars get formatted to lowercase\n        assert col.data_type == \"varchar(100)\"\n        assert col.string_size() == 100\n        assert col.is_string() is True\n        assert col.is_number() is False\n        assert col.is_numeric() is False\n\n    def test_unbound_varchar(self):\n        col = TrinoColumn.from_description(\"my_col\", \"VARCHAR\")\n        assert col.column == \"my_col\"\n        assert col.dtype == \"VARCHAR\"\n        assert col.char_size is None\n        assert col.data_type == \"VARCHAR\"\n        assert col.string_size() == TRINO_VARCHAR_MAX_LENGTH\n        assert col.is_string() is True\n        assert col.is_number() is False\n        assert col.is_numeric() is False\n"
  },
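The exception tests in `tests/unit/test_adapter.py` above pin down how driver errors surface to dbt: `trino.exceptions.ProgrammingError` becomes `DbtDatabaseError`, a connection-phase `trino.exceptions.OperationalError` becomes `FailedToConnectError`, and any other exception falls through to `DbtRuntimeError`. The mocking shape they rely on can be read in isolation as the sketch below, extracted from `_setup_mock_exception`; the helper name is hypothetical.

```python
from unittest.mock import MagicMock, Mock

import trino


def make_failing_connection(exc: Exception) -> MagicMock:
    """Hypothetical helper mirroring _setup_mock_exception above: a fake
    connection whose cursor raises `exc` on execute(), so the adapter's
    exception-translation path can be exercised without a live server."""
    connection = MagicMock()
    cursor = MagicMock()
    cursor.execute = Mock(side_effect=exc)
    connection.handle.cursor = MagicMock(return_value=cursor)
    return connection


# Patched in as the thread connection (as the tests above do),
# adapter.execute("select 1") then raises the translated dbt error,
# here DbtDatabaseError.
conn = make_failing_connection(trino.exceptions.ProgrammingError("Syntax error"))
```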
  {
    "path": "tests/unit/utils.py",
    "content": "\"\"\"Unit test utility functions.\n\nNote that all imports should be inside the functions to avoid import/mocking\nissues.\n\"\"\"\nimport os\nfrom unittest import TestCase, mock\n\nfrom dbt.config.project import PartialProject\nfrom dbt_common.dataclass_schema import ValidationError\n\n\ndef normalize(path):\n    \"\"\"On windows, neither is enough on its own:\n\n    >>> normcase('C:\\\\documents/ALL CAPS/subdir\\\\..')\n    'c:\\\\documents\\\\all caps\\\\subdir\\\\..'\n    >>> normpath('C:\\\\documents/ALL CAPS/subdir\\\\..')\n    'C:\\\\documents\\\\ALL CAPS'\n    >>> normpath(normcase('C:\\\\documents/ALL CAPS/subdir\\\\..'))\n    'c:\\\\documents\\\\all caps'\n    \"\"\"\n    return os.path.normcase(os.path.normpath(path))\n\n\nclass Obj:\n    which = \"blah\"\n    single_threaded = False\n\n\ndef mock_connection(name):\n    conn = mock.MagicMock()\n    conn.name = name\n    return conn\n\n\ndef profile_from_dict(profile, profile_name, cli_vars=\"{}\"):\n    from dbt.config import Profile\n    from dbt.config.renderer import ProfileRenderer\n    from dbt.config.utils import parse_cli_vars\n\n    if not isinstance(cli_vars, dict):\n        cli_vars = parse_cli_vars(cli_vars)\n\n    renderer = ProfileRenderer(cli_vars)\n\n    # in order to call dbt's internal profile rendering, we need to set the\n    # flags global. This is a bit of a hack, but it's the best way to do it.\n    from argparse import Namespace\n\n    from dbt.flags import set_from_args\n\n    set_from_args(Namespace(), None)\n    return Profile.from_raw_profile_info(\n        profile,\n        profile_name,\n        renderer,\n    )\n\n\ndef project_from_dict(project, profile, packages=None, selectors=None, cli_vars=\"{}\"):\n    from dbt.config.renderer import DbtProjectYamlRenderer\n    from dbt.config.utils import parse_cli_vars\n\n    if not isinstance(cli_vars, dict):\n        cli_vars = parse_cli_vars(cli_vars)\n\n    renderer = DbtProjectYamlRenderer(profile, cli_vars)\n\n    project_root = project.pop(\"project-root\", os.getcwd())\n\n    partial = PartialProject.from_dicts(\n        project_root=project_root,\n        project_dict=project,\n        packages_dict=packages,\n        selectors_dict=selectors,\n    )\n    return partial.render(renderer)\n\n\ndef config_from_parts_or_dicts(project, profile, packages=None, selectors=None, cli_vars=\"{}\"):\n    from copy import deepcopy\n\n    from dbt.config import Profile, Project, RuntimeConfig\n    from dbt.config.utils import parse_cli_vars\n\n    if not isinstance(cli_vars, dict):\n        cli_vars = parse_cli_vars(cli_vars)\n\n    if isinstance(project, Project):\n        profile_name = project.profile_name\n    else:\n        profile_name = project.get(\"profile\")\n\n    if not isinstance(profile, Profile):\n        profile = profile_from_dict(\n            deepcopy(profile),\n            profile_name,\n            cli_vars,\n        )\n\n    if not isinstance(project, Project):\n        project = project_from_dict(\n            deepcopy(project),\n            profile,\n            packages,\n            selectors,\n            cli_vars,\n        )\n\n    args = Obj()\n    args.vars = cli_vars\n    args.profile_dir = \"/dev/null\"\n    return RuntimeConfig.from_parts(project=project, profile=profile, args=args)\n\n\ndef inject_plugin(plugin):\n    from dbt.adapters.factory import FACTORY\n\n    key = plugin.adapter.type()\n    FACTORY.plugins[key] = plugin\n\n\ndef inject_adapter(value, plugin):\n    \"\"\"Inject the given adapter into the 
adapter factory, so your hand-crafted\n    artisanal adapter will be available from get_adapter() as if dbt loaded it.\n    \"\"\"\n    inject_plugin(plugin)\n    from dbt.adapters.factory import FACTORY\n\n    key = value.type()\n    FACTORY.adapters[key] = value\n\n\nclass ContractTestCase(TestCase):\n    ContractType = None\n\n    def setUp(self):\n        self.maxDiff = None\n        super().setUp()\n\n    def assert_to_dict(self, obj, dct):\n        self.assertEqual(obj.to_dict(), dct)\n\n    def assert_from_dict(self, obj, dct, cls=None):\n        if cls is None:\n            cls = self.ContractType\n        self.assertEqual(cls.from_dict(dct), obj)\n\n    def assert_symmetric(self, obj, dct, cls=None):\n        self.assert_to_dict(obj, dct)\n        self.assert_from_dict(obj, dct, cls)\n\n    def assert_fails_validation(self, dct, cls=None):\n        if cls is None:\n            cls = self.ContractType\n\n        with self.assertRaises(ValidationError):\n            cls.validate(dct)\n            cls.from_dict(dct)\n\n\ndef generate_name_macros(package):\n    from dbt.contracts.graph.parsed import ParsedMacro\n    from dbt.node_types import NodeType\n\n    name_sql = {}\n    for component in (\"database\", \"schema\", \"alias\"):\n        if component == \"alias\":\n            source = \"node.name\"\n        else:\n            source = f\"target.{component}\"\n        name = f\"generate_{component}_name\"\n        sql = f\"{{% macro {name}(value, node) %}} {{% if value %}} {{{{ value }}}} {{% else %}} {{{{ {source} }}}} {{% endif %}} {{% endmacro %}}\"\n        name_sql[name] = sql\n\n    all_sql = \"\\n\".join(name_sql.values())\n    for name, sql in name_sql.items():\n        pm = ParsedMacro(\n            name=name,\n            resource_type=NodeType.Macro,\n            unique_id=f\"macro.{package}.{name}\",\n            package_name=package,\n            original_file_path=normalize(\"macros/macro.sql\"),\n            root_path=\"./dbt_modules/root\",\n            path=normalize(\"macros/macro.sql\"),\n            raw_sql=all_sql,\n            macro_sql=sql,\n        )\n        yield pm\n"
  },
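`config_from_parts_or_dicts` above is what lets the unit tests construct a full `RuntimeConfig` from plain dictionaries, with no `profiles.yml` or `dbt_project.yml` on disk. A usage sketch under that assumption, with the profile and project values copied from the minimal case in `TestPreparedStatementsEnabled.test_default`:

```python
from tests.unit.utils import config_from_parts_or_dicts

# Values mirror TestPreparedStatementsEnabled.test_default above;
# nothing here touches the filesystem.
profile_cfg = {
    "outputs": {
        "test": {
            "type": "trino",
            "catalog": "trinodb",
            "host": "database",
            "port": 5439,
            "schema": "dbt_test_schema",
            "method": "none",
            "user": "trino_user",
            "http_scheme": "http",
        }
    },
    "target": "test",
}
project_cfg = {
    "name": "X",
    "version": "0.1",
    "profile": "test",
    "project-root": "/tmp/dbt/does-not-exist",
    "config-version": 2,
}

config = config_from_parts_or_dicts(project_cfg, profile_cfg)  # RuntimeConfig
```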
  {
    "path": "tox.ini",
    "content": "[tox]\nskipsdist = True\nenvlist = unit, integration\n\n[testenv:unit]\ndescription = unit testing\nbasepython = python3\ncommands = {envpython} -m pytest -v {posargs} tests/unit\npassenv = DBT_INVOCATION_ENV\ndeps =\n    -r{toxinidir}/dev_requirements.txt\n    -e.\n\n[testenv:integration]\ndescription = adapter plugin integration testing\nbasepython = python3\ncommands = {envpython} -m pytest {posargs} tests/functional\npassenv = DBT_INVOCATION_ENV, DBT_TEST_TRINO_HOST, DBT_TEST_USER_1, DBT_TEST_USER_2, DBT_TEST_USER_3\ndeps =\n    -r{toxinidir}/dev_requirements.txt\n    -e.\n"
  }
]