Repository: starburstdata/dbt-trino Branch: master Commit: 5813b01fa239 Files: 164 Total size: 451.5 KB Directory structure: gitextract_za4id763/ ├── .changes/ │ ├── 0.0.0.md │ ├── 1.10.0/ │ │ └── Features-20251210-194211.yaml │ ├── 1.10.0.md │ ├── 1.10.1/ │ │ └── Dependencies-20260115-092226.yaml │ ├── 1.10.1.md │ ├── header.tpl.md │ └── unreleased/ │ └── .gitkeep ├── .changie.yaml ├── .flake8 ├── .github/ │ ├── ISSUE_TEMPLATE/ │ │ ├── bug_report.yml │ │ ├── config.yml │ │ └── feature_request.yml │ ├── dependabot.yml │ ├── pull_request_template.md │ └── workflows/ │ ├── bot-changelog.yml │ ├── changelog-existence.yml │ ├── ci.yml │ ├── release.yml │ ├── security.yml │ └── version-bump.yml ├── .gitignore ├── .pre-commit-config.yaml ├── CHANGELOG.md ├── CONTRIBUTING.md ├── LICENSE.txt ├── Makefile ├── README.md ├── dbt/ │ ├── adapters/ │ │ └── trino/ │ │ ├── __init__.py │ │ ├── __version__.py │ │ ├── catalogs/ │ │ │ ├── __init__.py │ │ │ ├── _relation.py │ │ │ └── _trino_catalog_metastore.py │ │ ├── column.py │ │ ├── connections.py │ │ ├── constants.py │ │ ├── impl.py │ │ ├── parse_model.py │ │ └── relation.py │ └── include/ │ └── trino/ │ ├── __init__.py │ ├── dbt_project.yml │ ├── macros/ │ │ ├── adapters.sql │ │ ├── apply_grants.sql │ │ ├── catalog.sql │ │ ├── materializations/ │ │ │ ├── incremental.sql │ │ │ ├── materialized_view.sql │ │ │ ├── seeds/ │ │ │ │ └── helpers.sql │ │ │ ├── snapshot.sql │ │ │ ├── table.sql │ │ │ └── view.sql │ │ └── utils/ │ │ ├── any_value.sql │ │ ├── array_append.sql │ │ ├── array_concat.sql │ │ ├── array_construct.sql │ │ ├── bool_or.sql │ │ ├── datatypes.sql │ │ ├── date_spine.sql │ │ ├── date_trunc.sql │ │ ├── dateadd.sql │ │ ├── datediff.sql │ │ ├── hash.sql │ │ ├── listagg.sql │ │ ├── right.sql │ │ ├── safe_cast.sql │ │ ├── split_part.sql │ │ └── timestamps.sql │ └── sample_profiles.yml ├── dev_requirements.txt ├── docker/ │ ├── init_starburst.bash │ ├── init_trino.bash │ ├── remove_starburst.bash │ ├── remove_trino.bash │ ├── starburst/ │ │ ├── catalog/ │ │ │ ├── delta.properties │ │ │ ├── hive.properties │ │ │ ├── iceberg.properties │ │ │ ├── memory.properties │ │ │ ├── postgresql.properties │ │ │ └── tpch.properties │ │ └── etc/ │ │ ├── config.properties │ │ ├── jvm.config │ │ └── node.properties │ └── trino/ │ ├── catalog/ │ │ ├── delta.properties │ │ ├── hive.properties │ │ ├── iceberg.properties │ │ ├── memory.properties │ │ ├── postgresql.properties │ │ └── tpch.properties │ └── etc/ │ ├── config.properties │ ├── jvm.config │ └── node.properties ├── docker-compose-starburst.yml ├── docker-compose-trino.yml ├── mypy.ini ├── pytest.ini ├── setup.py ├── tests/ │ ├── conftest.py │ ├── functional/ │ │ └── adapter/ │ │ ├── behavior_flags/ │ │ │ └── test_require_certificate_validation.py │ │ ├── catalog_integrations/ │ │ │ ├── fixtures.py │ │ │ └── test_catalog_integration.py │ │ ├── column_types/ │ │ │ ├── fixtures.py │ │ │ └── test_column_types.py │ │ ├── constraints/ │ │ │ ├── fixtures.py │ │ │ └── test_constraints.py │ │ ├── dbt_clone/ │ │ │ └── test_dbt_clone.py │ │ ├── dbt_debug/ │ │ │ └── test_dbt_debug.py │ │ ├── dbt_show/ │ │ │ └── test_dbt_show.py │ │ ├── empty/ │ │ │ └── test_empty.py │ │ ├── fixture_datediff.py │ │ ├── hooks/ │ │ │ ├── data/ │ │ │ │ ├── seed_model.sql │ │ │ │ └── seed_run.sql │ │ │ ├── test_hooks_delete.py │ │ │ ├── test_model_hooks.py │ │ │ └── test_run_hooks.py │ │ ├── materialization/ │ │ │ ├── fixtures.py │ │ │ ├── test_incremental_delete_insert.py │ │ │ ├── test_incremental_merge.py │ │ │ ├── 
test_incremental_microbatch.py │ │ │ ├── test_incremental_predicates.py │ │ │ ├── test_incremental_schema.py │ │ │ ├── test_incremental_views_enabled.py │ │ │ ├── test_materialized_view.py │ │ │ ├── test_on_table_exists.py │ │ │ ├── test_prepared_statements.py │ │ │ ├── test_snapshot.py │ │ │ └── test_view_security.py │ │ ├── materialized_view_tests/ │ │ │ ├── test_materialized_view_dbt_core.py │ │ │ └── utils.py │ │ ├── persist_docs/ │ │ │ ├── fixtures.py │ │ │ └── test_persist_docs.py │ │ ├── show/ │ │ │ ├── fixtures.py │ │ │ └── test_show.py │ │ ├── simple_seed/ │ │ │ ├── seed_bom.csv │ │ │ ├── seeds.py │ │ │ └── test_seed.py │ │ ├── store_failures/ │ │ │ ├── fixtures.py │ │ │ └── test_store_failures.py │ │ ├── test_basic.py │ │ ├── test_caching.py │ │ ├── test_changing_relation_type.py │ │ ├── test_concurrency.py │ │ ├── test_custom_schema.py │ │ ├── test_ephemeral.py │ │ ├── test_get_incremental_tmp_relation_type_macro.py │ │ ├── test_grants.py │ │ ├── test_query_comments.py │ │ ├── test_quote_policy.py │ │ ├── test_sample_mode.py │ │ ├── test_seeds_column_types_overrides.py │ │ ├── test_session_property.py │ │ ├── test_simple_copy.py │ │ ├── test_simple_snapshot.py │ │ ├── test_sql_status_output.py │ │ ├── test_table_properties.py │ │ ├── unit_testing/ │ │ │ └── test_unit_testing.py │ │ └── utils/ │ │ ├── fixture_date_spine.py │ │ ├── fixture_get_intervals_between.py │ │ ├── test_data_types.py │ │ ├── test_date_spine.py │ │ ├── test_get_intervals_between.py │ │ ├── test_timestamps.py │ │ └── test_utils.py │ └── unit/ │ ├── __init__.py │ ├── test_adapter.py │ └── utils.py └── tox.ini ================================================ FILE CONTENTS ================================================ ================================================ FILE: .changes/0.0.0.md ================================================ ## Previous Releases For information on prior major and minor releases, see their changelogs: * [1.9](https://github.com/starburstdata/dbt-trino/blob/1.9.latest/CHANGELOG.md) * [1.8](https://github.com/starburstdata/dbt-trino/blob/1.8.latest/CHANGELOG.md) * [1.7](https://github.com/starburstdata/dbt-trino/blob/1.7.latest/CHANGELOG.md) * [1.6](https://github.com/starburstdata/dbt-trino/blob/1.6.latest/CHANGELOG.md) * [1.5](https://github.com/starburstdata/dbt-trino/blob/1.5.latest/CHANGELOG.md) * [1.4](https://github.com/starburstdata/dbt-trino/blob/1.4.latest/CHANGELOG.md) * [1.3](https://github.com/starburstdata/dbt-trino/blob/1.3.latest/CHANGELOG.md) * [1.2](https://github.com/starburstdata/dbt-trino/blob/1.2.latest/CHANGELOG.md) * [1.1](https://github.com/starburstdata/dbt-trino/blob/1.1.latest/CHANGELOG.md) * [1.0 and earlier](https://github.com/starburstdata/dbt-trino/blob/1.0.latest/CHANGELOG.md) ================================================ FILE: .changes/1.10.0/Features-20251210-194211.yaml ================================================ kind: Features body: Add support for catalog integration time: 2025-12-10T19:42:11.700646+01:00 custom: Author: damian3031 Issue: "" PR: "502" ================================================ FILE: .changes/1.10.0.md ================================================ ## dbt-trino 1.10.0 - December 16, 2025 ### Features - Add support for catalog integration ([#502](https://github.com/starburstdata/dbt-trino/pull/502)) ### Contributors - [@damian3031](https://github.com/damian3031) ([#502](https://github.com/starburstdata/dbt-trino/pull/502)) ================================================ FILE: 
.changes/1.10.1/Dependencies-20260115-092226.yaml ================================================ kind: Dependencies body: Bump dbt-adapters>=1.16,<2.0 time: 2026-01-15T09:22:26.968512-08:00 custom: Author: zqureshi Issue: "507" PR: "507" ================================================ FILE: .changes/1.10.1.md ================================================ ## dbt-trino 1.10.1 - January 16, 2026 ### Dependencies - Bump dbt-adapters>=1.16,<2.0 ([#507](https://github.com/starburstdata/dbt-trino/issues/507), [#507](https://github.com/starburstdata/dbt-trino/pull/507)) ### Contributors - [@zqureshi](https://github.com/zqureshi) ([#507](https://github.com/starburstdata/dbt-trino/pull/507)) ================================================ FILE: .changes/header.tpl.md ================================================ # dbt-trino Changelog - This file provides a full account of all changes to `dbt-trino` - Changes are listed under the (pre)release in which they first appear. Subsequent releases include changes from previous releases. - "Breaking changes" listed under a version may require action from end users or external maintainers when upgrading to that version. - Do not edit this file directly. This file is auto-generated using [changie](https://github.com/miniscruff/changie). For details on how to document a change, see [the contributing guide](https://github.com/starburstdata/dbt-trino/blob/master/CONTRIBUTING.md#adding-changelog-entry) ================================================ FILE: .changes/unreleased/.gitkeep ================================================ ================================================ FILE: .changie.yaml ================================================ changesDir: .changes unreleasedDir: unreleased headerPath: header.tpl.md versionHeaderPath: "" changelogPath: CHANGELOG.md versionExt: md versionFormat: '## dbt-trino {{.Version}} - {{.Time.Format "January 02, 2006"}}' kindFormat: '### {{.Kind}}' changeFormat: '- {{.Body}} ({{if ne .Custom.Issue ""}}[#{{.Custom.Issue}}](https://github.com/starburstdata/dbt-trino/issues/{{.Custom.Issue}}), {{end}}[#{{.Custom.PR}}](https://github.com/starburstdata/dbt-trino/pull/{{.Custom.PR}}))' kinds: - label: Breaking Changes - label: Features - label: Fixes - label: Under the Hood - label: Dependencies changeFormat: '- {{.Body}} ({{if ne .Custom.Issue ""}}[#{{.Custom.Issue}}](https://github.com/starburstdata/dbt-trino/issues/{{.Custom.Issue}}), {{end}}[#{{.Custom.PR}}](https://github.com/starburstdata/dbt-trino/pull/{{.Custom.PR}}))' - label: Security changeFormat: '- {{.Body}} ({{if ne .Custom.Issue ""}}[#{{.Custom.Issue}}](https://github.com/starburstdata/dbt-trino/issues/{{.Custom.Issue}}), {{end}}[#{{.Custom.PR}}](https://github.com/starburstdata/dbt-trino/pull/{{.Custom.PR}}))' newlines: beforeChangelogHeader: 1 custom: - key: Author label: GitHub Username(s) (separated by a single space if multiple) type: string minLength: 3 - key: Issue label: GitHub Issue Number type: int minInt: 1 optional: true - key: PR label: GitHub Pull Request Number type: int minInt: 1 footerFormat: | {{- $contributorDict := dict }} {{- range $change := .Changes }} {{- $authorList := splitList " " $change.Custom.Author }} {{- /* loop through all authors for a PR */}} {{- range $author := $authorList }} {{- $authorLower := lower $author }} {{- $prLink := $change.Kind }} {{- $prLink = "[#pr](https://github.com/starburstdata/dbt-trino/pull/pr)" | replace "pr" $change.Custom.PR }} {{- /* check if this contributor has other PRs associated with them 
already */}} {{- if hasKey $contributorDict $author }} {{- $prList := get $contributorDict $author }} {{- $prList = append $prList $prLink }} {{- $contributorDict := set $contributorDict $author $prList }} {{- else }} {{- $prList := list $prLink }} {{- $contributorDict := set $contributorDict $author $prList }} {{- end }} {{- end}} {{- end }} {{- /* no indentation here for formatting so the final markdown doesn't have unneeded indentations */}} {{- if $contributorDict}} ### Contributors {{- range $k,$v := $contributorDict }} - [@{{$k}}](https://github.com/{{$k}}) ({{ range $index, $element := $v }}{{if $index}}, {{end}}{{$element}}{{end}}) {{- end }} {{- end }} ================================================ FILE: .flake8 ================================================ [flake8] select = E W F ignore = W503, W504, E203, E741, E501, exclude = test ================================================ FILE: .github/ISSUE_TEMPLATE/bug_report.yml ================================================ --- name: Bug report description: Report a bug or an issue you've found with dbt-trino labels: bug body: - type: textarea attributes: label: Expected behavior description: What do you think should have happened? placeholder: > A clear and concise description of what you expected to happen. validations: required: true - type: textarea attributes: label: Actual behavior description: Describe what actually happened placeholder: > A clear and concise description of what actually happened. validations: required: true - type: textarea attributes: label: Steps To Reproduce description: This will help us reproduce your issue placeholder: > In as much detail as possible, please provide steps to reproduce the issue. Sample code that triggers the issue, relevant server settings, etc. is all very helpful here. validations: required: true - type: textarea attributes: label: Log output/Screenshots description: What do you think went wrong? placeholder: > If applicable, add log output and/or screenshots to help explain your problem. - type: input attributes: label: Operating System description: What Operating System are you using? placeholder: "You can get it via `cat /etc/os-release` for example" validations: required: true - type: input attributes: label: dbt version description: "Execute `dbt --version`" placeholder: Which version of dbt are you using? validations: required: true - type: input attributes: label: Trino Server version description: "Run `SELECT VERSION();` on your Trino server" placeholder: Which Trino server version are you using? validations: required: true - type: input attributes: label: Python version description: "You can get it via executing `python --version`" placeholder: What Python version are you using? validations: required: true - type: checkboxes attributes: label: Are you willing to submit a PR? description: > This is absolutely not required, but we are happy to guide you in the contribution process, especially if you already have a good understanding of how to implement the feature. options: - label: Yes, I am willing to submit a PR! - type: markdown attributes: value: "Thanks for completing our form!" ================================================ FILE: .github/ISSUE_TEMPLATE/config.yml ================================================ --- contact_links: - name: Ask a question or get help around `dbt-trino` on Slack url: https://getdbt.slack.com/channels/db-presto-trino about: Get help and share your experiences around `dbt-trino` with the `dbt` Slack community.
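As a worked example of the `.changie.yaml` templates above: the `changeFormat` string renders each entry file under `.changes/` into a single changelog bullet. The 1.10.1 dependency entry shown earlier is the input (abridged, `time` omitted):

```yaml
kind: Dependencies
body: Bump dbt-adapters>=1.16,<2.0
custom:
  Author: zqureshi
  Issue: "507"
  PR: "507"
```

and `changie merge` renders it into `CHANGELOG.md` as:

```md
- Bump dbt-adapters>=1.16,<2.0 ([#507](https://github.com/starburstdata/dbt-trino/issues/507), [#507](https://github.com/starburstdata/dbt-trino/pull/507))
```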
================================================ FILE: .github/ISSUE_TEMPLATE/feature_request.yml ================================================ --- name: Feature request description: Suggest an idea for dbt-trino labels: enhancement body: - type: textarea attributes: label: Describe the feature description: What would you like to happen? placeholder: > A clear and concise description of what you want to happen and what problem it would solve. validations: required: true - type: textarea attributes: label: Describe alternatives you've considered description: What did you try to make it happen? placeholder: > A clear and concise description of any alternative solutions or features you've considered. - type: textarea attributes: label: Who will benefit? placeholder: > What kind of use case will this feature be useful for? Please be specific and provide examples, this will help us prioritize properly. - type: checkboxes attributes: label: Are you willing to submit a PR? description: > This is absolutely not required, but we are happy to guide you in the contribution process, especially if you already have a good understanding of how to implement the feature. options: - label: Yes, I am willing to submit a PR! - type: markdown attributes: value: "Thanks for completing our form!" ================================================ FILE: .github/dependabot.yml ================================================ version: 2 updates: # python dependencies - package-ecosystem: "pip" directory: "/" schedule: interval: "daily" rebase-strategy: "disabled" labels: - "Skip Changelog" - "dependencies" - package-ecosystem: "github-actions" directory: "/" schedule: interval: "weekly" rebase-strategy: "disabled" ================================================ FILE: .github/pull_request_template.md ================================================ ## Overview ## Checklist - [ ] I have run this code in development and it appears to resolve the stated issue - [ ] This PR includes tests, or tests are not required/relevant for this PR - [ ] `README.md` updated with information about my change - [ ] I have run `changie new` to [create a changelog entry](https://github.com/starburstdata/dbt-trino/blob/master/CONTRIBUTING.md#Adding-CHANGELOG-Entry) ================================================ FILE: .github/workflows/bot-changelog.yml ================================================ # **what?** # When bots create a PR, this action will add a corresponding changie yaml file to that # PR when a specific label is added. # # The file is created off a template: # # kind: # body: # time: # custom: # Author: # Issue: 4904 # PR: # # **why?** # Automate changelog generation for more visibility with automated bot PRs. # # **when?** # Once a PR is created, a label should be added to the PR before or after creation. You can also # manually trigger this by adding the appropriate label at any time. # # **how to add another bot?** # Add the label and changie kind to the include matrix. That's it!
# name: Bot Changelog on: pull_request: # catch when the PR is opened with the label or when the label is added types: [opened, labeled] permissions: contents: write pull-requests: read jobs: generate_changelog: runs-on: ubuntu-latest steps: - name: Check out the repository uses: actions/checkout@v4 with: fetch-depth: 2 - name: Create and commit changelog on bot PR id: bot_changelog uses: emmyoop/changie_bot@v1.0 with: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} commit_author_name: "starburstdata-automation" commit_author_email: "automation@starburstdata.com" commit_message: ${{ github.event.pull_request.title }} changie_kind: "Dependencies" label: "dependencies" custom_changelog_string: "custom:\n Author: ${{ github.event.pull_request.user.login }}\n Issue: ''\n PR: ${{ github.event.pull_request.number }}" ================================================ FILE: .github/workflows/changelog-existence.yml ================================================ # **what?** # Checks that a file has been committed under the /.changes directory # as a new CHANGELOG entry. Cannot check for a specific filename as # it is dynamically generated by change type and timestamp. # This workflow should not require any secrets since it runs for PRs # from forked repos. # By default, secrets are not passed to workflows running from # a forked repo. # **why?** # Ensure the code change gets reflected in the CHANGELOG. # **when?** # This will run for all PRs going into master. It will # run when they are opened, reopened, when any label is added or removed # and when new code is pushed to the branch. The action will then get # skipped if the 'Skip Changelog' label is present in any of the labels. name: Check Changelog Entry on: pull_request: types: [opened, reopened, labeled, unlabeled, synchronize] workflow_dispatch: defaults: run: shell: bash permissions: contents: read pull-requests: write jobs: changelog: uses: dbt-labs/actions/.github/workflows/changelog-existence.yml@main with: changelog_comment: 'Thank you for your pull request! We could not find a changelog entry for this change. For details on how to document a change, see [the contributing guide](https://github.com/starburstdata/dbt-trino/blob/master/CONTRIBUTING.md#adding-changelog-entry).'
skip_label: 'Skip Changelog' secrets: inherit ================================================ FILE: .github/workflows/ci.yml ================================================ name: dbt-trino tests on: push: branches: - master - "*.*.latest" paths-ignore: - "**/*.md" pull_request: branches: - master - "*.*.latest" paths-ignore: - "**/*.md" jobs: checks: runs-on: ubuntu-latest steps: - name: "Checkout the source code" uses: actions/checkout@v4 - name: "Install Python" uses: actions/setup-python@v5 - name: "Install dev requirements" run: pip install -r dev_requirements.txt - name: "Run pre-commit checks" run: pre-commit run --all-files test: runs-on: ubuntu-latest strategy: fail-fast: false matrix: engine: - "trino" - "starburst" - "starburst_galaxy" python: - "3.9" - "3.10" - "3.11" - "3.12" - "3.13" isStarburstBranch: - ${{ (github.event_name == 'pull_request' && contains(github.event.pull_request.head.repo.full_name, 'starburstdata')) || github.event_name != 'pull_request' }} exclude: - engine: "starburst_galaxy" python: "3.13" isStarburstBranch: false - engine: "starburst_galaxy" python: "3.12" - engine: "starburst_galaxy" python: "3.11" - engine: "starburst_galaxy" python: "3.10" - engine: "starburst_galaxy" python: "3.9" steps: - name: Checkout uses: actions/checkout@v4 - name: Setup Python uses: actions/setup-python@v5 with: python-version: ${{ matrix.python }} - name: Run dbt-trino tests against ${{ matrix.engine }} on python ${{ matrix.python }} env: DBT_TESTS_STARBURST_GALAXY_HOST: ${{ secrets.DBT_TESTS_STARBURST_GALAXY_HOST }} DBT_TESTS_STARBURST_GALAXY_USER: ${{ secrets.DBT_TESTS_STARBURST_GALAXY_USER }} DBT_TESTS_STARBURST_GALAXY_PASSWORD: ${{ secrets.DBT_TESTS_STARBURST_GALAXY_PASSWORD }} run: | if [[ ${{ matrix.engine }} == "trino" || ${{ matrix.engine }} == "starburst" ]]; then make dbt-${{ matrix.engine }}-tests elif [[ ${{ matrix.engine }} == "starburst_galaxy" ]]; then python -m pip install -e . 
-r dev_requirements.txt python -m pytest tests/functional --profile starburst_galaxy fi - name: Remove container on failure if: failure() run: ./docker/remove_${{ matrix.engine }}.bash || true ================================================ FILE: .github/workflows/release.yml ================================================ name: dbt-trino release on: workflow_dispatch: jobs: test: runs-on: ubuntu-latest steps: - name: Checkout uses: actions/checkout@v4 - name: Setup Python uses: actions/setup-python@v5 with: python-version: "3.13" - name: Test release run: | python3 -m venv env source env/bin/activate pip install -r dev_requirements.txt pip install twine wheel setuptools python setup.py sdist bdist_wheel pip install dist/dbt_trino-*.tar.gz pip install dist/dbt_trino-*-py3-none-any.whl twine check dist/dbt_trino-*-py3-none-any.whl dist/dbt_trino-*.tar.gz github-release: name: GitHub release runs-on: ubuntu-latest needs: test steps: - name: Checkout uses: actions/checkout@v4 - name: Setup Python uses: actions/setup-python@v5 with: python-version: "3.13" - name: Get dbt-trino version run: echo "version_number=$(cat dbt/adapters/trino/__version__.py | sed -n 's/version = "\(.*\)\"/\1/p')" >> $GITHUB_ENV # Need to set an output variable because env variables can't be taken as input # This is needed for the next step with releasing to GitHub - name: Find release type id: release_type env: IS_PRERELEASE: ${{ contains(env.version_number, 'rc') || contains(env.version_number, 'b') }} run: | echo "isPrerelease=$IS_PRERELEASE" >> $GITHUB_OUTPUT - name: Create GitHub release uses: actions/create-release@v1 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} # This token is provided by Actions, you do not need to create your own token with: tag_name: v${{env.version_number}} release_name: v${{env.version_number}} prerelease: ${{ steps.release_type.outputs.isPrerelease }} body: | [Release notes](https://github.com/starburstdata/dbt-trino/blob/master/CHANGELOG.md) ```sh $ pip install dbt-trino==${{env.version_number}} ``` pypi-release: name: Pypi release runs-on: ubuntu-latest needs: github-release environment: PypiProd permissions: id-token: write steps: - name: Checkout uses: actions/checkout@v4 - name: Setup Python uses: actions/setup-python@v5 with: python-version: "3.13" - name: Get dbt-trino version run: echo "version_number=$(cat dbt/adapters/trino/__version__.py | sed -n 's/version = "\(.*\)\"/\1/p')" >> $GITHUB_ENV - name: Release to pypi run: | python3 -m venv env source env/bin/activate pip install -r dev_requirements.txt pip install twine wheel setuptools python setup.py sdist bdist_wheel twine upload --non-interactive dist/dbt_trino-${{env.version_number}}-py3-none-any.whl dist/dbt_trino-${{env.version_number}}.tar.gz ================================================ FILE: .github/workflows/security.yml ================================================ name: Veracode SCA on: workflow_dispatch: jobs: veracode-sca-task: runs-on: ubuntu-latest name: Scan repository for Issues steps: - name: Checkout uses: actions/checkout@v4 - name: Run Veracode SCA env: SRCCLR_API_TOKEN: ${{ secrets.SRCCLR_API_TOKEN }} uses: veracode/veracode-sca@v1.09 with: github_token: ${{ secrets.GITHUB_TOKEN }} create-issues: true min-cvss-for-issue: 1 fail-on-cvss: 11 ================================================ FILE: .github/workflows/version-bump.yml ================================================ # **what?** # This workflow will take the new version number to bump to. 
With that # it will run versionbump to update the version number everywhere in the # code base and then run changie to create the corresponding changelog. # A PR will be created with the changes that can be reviewed before committing. # **why?** # This is to aid in releasing dbt-trino and making sure we have updated # the version in all places and generated the changelog. # **when?** # This is triggered manually name: Version Bump on: workflow_dispatch: inputs: version_number: description: 'The version number to bump to (ex. 1.2.0, 1.3.0b1)' required: true jobs: bump: runs-on: ubuntu-latest steps: - name: "[DEBUG] Print Variables" run: | echo "all variables defined as inputs" echo The version_number: ${{ github.event.inputs.version_number }} - name: Check out the repository uses: actions/checkout@v4 - uses: actions/setup-python@v5 with: python-version: "3.8" - name: Install brew run: | echo "/home/linuxbrew/.linuxbrew/bin:/home/linuxbrew/.linuxbrew/sbin" >> $GITHUB_PATH - name: Install python dependencies run: | python3 -m venv env source env/bin/activate pip install --upgrade pip - name: Audit Version and Parse Into Parts id: semver uses: dbt-labs/actions/parse-semver@v1 with: version: ${{ github.event.inputs.version_number }} - name: Set branch value id: variables run: | echo "BRANCH_NAME=prep-release/${{ github.event.inputs.version_number }}_$GITHUB_RUN_ID" >> $GITHUB_OUTPUT - name: Create PR branch run: | git checkout -b ${{ steps.variables.outputs.BRANCH_NAME }} git push origin ${{ steps.variables.outputs.BRANCH_NAME }} git branch --set-upstream-to=origin/${{ steps.variables.outputs.BRANCH_NAME }} ${{ steps.variables.outputs.BRANCH_NAME }} - name: Bump version run: | echo -en "version = \"${{ github.event.inputs.version_number }}\"\n" > dbt/adapters/trino/__version__.py git status - name: Run changie run: | brew tap miniscruff/changie https://github.com/miniscruff/changie brew install changie if [[ ${{ steps.semver.outputs.is-pre-release }} -eq 1 ]] then changie batch ${{ steps.semver.outputs.base-version }} --move-dir '${{ steps.semver.outputs.base-version }}' --prerelease '${{ steps.semver.outputs.pre-release }}' else if [[ -d ".changes/${{ steps.semver.outputs.base-version }}" ]] then changie batch ${{ steps.semver.outputs.base-version }} --include '${{ steps.semver.outputs.base-version }}' --remove-prereleases else changie batch ${{ steps.semver.outputs.base-version }} --move-dir '${{ steps.semver.outputs.base-version }}' fi fi changie merge git status - name: Commit version bump to branch uses: EndBug/add-and-commit@v9 with: author_name: 'Github Build Bot' author_email: 'automation@starburstdata.com' message: 'Bumping version to ${{ github.event.inputs.version_number }} and generate CHANGELOG' branch: '${{ steps.variables.outputs.BRANCH_NAME }}' push: 'origin origin/${{ steps.variables.outputs.BRANCH_NAME }}' - name: Create Pull Request uses: peter-evans/create-pull-request@v7 with: author: 'Github Build Bot ' base: ${{github.ref}} title: 'Bumping version to ${{ github.event.inputs.version_number }} and generate changelog' branch: '${{ steps.variables.outputs.BRANCH_NAME }}' labels: | Skip Changelog ================================================ FILE: .gitignore ================================================ *.egg-info env/ __pycache__/ .tox/ .idea/ build/ dist/ dbt-integration-tests docker/dbt/.user.yml .DS_Store .vscode/ logs/ .venv/ ================================================ FILE: .pre-commit-config.yaml ================================================ # 
Configuration for pre-commit hooks (see https://pre-commit.com/). # Eventually the hooks described here will be run as tests before merging each PR. # TODO: remove global exclusion of tests when testing overhaul is complete exclude: ^test/ repos: - repo: https://github.com/pre-commit/pre-commit-hooks rev: v4.4.0 hooks: - id: check-yaml args: [--unsafe] - id: check-json - id: end-of-file-fixer - id: trailing-whitespace exclude_types: - "markdown" - id: check-case-conflict - repo: https://github.com/dbt-labs/pre-commit-hooks rev: v0.1.0a1 hooks: - id: dbt-core-in-adapters-check - repo: https://github.com/psf/black rev: 23.3.0 hooks: - id: black args: - "--line-length=99" - "--target-version=py38" - id: black alias: black-check stages: [manual] args: - "--line-length=99" - "--target-version=py38" - "--check" - "--diff" - repo: https://github.com/pycqa/isort rev: 5.12.0 hooks: - id: isort args: [ "--profile", "black", "--filter-files" ] - repo: https://github.com/pycqa/flake8 rev: 7.1.2 hooks: - id: flake8 - id: flake8 alias: flake8-check stages: [manual] - repo: https://github.com/pre-commit/mirrors-mypy rev: v1.2.0 hooks: - id: mypy # N.B.: Mypy is... a bit fragile. # # By using `language: system` we run this hook in the local # environment instead of a pre-commit isolated one. This is needed # to ensure mypy correctly parses the project. # It may cause trouble in that it adds environment variables out # of our control to the mix. Unfortunately, there's nothing we can # do about it, per pre-commit's author. # See https://github.com/pre-commit/pre-commit/issues/730 for details. args: [ --show-error-codes, --ignore-missing-imports ] files: ^dbt/adapters/.* language: system - id: mypy alias: mypy-check stages: [ manual ] args: [ --show-error-codes, --pretty, --ignore-missing-imports ] files: ^dbt/adapters language: system ================================================ FILE: CHANGELOG.md ================================================ # dbt-trino Changelog - This file provides a full account of all changes to `dbt-trino` - Changes are listed under the (pre)release in which they first appear. Subsequent releases include changes from previous releases. - "Breaking changes" listed under a version may require action from end users or external maintainers when upgrading to that version. - Do not edit this file directly. This file is auto-generated using [changie](https://github.com/miniscruff/changie).
For details on how to document a change, see [the contributing guide](https://github.com/starburstdata/dbt-trino/blob/master/CONTRIBUTING.md#adding-changelog-entry) ## dbt-trino 1.10.1 - January 16, 2026 ### Dependencies - Bump dbt-adapters>=1.16,<2.0 ([#507](https://github.com/starburstdata/dbt-trino/issues/507), [#507](https://github.com/starburstdata/dbt-trino/pull/507)) ### Contributors - [@zqureshi](https://github.com/zqureshi) ([#507](https://github.com/starburstdata/dbt-trino/pull/507)) ## dbt-trino 1.10.0 - December 16, 2025 ### Features - Add support for catalog integration ([#502](https://github.com/starburstdata/dbt-trino/pull/502)) ### Contributors - [@damian3031](https://github.com/damian3031) ([#502](https://github.com/starburstdata/dbt-trino/pull/502)) ## Previous Releases For information on prior major and minor releases, see their changelogs: * [1.9](https://github.com/starburstdata/dbt-trino/blob/1.9.latest/CHANGELOG.md) * [1.8](https://github.com/starburstdata/dbt-trino/blob/1.8.latest/CHANGELOG.md) * [1.7](https://github.com/starburstdata/dbt-trino/blob/1.7.latest/CHANGELOG.md) * [1.6](https://github.com/starburstdata/dbt-trino/blob/1.6.latest/CHANGELOG.md) * [1.5](https://github.com/starburstdata/dbt-trino/blob/1.5.latest/CHANGELOG.md) * [1.4](https://github.com/starburstdata/dbt-trino/blob/1.4.latest/CHANGELOG.md) * [1.3](https://github.com/starburstdata/dbt-trino/blob/1.3.latest/CHANGELOG.md) * [1.2](https://github.com/starburstdata/dbt-trino/blob/1.2.latest/CHANGELOG.md) * [1.1](https://github.com/starburstdata/dbt-trino/blob/1.1.latest/CHANGELOG.md) * [1.0 and earlier](https://github.com/starburstdata/dbt-trino/blob/1.0.latest/CHANGELOG.md) ================================================ FILE: CONTRIBUTING.md ================================================ # Contributing to `dbt-trino` ## Getting the code ### How to contribute? You can contribute to `dbt-trino` by forking the `dbt-trino` repository. For a detailed overview on forking, check out the [GitHub docs on forking](https://help.github.com/en/articles/fork-a-repo). In short, you will need to: 1. Fork the `dbt-trino` repository 2. Clone your fork locally 3. Check out a new branch for your proposed changes 4. Push changes to your fork 5. Open a pull request against `starburstdata/dbt-trino` from your forked repository ## Setting up an environment There are some tools that will be helpful to you in developing locally. While this is the list relevant for `dbt-trino` development, many of these tools are used commonly across open-source python projects. ### Tools These are the tools used in `dbt-trino` development and testing: - [`tox`](https://tox.readthedocs.io/en/latest/) to manage virtualenvs across python versions. We currently target the latest patch releases for Python 3.9, 3.10, 3.11, 3.12, and 3.13 - [`pytest`](https://docs.pytest.org/en/latest/) to define, discover, and run tests - [`flake8`](https://flake8.pycqa.org/en/latest/) for code linting - [`black`](https://github.com/psf/black) for code formatting - [`isort`](https://pycqa.github.io/isort/) for sorting imports - [`mypy`](https://mypy.readthedocs.io/en/stable/) for static type checking - [`pre-commit`](https://pre-commit.com) to easily run those checks - [`changie`](https://changie.dev/) to create changelog entries, without merge conflicts - [`make`](https://users.cs.duke.edu/~ola/courses/programming/Makefiles/Makefiles.html) to run multiple setup or test steps in combination. 
Don't worry too much, nobody _really_ understands how `make` works, and our Makefile aims to be super simple. - [GitHub Actions](https://github.com/features/actions) for automating tests and checks, once a PR is pushed to the `dbt-trino` repository A deep understanding of these tools is not required to effectively contribute to `dbt-trino`, but we recommend checking out the attached documentation if you're interested in learning more about each one. #### Virtual environments We strongly recommend using virtual environments when developing code in `dbt-trino`. We recommend creating this virtualenv in the root of the `dbt-trino` repository. To create a new virtualenv, run: ```sh python3 -m venv env source env/bin/activate ``` This will create and activate a new Python virtual environment. #### Docker and `docker compose` Docker and `docker compose` are both used in testing. Specific instructions for your OS can be found [here](https://docs.docker.com/get-docker/). ## Running `dbt-trino` in development ### Installation First, make sure that you set up your `virtualenv` as described in [Setting up an environment](#setting-up-an-environment). Also ensure you have the latest version of pip installed with `pip install --upgrade pip`. Next, install `dbt-trino` (and its dependencies) with: ```sh pip install -e . -r dev_requirements.txt ``` When installed in this way, any changes you make to your local copy of the source code will be reflected immediately in your next `dbt` run. ### Running `dbt-trino` With your virtualenv activated, the `dbt` script should point back to the source code you've cloned on your machine. You can verify this by running `which dbt`. This command should show you a path to an executable in your virtualenv. Configure your [profile](https://docs.getdbt.com/docs/configure-your-profile) as necessary to connect to your target databases. It may be a good idea to add a new profile pointing to a local Trino instance if appropriate. ## Testing Once you're able to manually test that your code change is working as expected, it's important to run existing automated tests, as well as adding some new ones. These tests will ensure that: - Your code changes do not unexpectedly break other established functionality - Your code changes can handle all known edge cases - The functionality you're adding will _keep_ working in the future ### Initial setup To be able to run the tests locally, you will need a Trino or Starburst instance. ```sh # to start Trino make start-trino # to start Starburst make start-starburst ``` ### Test commands There are a few methods for running tests locally. #### Makefile There are multiple targets in the Makefile to run common test suites and code checks, most notably: ```sh # Runs integration tests on Trino make dbt-trino-tests # Runs integration tests on Starburst make dbt-starburst-tests ``` > These make targets assume you have a local installation of a recent version of [`tox`](https://tox.readthedocs.io/en/latest/) for unit/integration testing and pre-commit for code quality checks, > unless you choose a Docker container to run tests. Run `make help` for more info. #### `pre-commit` [`pre-commit`](https://pre-commit.com) takes care of running all code-checks for formatting and linting. Run `make dev` to install `pre-commit` in your local environment. Once this is done, you can use any of the linter-based make targets as well as a git pre-commit hook that will ensure proper formatting and linting.
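For example, a minimal check-run sketch (assuming the dev dependencies from `dev_requirements.txt` are already installed):

```sh
# one-time setup: registers the git pre-commit hook (the `dev` Makefile target
# runs `pre-commit install` under the hood)
make dev

# run every configured hook against all files, mirroring the CI "checks" job
pre-commit run --all-files
```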
#### `tox` [`tox`](https://tox.readthedocs.io/en/latest/) takes care of managing virtualenvs and installing dependencies in order to run tests. You can also run tests in parallel, for example, you can run unit tests for Python 3.9, 3.10, 3.11, 3.12, and 3.13 in parallel with `tox -p`. Also, you can run unit tests for specific Python versions with `tox -e py39`. The configuration for these tests is located in `tox.ini`. #### `pytest` Finally, you can also run a specific test or group of tests using [`pytest`](https://docs.pytest.org/en/latest/) directly. With a virtualenv active and dev dependencies installed, you can do things like: ```sh # run all unit tests in a file python3 -m pytest tests/unit/utils.py # run a specific unit test python3 -m pytest tests/unit/test_adapter.py::TestTrinoAdapter::test_acquire_connection # run integration tests python3 -m pytest tests/functional ``` > See [pytest usage docs](https://docs.pytest.org/en/6.2.x/usage.html) for an overview of useful command-line options. The catalog in the dbt profile can be set up through [pytest markers](https://docs.pytest.org/en/7.1.x/example/markers.html#registering-markers); if no marker is specified, the memory catalog is used. For example, if you want to set the dbt profile to connect to the Delta Lake catalog, annotate your test with `@pytest.mark.delta` (supported markers are `postgresql`, `delta`, or `iceberg`). ``` @pytest.mark.delta def test_run_seed_test(self, project): ... ``` ## Adding CHANGELOG Entry We use [changie](https://changie.dev) to generate `CHANGELOG` entries. **Note:** Do not edit the `CHANGELOG.md` directly. Your modifications will be lost. Follow the steps to [install `changie`](https://changie.dev/guide/installation/) for your system. Once changie is installed and your PR is created, simply run `changie new` and changie will walk you through the process of creating a changelog entry. Commit the file that's created and your changelog entry is complete! You don't need to worry about which `dbt-trino` version your change will go into. Just create the changelog entry with `changie`, and open your PR against the `master` branch. ## Submitting a Pull Request A `dbt-trino` maintainer will review your PR. They may suggest code revisions for style or clarity, or request that you add unit or integration test(s). These are good things! We believe that, with a little bit of help, anyone can contribute high-quality code. Automated tests run via GitHub Actions. If you're a first-time contributor, all tests (including code checks and unit tests) will require a maintainer to approve. Changes in the `dbt-trino` repository trigger integration tests against Trino and Starburst. Once all tests are passing and your PR has been approved, a `dbt-trino` maintainer will merge your changes into the master branch. And that's it! Happy developing :tada: ================================================ FILE: LICENSE.txt ================================================ Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity.
For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. 
If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. 
You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS APPENDIX: How to apply the Apache License to your work. To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "{}" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. Copyright 2021 Starburst Data, Inc. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ================================================ FILE: Makefile ================================================ .EXPORT_ALL_VARIABLES: DBT_TEST_USER_1=user1 DBT_TEST_USER_2=user2 DBT_TEST_USER_3=user3 start-trino: docker network create dbt-net || true ./docker/init_trino.bash dbt-trino-tests: start-trino pip install -e . -r dev_requirements.txt tox -r start-starburst: docker network create dbt-net || true ./docker/init_starburst.bash dbt-starburst-tests: start-starburst pip install -e . 
-r dev_requirements.txt tox -r dev: pre-commit install ================================================ FILE: README.md ================================================ # dbt-trino [logos: Starburst, dbt, Trino] [![Build Status](https://github.com/starburstdata/dbt-trino/actions/workflows/ci.yml/badge.svg)](https://github.com/starburstdata/dbt-trino/actions/workflows/ci.yml?query=workflow%3A%22dbt-trino+tests%22+branch%3Amaster+event%3Apush) [![db-starburst-and-trino Slack](https://img.shields.io/static/v1?logo=slack&logoColor=959DA5&label=Slack&labelColor=333a41&message=join%20conversation&color=3AC358)](https://getdbt.slack.com/channels/db-starburst-and-trino) ## Introduction [dbt](https://docs.getdbt.com/docs/introduction) is a data transformation workflow tool that lets teams quickly and collaboratively deploy analytics code, following software engineering best practices like modularity, CI/CD, testing, and documentation. It enables anyone who knows SQL to build production-grade data pipelines. One frequently asked question in the context of using the `dbt` tool is: > Can I connect my dbt project to two databases? (see the answered [question](https://docs.getdbt.com/faqs/connecting-to-two-dbs-not-allowed) on the dbt website). **TL;DR** `dbt` stands for transformation, as in the `T` within `ELT` pipelines; it doesn't move data from source to warehouse. The `dbt-trino` adapter uses [Trino](https://trino.io/) as an underlying query engine to perform query federation across disparate data sources. Trino connects to multiple and diverse data sources ([available connectors](https://trino.io/docs/current/connector.html)) via one dbt connection and processes SQL queries at scale. Transformations defined in dbt are passed to Trino, which handles these SQL transformation queries and translates them into queries specific to the systems it connects to, creating tables or views and manipulating data. This repository is a fork of [dbt-presto](https://github.com/dbt-labs/dbt-presto) with adaptations to make it work with Trino. ## Compatibility This dbt plugin has been tested against `Trino` version `478`, `Starburst Enterprise` version `477-e.1`, and `Starburst Galaxy`. ## Setup & Configuration For information on installing and configuring your profile to authenticate to Trino or Starburst, please refer to [Starburst and Trino Setup](https://docs.getdbt.com/reference/warehouse-setups/trino-setup) in the dbt docs. ### Trino- and Starburst-specific configuration For Trino- and Starburst-specific configuration, you can refer to [Starburst (Trino) configurations](https://docs.getdbt.com/reference/resource-configs/trino-configs) on the dbt docs site. ## Contributing - Want to report a bug or request a feature? Let us know on [Slack](http://community.getdbt.com/) in the [#db-starburst-and-trino](https://getdbt.slack.com/channels/db-starburst-and-trino) channel, or on [Trino slack](https://trino.io/slack.html) in the [#python](https://trinodb.slack.com/channels/python) channel, or open [an issue](https://github.com/starburstdata/dbt-trino/issues/new) - Want to help us build dbt-trino? Check out the [Contributing Guide](https://github.com/starburstdata/dbt-trino/blob/HEAD/CONTRIBUTING.md) ### Release process The first 5 steps are ONLY relevant for bumping the __minor__ version: 1. Create a `1.x.latest` branch from the latest tag corresponding to the current minor version, e.g. `git checkout -b 1.6.latest v1.6.2` (when bumping to 1.7). Push the branch to remote. This branch will be used for potential backports. 2.
Create a new branch (do not push the commits below to `1.x.latest`). Add a new entry in `.changes/0.0.0.md` that points to the newly created latest branch. 3. Run `changie merge` to update `CHANGELOG.md`. After that, remove the changie files and folders related to the current minor version. Commit. 4. Bump the version of `dbt-tests-adapter`. Commit. 5. Merge these 2 commits into the master branch. Add a `Skip Changelog` label to the PR. Continue with the next steps for a __minor__ version bump. Start from this point for a __patch__ version bump: 1. Run the `Version Bump` workflow. The major and minor parts of the dbt version are used to associate dbt-trino's version with the dbt version. 2. Merge the bump PR. Make sure that the test suite passes. 3. Run the `dbt-trino release` workflow to release `dbt-trino` to PyPi and GitHub. ### Backport process Sometimes it is necessary to backport some changes to older versions. In that case, create a branch from the `x.x.latest` branch. There is an `x.x.latest` branch for each minor version, e.g. `1.3.latest`. Make a fix and open a PR back to `x.x.latest`. Create a changelog entry with `changie new` as usual; a separate changelog for each minor version is kept on every `x.x.latest` branch. After merging, to make a release of that version, just follow the instructions from the **Release process** section, but run every workflow on the `x.x.latest` branch. ## Code of Conduct Everyone interacting in the dbt project's codebases, issue trackers, chat rooms, and mailing lists is expected to follow the [PyPA Code of Conduct](https://www.pypa.io/en/latest/code-of-conduct/). ================================================ FILE: dbt/adapters/trino/__init__.py ================================================ from dbt.adapters.base import AdapterPlugin from dbt.adapters.trino.column import TrinoColumn # noqa from dbt.adapters.trino.connections import TrinoConnectionManager # noqa from dbt.adapters.trino.connections import TrinoCredentialsFactory from dbt.adapters.trino.relation import TrinoRelation # noqa from dbt.adapters.trino.impl import TrinoAdapter # isort: split from dbt.include import trino Plugin = AdapterPlugin( adapter=TrinoAdapter, # type: ignore credentials=TrinoCredentialsFactory, # type: ignore include_path=trino.PACKAGE_PATH, ) ================================================ FILE: dbt/adapters/trino/__version__.py ================================================ version = "1.10.1" ================================================ FILE: dbt/adapters/trino/catalogs/__init__.py ================================================ from dbt.adapters.trino.catalogs._relation import TrinoCatalogRelation from dbt.adapters.trino.catalogs._trino_catalog_metastore import TrinoCatalogIntegration __all__ = [ "TrinoCatalogIntegration", "TrinoCatalogRelation", ] ================================================ FILE: dbt/adapters/trino/catalogs/_relation.py ================================================ from dataclasses import dataclass from typing import Optional from dbt.adapters.catalogs import CatalogRelation from dbt.adapters.trino import constants @dataclass class TrinoCatalogRelation(CatalogRelation): catalog_type: str = constants.DEFAULT_TRINO_CATALOG.catalog_type catalog_name: Optional[str] = constants.DEFAULT_TRINO_CATALOG.name table_format: Optional[str] = None file_format: Optional[str] = None external_volume: Optional[str] = None storage_uri: Optional[str] = None ================================================ FILE: dbt/adapters/trino/catalogs/_trino_catalog_metastore.py
================================================ from typing import Optional from dbt.adapters.catalogs import CatalogIntegration, CatalogIntegrationConfig from dbt.adapters.contracts.relation import RelationConfig from dbt.adapters.trino import constants from dbt.adapters.trino.catalogs._relation import TrinoCatalogRelation class TrinoCatalogIntegration(CatalogIntegration): """ Catalog type: In Trino, the metastore for a catalog is set when configuring the connector. This cannot be configured using dbt's generated SQL. Documentation: https://trino.io/docs/current/overview/concepts.html#catalog https://trino.io/docs/current/object-storage/metastores.html Table format: For Trino and Starburst SEP, the table format is specified by the connector configuration. Setting table_format here will result in an error, as the 'type' property is unavailable in Trino and Starburst SEP. If you are using Starburst Galaxy, you can set the default table format to use for this catalog. It will set the `type` property to the specified table format. Documentation: https://docs.starburst.io/starburst-galaxy/data-engineering/working-with-data-lakes/table-formats/index.html """ catalog_type = constants.TRINO_CATALOG_TYPE allows_writes = True def __init__(self, config: CatalogIntegrationConfig) -> None: super().__init__(config) self.storage_uri = config.adapter_properties.get("storage_uri") def build_relation(self, model: RelationConfig) -> TrinoCatalogRelation: return TrinoCatalogRelation( catalog_type=self.catalog_type, catalog_name=self.catalog_name, table_format=self.table_format, file_format=self.file_format, external_volume=self.external_volume, storage_uri=self._calculate_storage_uri(model), ) def _calculate_storage_uri(self, model: RelationConfig) -> Optional[str]: if not model.config: return None if model_storage_uri := model.config.get("storage_uri"): return model_storage_uri if not self.external_volume: return None # Default dbt behavior is that if base_location_root is not specified, the `_dbt` prefix is added. # Even if base_location_root is explicitly set to None, the `_dbt` prefix is still added. # Allow omitting the prefix by setting omit_base_location_root to True.
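# Illustrative examples (hypothetical values, not from this repository), assuming
# external_volume='s3://bucket', model.schema='sales', model.name='orders':
#   omit_base_location_root=True            -> s3://bucket/sales/orders
#   base_location_root not set (default)    -> s3://bucket/_dbt/sales/orders
#   base_location_root='mart'               -> s3://bucket/mart/sales/orders
#   base_location_subpath='v2' then appends -> .../orders/v2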
omit_base_location_root = model.config.get("omit_base_location_root") if omit_base_location_root: storage_uri = f"{self.external_volume}/{model.schema}/{model.name}" else: prefix = model.config.get("base_location_root") or "_dbt" storage_uri = f"{self.external_volume}/{prefix}/{model.schema}/{model.name}" if suffix := model.config.get("base_location_subpath"): storage_uri = f"{storage_uri}/{suffix}" return storage_uri ================================================ FILE: dbt/adapters/trino/column.py ================================================ import re from dataclasses import dataclass from typing import ClassVar, Dict from dbt.adapters.base.column import Column from dbt_common.exceptions import DbtRuntimeError # Taken from the MAX_LENGTH variable in # https://github.com/trinodb/trino/blob/master/core/trino-spi/src/main/java/io/trino/spi/type/VarcharType.java TRINO_VARCHAR_MAX_LENGTH = 2147483646 @dataclass class TrinoColumn(Column): TYPE_LABELS: ClassVar[Dict[str, str]] = { "STRING": "VARCHAR", "FLOAT": "DOUBLE", } @property def data_type(self): # when varchar has no defined size, default to unbound varchar # the super().data_type defaults to varchar(256) if self.dtype.lower() == "varchar" and self.char_size is None: return self.dtype return super().data_type def is_string(self) -> bool: return self.dtype.lower() in ["varchar", "char"] def is_float(self) -> bool: return self.dtype.lower() in [ "real", "double precision", "double", ] def is_integer(self) -> bool: return self.dtype.lower() in [ "tinyint", "smallint", "integer", "int", "bigint", ] def is_numeric(self) -> bool: return self.dtype.lower() == "decimal" @classmethod def string_type(cls, size: int) -> str: return "varchar({})".format(size) def string_size(self) -> int: # override the string_size function to handle the unbound varchar case if self.dtype.lower() == "varchar" and self.char_size is None: return TRINO_VARCHAR_MAX_LENGTH return super().string_size() @classmethod def from_description(cls, name: str, raw_data_type: str) -> "Column": # Most of the Trino data types specify a type and not a precision/scale/charsize if not raw_data_type.lower().startswith(("varchar", "char", "decimal")): return cls(name, raw_data_type) # Trino data types that do specify a precision/scale/charsize: match = re.match( r"(?P<type>[^(]+)(?P<size>\([^)]+\))?(?P<type_suffix>[\w ]+)?", raw_data_type ) if match is None: raise DbtRuntimeError(f'Could not interpret data type "{raw_data_type}"') data_type = match.group("type") size_info = match.group("size") data_type_suffix = match.group("type_suffix") if data_type_suffix: data_type += data_type_suffix char_size = None numeric_precision = None numeric_scale = None if size_info is not None: # strip out the parentheses size_info = size_info[1:-1] parts = size_info.split(",") if len(parts) == 1: try: char_size = int(parts[0]) except ValueError: raise DbtRuntimeError( f'Could not interpret data_type "{raw_data_type}": ' f'could not convert "{parts[0]}" to an integer' ) elif len(parts) == 2: try: numeric_precision = int(parts[0]) except ValueError: raise DbtRuntimeError( f'Could not interpret data_type "{raw_data_type}": ' f'could not convert "{parts[0]}" to an integer' ) try: numeric_scale = int(parts[1]) except ValueError: raise DbtRuntimeError( f'Could not interpret data_type "{raw_data_type}": ' f'could not convert "{parts[1]}" to an integer' ) return cls(name, data_type, char_size, numeric_precision, numeric_scale) ================================================ FILE: dbt/adapters/trino/connections.py
================================================ import decimal import os import re from abc import ABCMeta, abstractmethod from contextlib import contextmanager from dataclasses import dataclass, field from datetime import date, datetime from enum import Enum from typing import Any, Dict, List, Optional, Union import sqlparse import trino from dbt.adapters.contracts.connection import AdapterResponse, Credentials from dbt.adapters.events.logging import AdapterLogger from dbt.adapters.exceptions.connection import FailedToConnectError from dbt.adapters.sql import SQLConnectionManager from dbt_common.exceptions import DbtDatabaseError, DbtRuntimeError from dbt_common.helper_types import Port from trino.transaction import IsolationLevel from dbt.adapters.trino.__version__ import version logger = AdapterLogger("Trino") PREPARED_STATEMENTS_ENABLED_DEFAULT = True class HttpScheme(Enum): HTTP = "http" HTTPS = "https" class TrinoCredentialsFactory: @classmethod def _create_trino_profile(cls, profile): if "method" in profile: method = profile["method"] if method == "ldap": return TrinoLdapCredentials elif method == "certificate": return TrinoCertificateCredentials elif method == "kerberos": return TrinoKerberosCredentials elif method == "jwt": return TrinoJwtCredentials elif method == "oauth": return TrinoOauthCredentials elif method == "oauth_console": return TrinoOauthConsoleCredentials return TrinoNoneCredentials @classmethod def translate_aliases(cls, kwargs: Dict[str, Any], recurse: bool = False) -> Dict[str, Any]: klazz = cls._create_trino_profile(kwargs) return klazz.translate_aliases(kwargs, recurse) @classmethod def validate(cls, data: Any): klazz = cls._create_trino_profile(data) return klazz.validate(data) @classmethod def from_dict(cls, data: Any): klazz = cls._create_trino_profile(data) return klazz.from_dict(data) class TrinoCredentials(Credentials, metaclass=ABCMeta): _ALIASES = {"catalog": "database"} @property def type(self): return "trino" @property def unique_field(self): return self.host def _connection_keys(self): return ( "method", "host", "port", "user", "database", "schema", "cert", "prepared_statements_enabled", ) @abstractmethod def trino_auth(self) -> Optional[trino.auth.Authentication]: pass @dataclass class TrinoNoneCredentials(TrinoCredentials): host: str port: Port user: str client_tags: Optional[List[str]] = None roles: Optional[Dict[str, str]] = None cert: Optional[Union[str, bool]] = None http_scheme: HttpScheme = HttpScheme.HTTP http_headers: Optional[Dict[str, str]] = None session_properties: Dict[str, Any] = field(default_factory=dict) prepared_statements_enabled: bool = PREPARED_STATEMENTS_ENABLED_DEFAULT retries: Optional[int] = trino.constants.DEFAULT_MAX_ATTEMPTS timezone: Optional[str] = None suppress_cert_warning: Optional[bool] = None @property def method(self): return "none" def trino_auth(self): return trino.constants.DEFAULT_AUTH @dataclass class TrinoCertificateCredentials(TrinoCredentials): host: str port: Port client_certificate: str client_private_key: str user: Optional[str] = None client_tags: Optional[List[str]] = None roles: Optional[Dict[str, str]] = None cert: Optional[Union[str, bool]] = None http_headers: Optional[Dict[str, str]] = None session_properties: Dict[str, Any] = field(default_factory=dict) prepared_statements_enabled: bool = PREPARED_STATEMENTS_ENABLED_DEFAULT retries: Optional[int] = trino.constants.DEFAULT_MAX_ATTEMPTS timezone: Optional[str] = None suppress_cert_warning: Optional[bool] = None @property def http_scheme(self): 
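# client certificate authentication only works over TLS, so the scheme is fixed to HTTPS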
return HttpScheme.HTTPS @property def method(self): return "certificate" def trino_auth(self): return trino.auth.CertificateAuthentication( self.client_certificate, self.client_private_key ) @dataclass class TrinoLdapCredentials(TrinoCredentials): host: str port: Port user: str password: str impersonation_user: Optional[str] = None client_tags: Optional[List[str]] = None roles: Optional[Dict[str, str]] = None cert: Optional[Union[str, bool]] = None http_headers: Optional[Dict[str, str]] = None session_properties: Dict[str, Any] = field(default_factory=dict) prepared_statements_enabled: bool = PREPARED_STATEMENTS_ENABLED_DEFAULT retries: Optional[int] = trino.constants.DEFAULT_MAX_ATTEMPTS timezone: Optional[str] = None suppress_cert_warning: Optional[bool] = None @property def http_scheme(self): return HttpScheme.HTTPS @property def method(self): return "ldap" def trino_auth(self): return trino.auth.BasicAuthentication(username=self.user, password=self.password) @dataclass class TrinoKerberosCredentials(TrinoCredentials): host: str port: Port user: str client_tags: Optional[List[str]] = None roles: Optional[Dict[str, str]] = None keytab: Optional[str] = None principal: Optional[str] = None krb5_config: Optional[str] = None service_name: Optional[str] = "trino" mutual_authentication: Optional[bool] = False cert: Optional[Union[str, bool]] = None http_headers: Optional[Dict[str, str]] = None force_preemptive: Optional[bool] = False hostname_override: Optional[str] = None sanitize_mutual_error_response: Optional[bool] = True delegate: Optional[bool] = False session_properties: Dict[str, Any] = field(default_factory=dict) prepared_statements_enabled: bool = PREPARED_STATEMENTS_ENABLED_DEFAULT retries: Optional[int] = trino.constants.DEFAULT_MAX_ATTEMPTS timezone: Optional[str] = None suppress_cert_warning: Optional[bool] = None @property def http_scheme(self): return HttpScheme.HTTPS @property def method(self): return "kerberos" def trino_auth(self): os.environ["KRB5_CLIENT_KTNAME"] = self.keytab return trino.auth.KerberosAuthentication( config=self.krb5_config, service_name=self.service_name, principal=self.principal, mutual_authentication=self.mutual_authentication, ca_bundle=self.cert, force_preemptive=self.force_preemptive, hostname_override=self.hostname_override, sanitize_mutual_error_response=self.sanitize_mutual_error_response, delegate=self.delegate, ) @dataclass class TrinoJwtCredentials(TrinoCredentials): host: str port: Port jwt_token: str user: Optional[str] = None client_tags: Optional[List[str]] = None roles: Optional[Dict[str, str]] = None cert: Optional[Union[str, bool]] = None http_headers: Optional[Dict[str, str]] = None session_properties: Dict[str, Any] = field(default_factory=dict) prepared_statements_enabled: bool = PREPARED_STATEMENTS_ENABLED_DEFAULT retries: Optional[int] = trino.constants.DEFAULT_MAX_ATTEMPTS timezone: Optional[str] = None suppress_cert_warning: Optional[bool] = None @property def http_scheme(self): return HttpScheme.HTTPS @property def method(self): return "jwt" def trino_auth(self): return trino.auth.JWTAuthentication(self.jwt_token) @dataclass class TrinoOauthCredentials(TrinoCredentials): host: str port: Port user: Optional[str] = None client_tags: Optional[List[str]] = None roles: Optional[Dict[str, str]] = None cert: Optional[Union[str, bool]] = None http_headers: Optional[Dict[str, str]] = None session_properties: Dict[str, Any] = field(default_factory=dict) prepared_statements_enabled: bool = PREPARED_STATEMENTS_ENABLED_DEFAULT retries: 
Optional[int] = trino.constants.DEFAULT_MAX_ATTEMPTS timezone: Optional[str] = None OAUTH = trino.auth.OAuth2Authentication( redirect_auth_url_handler=trino.auth.WebBrowserRedirectHandler() ) suppress_cert_warning: Optional[bool] = None @property def http_scheme(self): return HttpScheme.HTTPS @property def method(self): return "oauth" def trino_auth(self): return self.OAUTH @dataclass class TrinoOauthConsoleCredentials(TrinoCredentials): host: str port: Port user: Optional[str] = None client_tags: Optional[List[str]] = None roles: Optional[Dict[str, str]] = None cert: Optional[Union[str, bool]] = None http_headers: Optional[Dict[str, str]] = None session_properties: Dict[str, Any] = field(default_factory=dict) prepared_statements_enabled: bool = PREPARED_STATEMENTS_ENABLED_DEFAULT retries: Optional[int] = trino.constants.DEFAULT_MAX_ATTEMPTS timezone: Optional[str] = None OAUTH = trino.auth.OAuth2Authentication( redirect_auth_url_handler=trino.auth.ConsoleRedirectHandler() ) suppress_cert_warning: Optional[bool] = None @property def http_scheme(self): return HttpScheme.HTTPS @property def method(self): return "oauth_console" def trino_auth(self): return self.OAUTH class ConnectionWrapper(object): """Wrap a Trino connection in a way that accomplishes two tasks: - prefetch results from execute() calls so that trino calls actually persist to the db but then present the usual cursor interface - provide `cancel()` on the same object as `commit()`/`rollback()`/... """ def __init__(self, handle, prepared_statements_enabled): self.handle = handle self._cursor = None self._fetch_result = None self._prepared_statements_enabled = prepared_statements_enabled def cursor(self): self._cursor = self.handle.cursor() return self def cancel(self): if self._cursor is not None: self._cursor.cancel() def close(self): # this is a noop on trino, but pass it through anyway self.handle.close() def commit(self): pass def rollback(self): pass def start_transaction(self): pass def fetchall(self): if self._cursor is None: return None if self._fetch_result is not None: ret = self._fetch_result self._fetch_result = None return ret return None def fetchone(self): if self._cursor is None: return None if self._fetch_result is not None: ret = self._fetch_result[0] self._fetch_result = None return ret return None def fetchmany(self, size): if self._cursor is None: return None if self._fetch_result is not None: ret = self._fetch_result[:size] self._fetch_result = None return ret return None def execute(self, sql, bindings=None): if not self._prepared_statements_enabled and bindings is not None: # DEPRECATED: by default prepared statements are used. # Code is left as an escape hatch if prepared statements # are failing. bindings = tuple(self._escape_value(b) for b in bindings) sql = sql % bindings result = self._cursor.execute(sql) else: result = self._cursor.execute(sql, params=bindings) self._fetch_result = self._cursor.fetchall() return result @property def description(self): return self._cursor.description @classmethod def _escape_value(cls, value): """A not very comprehensive system for escaping bindings. I think "'" (a single quote) is the only character that matters. 
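For illustration, the mapping below yields:
    None                    -> NULL
    "O'Brien"               -> 'O''Brien'
    datetime(2024, 1, 2, 3) -> TIMESTAMP '2024-01-02 03:00:00.000'
    date(2024, 1, 2)        -> DATE '2024-01-02'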
""" numbers = (decimal.Decimal, int, float) if value is None: return "NULL" elif isinstance(value, str): return "'{}'".format(value.replace("'", "''")) elif isinstance(value, numbers): return value elif isinstance(value, datetime): time_formatted = value.strftime("%Y-%m-%d %H:%M:%S.%f")[:-3] return "TIMESTAMP '{}'".format(time_formatted) elif isinstance(value, date): date_formatted = value.strftime("%Y-%m-%d") return "DATE '{}'".format(date_formatted) else: raise ValueError("Cannot escape {}".format(type(value))) @dataclass class TrinoAdapterResponse(AdapterResponse): query: str = "" query_id: str = "" class TrinoConnectionManager(SQLConnectionManager): TYPE = "trino" behavior_flags = None def __init__(self, profile, mp_context, behavior_flags=None) -> None: super().__init__(profile, mp_context) TrinoConnectionManager.behavior_flags = behavior_flags @contextmanager def exception_handler(self, sql): try: yield except trino.exceptions.Error as e: msg = str(e) if "Failed to establish a new connection" in msg: raise FailedToConnectError(msg) from e if isinstance(e, trino.exceptions.TrinoQueryError): logger.debug("Trino query id: {}".format(e.query_id)) logger.debug("Trino error: {}".format(msg)) raise DbtDatabaseError(msg) except Exception as e: msg = str(e) if isinstance(e, DbtRuntimeError): # during a sql query, an internal to dbt exception was raised. # this sounds a lot like a signal handler and probably has # useful information, so raise it without modification. raise raise DbtRuntimeError(msg) from e # For connection in auto-commit mode there is no need to start # separate transaction. If using auto-commit, the client will # create a new transaction and commit/rollback for each query def add_begin_query(self): pass def add_commit_query(self): pass @classmethod def open(cls, connection): if connection.state == "open": logger.debug("Connection is already open, skipping open.") return connection credentials = connection.credentials # set default `cert` value, according to # require_certificate_validation behavior flag if credentials.cert is None: req_cert_val_flag = cls.behavior_flags.require_certificate_validation.setting if req_cert_val_flag: credentials.cert = True if credentials.suppress_cert_warning: import urllib3 urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) # it's impossible for trino to fail here as 'connections' are actually # just cursor factories. 
trino_conn = trino.dbapi.connect( host=credentials.host, port=credentials.port, user=credentials.impersonation_user if getattr(credentials, "impersonation_user", None) else credentials.user, client_tags=credentials.client_tags, roles=credentials.roles, catalog=credentials.database, schema=credentials.schema, http_scheme=credentials.http_scheme.value, http_headers=credentials.http_headers, session_properties=credentials.session_properties, auth=credentials.trino_auth(), max_attempts=credentials.retries, isolation_level=IsolationLevel.AUTOCOMMIT, source=f"dbt-trino-{version}", verify=credentials.cert, timezone=credentials.timezone, ) connection.state = "open" connection.handle = ConnectionWrapper(trino_conn, credentials.prepared_statements_enabled) return connection @classmethod def get_response(cls, cursor) -> TrinoAdapterResponse: code = cursor._cursor.update_type if code is None: code = "SUCCESS" rows_affected = cursor._cursor.rowcount if rows_affected == -1: message = f"{code}" else: message = f"{code} ({rows_affected:_} rows)" return TrinoAdapterResponse( _message=message, query=cursor._cursor.query, query_id=cursor._cursor.query_id, rows_affected=rows_affected, ) # type: ignore def cancel(self, connection): connection.handle.cancel() def add_query(self, sql, auto_begin=True, bindings=None, abridge_sql_log=False): connection = None cursor = None # TODO: is this sufficient? Largely copy+pasted from snowflake, so # there's some common behavior here we can maybe factor out into the # SQLAdapter? queries = [q.rstrip(";") for q in sqlparse.split(sql)] for individual_query in queries: # hack -- after the last ';', remove comments and don't run # empty queries. this avoids using exceptions as flow control, # and also allows us to return the status of the last cursor without_comments = re.sub( re.compile("^.*(--.*)$", re.MULTILINE), "", individual_query ).strip() if without_comments == "": continue parent = super(TrinoConnectionManager, self) connection, cursor = parent.add_query( individual_query, auto_begin, bindings, abridge_sql_log ) if cursor is None: conn = self.get_thread_connection() if conn is None or conn.name is None: conn_name = "" else: conn_name = conn.name raise DbtRuntimeError( "Tried to run an empty query on model '{}'. If you are " "conditionally running\nsql, e.g.
in a model hook, make " "sure your `else` clause contains valid sql!\n\n" "Provided SQL:\n{}".format(conn_name, sql) ) return connection, cursor @classmethod def data_type_code_to_name(cls, type_code) -> str: return type_code.split("(")[0].upper() ================================================ FILE: dbt/adapters/trino/constants.py ================================================ from types import SimpleNamespace ADAPTER_TYPE = "trino" TRINO_CATALOG_TYPE = "trino" DEFAULT_TRINO_CATALOG = SimpleNamespace( name="trino_default", catalog_name="trino_default", catalog_type="trino", table_format=None, file_format=None, external_volume=None, adapter_properties={}, ) ================================================ FILE: dbt/adapters/trino/impl.py ================================================ from dataclasses import dataclass from typing import Dict, List, Optional import agate from dbt.adapters.base.impl import AdapterConfig, ConstraintSupport from dbt.adapters.base.meta import available from dbt.adapters.capability import ( Capability, CapabilityDict, CapabilitySupport, Support, ) from dbt.adapters.catalogs import CatalogRelation from dbt.adapters.contracts.relation import RelationConfig from dbt.adapters.sql import SQLAdapter from dbt_common.behavior_flags import BehaviorFlag from dbt_common.contracts.constraints import ConstraintType from dbt_common.exceptions import DbtDatabaseError from dbt.adapters.trino import ( TrinoColumn, TrinoConnectionManager, TrinoRelation, constants, parse_model, ) from dbt.adapters.trino.catalogs import TrinoCatalogIntegration @dataclass class TrinoConfig(AdapterConfig): properties: Optional[Dict[str, str]] = None view_security: Optional[str] = "definer" class TrinoAdapter(SQLAdapter): Relation = TrinoRelation Column = TrinoColumn ConnectionManager = TrinoConnectionManager AdapterSpecificConfigs = TrinoConfig CATALOG_INTEGRATIONS = [ TrinoCatalogIntegration, ] CONSTRAINT_SUPPORT = { ConstraintType.check: ConstraintSupport.NOT_SUPPORTED, ConstraintType.not_null: ConstraintSupport.ENFORCED, ConstraintType.unique: ConstraintSupport.NOT_SUPPORTED, ConstraintType.primary_key: ConstraintSupport.NOT_SUPPORTED, ConstraintType.foreign_key: ConstraintSupport.NOT_SUPPORTED, } _capabilities: CapabilityDict = CapabilityDict( { Capability.SchemaMetadataByRelations: CapabilitySupport(support=Support.Full), # No information about last table modification in information_schema.tables Capability.TableLastModifiedMetadata: CapabilitySupport(support=Support.Unsupported), Capability.TableLastModifiedMetadataBatch: CapabilitySupport( support=Support.Unsupported ), } ) def __init__(self, config, mp_context) -> None: super().__init__(config, mp_context) self.connections = self.ConnectionManager(config, mp_context, self.behavior) self.add_catalog_integration(constants.DEFAULT_TRINO_CATALOG) @property def _behavior_flags(self) -> List[BehaviorFlag]: return [ { # type: ignore "name": "require_certificate_validation", "default": False, "description": ( "SSL certificate validation is disabled by default. " "This is legacy behavior, which will be changed in future releases. " "It is strongly advised to enable the `require_certificate_validation` flag " "or to explicitly set the `cert` configuration to `True` for security reasons. " "You may receive an error after that if your SSL setup is incorrect."
), } ] @classmethod def date_function(cls): return "datenow()" @classmethod def convert_text_type(cls, agate_table, col_idx): return "VARCHAR" @classmethod def convert_number_type(cls, agate_table, col_idx): decimals = agate_table.aggregate(agate.MaxPrecision(col_idx)) return "DOUBLE" if decimals else "INTEGER" @classmethod def convert_datetime_type(cls, agate_table, col_idx): return "TIMESTAMP" @classmethod def convert_date_type(cls, agate_table: agate.Table, col_idx: int) -> str: return "DATE" def timestamp_add_sql(self, add_to: str, number: int = 1, interval: str = "hour") -> str: return f"{add_to} + interval '{number}' {interval}" def get_columns_in_relation(self, relation): try: return super().get_columns_in_relation(relation) except DbtDatabaseError as exc: if "does not exist" in str(exc): return [] else: raise def valid_incremental_strategies(self): return ["append", "merge", "delete+insert", "microbatch"] @available def build_catalog_relation(self, model: RelationConfig) -> Optional[CatalogRelation]: """ Builds a relation for a given configuration. This method uses the provided configuration to determine the appropriate catalog integration and config parser for building the relation. It defaults to the trino catalog if none is provided in the configuration for backward compatibility. Args: model (RelationConfig): `config.model` (not `model`) from the jinja context Returns: Any: The constructed relation object generated through the catalog integration and parser """ if catalog := parse_model.catalog_name(model): catalog_integration = self.get_catalog_integration(catalog) return catalog_integration.build_relation(model) return None ================================================ FILE: dbt/adapters/trino/parse_model.py ================================================ from typing import Optional from dbt.adapters.catalogs import CATALOG_INTEGRATION_MODEL_CONFIG_NAME # type: ignore from dbt.adapters.contracts.relation import RelationConfig from dbt.adapters.trino import constants def catalog_name(model: RelationConfig) -> Optional[str]: """Extract catalog name from model configuration""" if not hasattr(model, "config") or not model.config: return None if catalog := model.config.get(CATALOG_INTEGRATION_MODEL_CONFIG_NAME): return catalog return constants.DEFAULT_TRINO_CATALOG.name ================================================ FILE: dbt/adapters/trino/relation.py ================================================ from dataclasses import dataclass, field from dbt.adapters.base.relation import BaseRelation, EventTimeFilter, Policy from dbt.adapters.contracts.relation import ComponentName @dataclass(frozen=True, eq=False, repr=False) class TrinoRelation(BaseRelation): quote_policy: Policy = field(default_factory=lambda: Policy()) require_alias: bool = False # Overridden as Trino converts relation identifiers to lowercase def _is_exactish_match(self, field: ComponentName, value: str) -> bool: return self.path.get_lowered_part(field) == value.lower() # Overridden because Trino cannot compare a TIMESTAMP column with a VARCHAR literal. 
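# e.g. with field_name='created_at', start='2024-01-01', end='2024-02-01', the
# filter below renders as:
#   created_at >= TIMESTAMP '2024-01-01' and created_at < TIMESTAMP '2024-02-01'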
def _render_event_time_filtered(self, event_time_filter: EventTimeFilter) -> str: """ Returns "" if start and end are both None """ filter = "" if event_time_filter.start and event_time_filter.end: filter = f"{event_time_filter.field_name} >= TIMESTAMP '{event_time_filter.start}' and {event_time_filter.field_name} < TIMESTAMP '{event_time_filter.end}'" elif event_time_filter.start: filter = f"{event_time_filter.field_name} >= TIMESTAMP '{event_time_filter.start}'" elif event_time_filter.end: filter = f"{event_time_filter.field_name} < TIMESTAMP '{event_time_filter.end}'" return filter ================================================ FILE: dbt/include/trino/__init__.py ================================================ import os PACKAGE_PATH = os.path.dirname(__file__) ================================================ FILE: dbt/include/trino/dbt_project.yml ================================================ name: dbt_trino version: 1.0 config-version: 2 macro-paths: ["macros"] ================================================ FILE: dbt/include/trino/macros/adapters.sql ================================================ -- - get_catalog -- - list_relations_without_caching -- - get_columns_in_relation {% macro trino__get_columns_in_relation(relation) -%} {%- set sql -%} select column_name, data_type from {{ relation.information_schema() }}.columns where table_catalog = '{{ relation.database | lower }}' and table_schema = '{{ relation.schema | lower }}' and table_name = '{{ relation.identifier | lower}}' {%- endset -%} {%- set result = run_query(sql) -%} {% set maximum = 10000 %} {% if (result | length) >= maximum %} {% set msg %} Too many columns in relation {{ relation }}! dbt can only get information about relations with fewer than {{ maximum }} columns. {% endset %} {% do exceptions.raise_compiler_error(msg) %} {% endif %} {% set columns = [] %} {% for row in result %} {% do columns.append(api.Column.from_description(row['column_name'].lower(), row['data_type'])) %} {% endfor %} {% do return(columns) %} {% endmacro %} {% macro trino__list_relations_without_caching(relation) %} {% call statement('list_relations_without_caching', fetch_result=True) -%} select t.table_catalog as database, t.table_name as name, t.table_schema as schema, case when mv.name is not null then 'materialized_view' when t.table_type = 'BASE TABLE' then 'table' when t.table_type = 'VIEW' then 'view' else t.table_type end as table_type from {{ relation.information_schema() }}.tables t left join ( select * from system.metadata.materialized_views where catalog_name = '{{ relation.database | lower }}' and schema_name = '{{ relation.schema | lower }}') mv on mv.catalog_name = t.table_catalog and mv.schema_name = t.table_schema and mv.name = t.table_name where t.table_schema = '{{ relation.schema | lower }}' {% endcall %} {{ return(load_result('list_relations_without_caching').table) }} {% endmacro %} {% macro trino__reset_csv_table(model, full_refresh, old_relation, agate_table) %} {{ adapter.drop_relation(old_relation) }} {{ return(create_csv_table(model, agate_table)) }} {% endmacro %} {% macro trino__create_csv_table(model, agate_table) %} {%- set column_override = model['config'].get('column_types', {}) -%} {%- set quote_seed_column = model['config'].get('quote_columns', None) -%} {% set sql %} create table {{ this.render() }} ( {%- for col_name in agate_table.column_names -%} {%- set inferred_type = adapter.convert_type(agate_table, loop.index0) -%} {%- set type = column_override.get(col_name, inferred_type) -%} {%- set 
column_name = (col_name | string) -%} {{ adapter.quote_seed_column(column_name, quote_seed_column) }} {{ type }} {%- if not loop.last -%}, {%- endif -%} {%- endfor -%} ) {{ properties() }} {% endset %} {% call statement('_') -%} {{ sql }} {%- endcall %} {{ return(sql) }} {% endmacro %} {% macro properties(temporary=False) %} {%- set _properties = config.get('properties') -%} {%- set table_format = config.get('table_format') -%} {%- set file_format = config.get('file_format') -%} {%- set catalog_relation = adapter.build_catalog_relation(config.model) -%} {%- set catalog_table_format = catalog_relation.table_format -%} {%- set catalog_file_format = catalog_relation.file_format -%} {%- set catalog_storage_uri = catalog_relation.storage_uri -%} {%- if file_format -%} {%- if _properties -%} {%- if _properties.format -%} {% set msg %} You can specify either 'file_format' or 'properties.format' configurations, but not both. {% endset %} {% do exceptions.raise_compiler_error(msg) %} {%- else -%} {%- do _properties.update({'format': "'" ~ file_format ~ "'"}) -%} {%- endif -%} {%- else -%} {%- set _properties = {'format': "'" ~ file_format ~ "'"} -%} {%- endif -%} {%- elif (not _properties.format) and catalog_file_format -%} {%- if _properties -%} {%- do _properties.update({'format': "'" ~ catalog_file_format ~ "'"}) -%} {%- else -%} {%- set _properties = {'format': "'" ~ catalog_file_format ~ "'"} -%} {%- endif -%} {%- endif -%} {%- if table_format -%} {%- if _properties -%} {%- if _properties.type -%} {% set msg %} You can specify either 'table_format' or 'properties.type' configurations, but not both. {% endset %} {% do exceptions.raise_compiler_error(msg) %} {%- else -%} {%- do _properties.update({'type': "'" ~ table_format ~ "'"}) -%} {%- endif -%} {%- else -%} {%- set _properties = {'type': "'" ~ table_format ~ "'"} -%} {%- endif -%} {%- elif (not _properties.type) and (catalog_table_format is not none) -%} {%- if _properties -%} {%- do _properties.update({'type': "'" ~ catalog_table_format ~ "'"}) -%} {%- else -%} {%- set _properties = {'type': "'" ~ catalog_table_format ~ "'"} -%} {%- endif -%} {%- endif -%} {%- if not _properties.location and catalog_storage_uri -%} {%- if _properties -%} {%- do _properties.update({'location': "'" ~ catalog_storage_uri ~ "'"}) -%} {%- else -%} {%- set _properties = {'location': "'" ~ catalog_storage_uri ~ "'"} -%} {%- endif -%} {%- endif -%} {%- if temporary -%} {%- if _properties -%} {%- if _properties.location -%} {%- do _properties.update({'location': _properties.location[:-1] ~ "__dbt_tmp'"}) -%} {%- endif -%} {%- endif -%} {%- endif -%} {%- if _properties is not none -%} WITH ( {%- for key, value in _properties.items() -%} {{ key }} = {{ value }} {%- if not loop.last -%}{{ ',\n ' }}{%- endif -%} {%- endfor -%} ) {%- endif -%} {%- endmacro -%} {% macro comment(comment) %} {%- set persist_docs = model['config'].get('persist_docs') -%} {%- if persist_docs -%} {%- set persist_relation = persist_docs.get('relation') -%} {%- if persist_relation and comment is not none and comment|length > 0 -%} comment '{{ comment | replace("'", "''") }}' {%- endif -%} {%- endif -%} {%- endmacro -%} {% macro trino__create_table_as(temporary, relation, sql, on_exists=None) -%} {%- set or_replace = ' or replace' if on_exists == 'replace' else '' -%} {%- set if_not_exists = ' if not exists' if on_exists == 'skip' else '' -%} {%- set contract_config = config.get('contract') -%} {%- if contract_config.enforced -%} create{{ or_replace }} table{{ if_not_exists }} {{ relation }} {{ 
get_table_columns_and_constraints() }} {{ get_assert_columns_equivalent(sql) }} {%- set sql = get_select_subquery(sql) %} {{ comment(model.get('description')) }} {{ properties(temporary) }} ; insert into {{ relation }} ( {{ sql }} ) ; {%- else %} create{{ or_replace }} table{{ if_not_exists }} {{ relation }} {{ comment(model.get('description')) }} {{ properties(temporary) }} as ( {{ sql }} ); {%- endif %} {% endmacro %} {% macro trino__create_view_as(relation, sql) -%} {%- set view_security = config.get('view_security', 'definer') -%} {%- if view_security not in ['definer', 'invoker'] -%} {%- set log_message = 'Invalid value for view_security (%s) specified. Setting default value (%s).' % (view_security, 'definer') -%} {% do log(log_message) %} {%- set view_security = 'definer' -%} {% endif %} create or replace view {{ relation }} {%- set contract_config = config.get('contract') -%} {%- if contract_config.enforced -%} {{ get_assert_columns_equivalent(sql) }} {%- endif %} security {{ view_security }} as {{ sql }} ; {% endmacro %} {%- macro trino__get_drop_sql(relation) -%} {% set relation_type = relation.type|replace("_", " ") %} drop {{ relation_type }} if exists {{ relation }} {% endmacro %} {# see this issue: https://github.com/dbt-labs/dbt/issues/2267 #} {% macro trino__information_schema_name(database) -%} {%- if database -%} {{ database }}.INFORMATION_SCHEMA {%- else -%} INFORMATION_SCHEMA {%- endif -%} {%- endmacro %} {# On Trino, 'cascade' is not supported so we have to manually cascade. #} {% macro trino__drop_schema(relation) -%} {% for row in list_relations_without_caching(relation) %} {% set rel_db = row[0] %} {% set rel_identifier = row[1] %} {% set rel_schema = row[2] %} {% set rel_type = api.Relation.get_relation_type(row[3]) %} {% set existing = api.Relation.create(database=rel_db, schema=rel_schema, identifier=rel_identifier, type=rel_type) %} {% do drop_relation(existing) %} {% endfor %} {%- call statement('drop_schema') -%} drop schema if exists {{ relation }} {% endcall %} {% endmacro %} {% macro trino__rename_relation(from_relation, to_relation) -%} {% set from_relation_type = from_relation.type|replace("_", " ") %} {% call statement('rename_relation') -%} alter {{ from_relation_type }} {{ from_relation }} rename to {{ to_relation }} {%- endcall %} {% endmacro %} {% macro trino__alter_relation_comment(relation, relation_comment) -%} comment on {{ relation.type }} {{ relation }} is '{{ relation_comment | replace("'", "''") }}'; {% endmacro %} {% macro trino__alter_column_comment(relation, column_dict) %} {% set existing_columns = adapter.get_columns_in_relation(relation) | map(attribute="name") | list %} {% for column_name in column_dict if (column_name in existing_columns) %} {% set comment = column_dict[column_name]['description'] %} {%- if comment|length -%} comment on column {{ relation }}.{{ adapter.quote(column_name) if column_dict[column_name]['quote'] else column_name }} is '{{ comment | replace("'", "''") }}'; {%- else -%} comment on column {{ relation }}.{{ adapter.quote(column_name) if column_dict[column_name]['quote'] else column_name }} is null; {%- endif -%} {% endfor %} {% endmacro %} {% macro trino__list_schemas(database) -%} {% call statement('list_schemas', fetch_result=True, auto_begin=False) %} select schema_name from {{ information_schema_name(database) }}.schemata {% endcall %} {{ return(load_result('list_schemas').table) }} {% endmacro %} {% macro trino__check_schema_exists(information_schema, schema) -%} {% call statement('check_schema_exists',
fetch_result=True, auto_begin=False) -%} select count(*) from {{ information_schema }}.schemata where catalog_name = '{{ information_schema.database }}' and schema_name = '{{ schema | lower }}' {%- endcall %} {{ return(load_result('check_schema_exists').table) }} {% endmacro %} {% macro trino__get_binding_char() %} {%- if target.prepared_statements_enabled|as_bool -%} {{ return('?') }} {%- else -%} {{ return('%s') }} {%- endif -%} {% endmacro %} {% macro trino__alter_relation_add_remove_columns(relation, add_columns, remove_columns) %} {% if add_columns is none %} {% set add_columns = [] %} {% endif %} {% if remove_columns is none %} {% set remove_columns = [] %} {% endif %} {% for column in add_columns %} {% set sql -%} alter {{ relation.type }} {{ relation }} add column {{ adapter.quote(column.name) }} {{ column.data_type }} {%- endset -%} {% do run_query(sql) %} {% endfor %} {% for column in remove_columns %} {% set sql -%} alter {{ relation.type }} {{ relation }} drop column {{ adapter.quote(column.name) }} {%- endset -%} {% do run_query(sql) %} {% endfor %} {% endmacro %} {% macro create_or_replace_view() %} {%- set identifier = model['alias'] -%} {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%} {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%} {%- set target_relation = api.Relation.create( identifier=identifier, schema=schema, database=database, type='view') -%} {% set grant_config = config.get('grants') %} {{ run_hooks(pre_hooks) }} -- If there is another object delete it {%- if old_relation is not none and not old_relation.is_view -%} {{ handle_existing_table(should_full_refresh(), old_relation) }} {%- endif -%} -- build model {% call statement('main') -%} {{ get_create_view_as_sql(target_relation, sql) }} {%- endcall %} {% set should_revoke = should_revoke(exists_as_view, full_refresh_mode=True) %} {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %} {{ run_hooks(post_hooks) }} {{ return({'relations': [target_relation]}) }} {% endmacro %} {% macro trino__alter_column_type(relation, column_name, new_column_type) %} {# 1. Create a new column (w/ temp name and correct type) 2. Copy data over to it 3. Drop the existing column 4. Rename the new column to existing column #} {%- set tmp_column = column_name + "__dbt_alter" -%} {% call statement('alter_column_type') %} alter table {{ relation }} add column {{ adapter.quote(tmp_column) }} {{ new_column_type }}; update {{ relation }} set {{ adapter.quote(tmp_column) }} = CAST({{ adapter.quote(column_name) }} AS {{ new_column_type }}); alter table {{ relation }} drop column {{ adapter.quote(column_name) }}; alter table {{ relation }} rename column {{ adapter.quote(tmp_column) }} to {{ adapter.quote(column_name) }} {% endcall %} {% endmacro %} ================================================ FILE: dbt/include/trino/macros/apply_grants.sql ================================================ {% macro trino__get_show_grant_sql(relation) -%} select grantee, lower(privilege_type) as privilege_type from information_schema.table_privileges where table_catalog = '{{ relation.database }}' and table_schema = '{{ relation.schema }}' and table_name = '{{ relation.identifier }}' {%- endmacro %} {% macro trino__copy_grants() %} {# -- This macro should return true or false depending on the answer to -- following question: -- when an object is fully replaced on your database, do grants copy over? -- e.g.
on Postgres this is never true, -- on Spark this is different for views vs. non-Delta tables vs. Delta tables, -- on Snowflake it depends on the user-supplied copy_grants configuration. -- true by default, which means “play it safe”: grants MIGHT have copied over, -- so dbt will run an extra query to check them + calculate diffs. #} {{ return(False) }} {% endmacro %} {%- macro trino__get_grant_sql(relation, privilege, grantees) -%} grant {{ privilege }} on {{ relation }} to {{ adapter.quote(grantees[0]) }} {%- endmacro %} {%- macro trino__support_multiple_grantees_per_dcl_statement() -%} {# -- This macro should return true or false depending on the answer to -- following question: -- does this database support grant {privilege} to user_a, user_b, ...? -- or do user_a + user_b need their own separate grant statements? #} {{ return(False) }} {%- endmacro -%} {% macro trino__call_dcl_statements(dcl_statement_list) %} {% for dcl_statement in dcl_statement_list %} {% call statement('grant_or_revoke') %} {{ dcl_statement }} {% endcall %} {% endfor %} {% endmacro %} ================================================ FILE: dbt/include/trino/macros/catalog.sql ================================================ {% macro trino__get_catalog(information_schema, schemas) -%} {% set query %} with tables as ( {{ trino__get_catalog_tables_sql(information_schema) }} {{ trino__get_catalog_schemas_where_clause_sql(schemas) }} ), columns as ( {{ trino__get_catalog_columns_sql(information_schema) }} {{ trino__get_catalog_schemas_where_clause_sql(schemas) }} ), table_comment as ( {{ trino__get_catalog_table_comment_schemas_sql(information_schema, schemas) }} ) {{ trino__get_catalog_results_sql() }} {%- endset -%} {{ return(run_query(query)) }} {%- endmacro %} {% macro trino__get_catalog_relations(information_schema, relations) -%} {% set query %} with tables as ( {{ trino__get_catalog_tables_sql(information_schema) }} {{ trino__get_catalog_relations_where_clause_sql(relations) }} ), columns as ( {{ trino__get_catalog_columns_sql(information_schema) }} {{ trino__get_catalog_relations_where_clause_sql(relations) }} ), table_comment as ( {{ trino__get_catalog_table_comment_relations_sql(information_schema, relations) }} ) {{ trino__get_catalog_results_sql() }} {%- endset -%} {{ return(run_query(query)) }} {%- endmacro %} {% macro trino__get_catalog_tables_sql(information_schema) -%} select table_catalog as "table_database", table_schema as "table_schema", table_name as "table_name", table_type as "table_type", null as "table_owner" from {{ information_schema }}.tables {%- endmacro %} {% macro trino__get_catalog_columns_sql(information_schema) -%} select table_catalog as "table_database", table_schema as "table_schema", table_name as "table_name", column_name as "column_name", ordinal_position as "column_index", data_type as "column_type", comment as "column_comment" from {{ information_schema }}.columns {%- endmacro %} {% macro trino__get_catalog_table_comment_schemas_sql(information_schema, schemas) -%} select catalog_name as "table_database", schema_name as "table_schema", table_name as "table_name", comment as "table_comment" from system.metadata.table_comments where catalog_name = '{{ information_schema.database }}' and schema_name != 'information_schema' and schema_name in ('{{ schemas | join("','") | lower }}') {%- endmacro %} {% macro trino__get_catalog_table_comment_relations_sql(information_schema, relations) -%} {%- for relation in relations %} select catalog_name as "table_database", schema_name as 
"table_schema", table_name as "table_name", comment as "table_comment" from system.metadata.table_comments where catalog_name = '{{ information_schema.database }}' and schema_name != 'information_schema' and {% if relation.schema and relation.identifier %} ( schema_name = '{{ relation.schema | lower }}' and table_name = '{{ relation.identifier | lower }}' ) {% elif relation.schema %} ( schema_name = '{{ relation.schema | lower }}' ) {% else %} {% do exceptions.raise_compiler_error( '`get_catalog_relations` requires a list of relations, each with a schema' ) %} {% endif %} {%- if not loop.last %} union all {% endif -%} {%- endfor -%} {%- endmacro %} {% macro trino__get_catalog_results_sql() -%} select table_database, table_schema, table_name, table_type, table_owner, column_name, column_index, column_type, column_comment, table_comment from tables join columns using ("table_database", "table_schema", "table_name") join table_comment using ("table_database", "table_schema", "table_name") order by "column_index" {%- endmacro %} {% macro trino__get_catalog_schemas_where_clause_sql(schemas) -%} where table_schema != 'information_schema' and table_schema in ('{{ schemas | join("','") | lower }}') {%- endmacro %} {% macro trino__get_catalog_relations_where_clause_sql(relations) -%} where table_schema != 'information_schema' and ( {%- for relation in relations -%} {% if relation.schema and relation.identifier %} ( table_schema = '{{ relation.schema | lower }}' and table_name = '{{ relation.identifier | lower }}' ) {% elif relation.schema %} ( table_schema = '{{ relation.schema | lower }}' ) {% else %} {% do exceptions.raise_compiler_error( '`get_catalog_relations` requires a list of relations, each with a schema' ) %} {% endif %} {%- if not loop.last %} or {% endif -%} {%- endfor -%} ) {%- endmacro %} ================================================ FILE: dbt/include/trino/macros/materializations/incremental.sql ================================================ {% macro get_incremental_tmp_relation_type(strategy, unique_key, language) %} /* {# If we are running multiple statements (DELETE + INSERT), we must first save the model query results as a temporary table in order to guarantee consistent inputs to both statements. If we are running a single statement (MERGE or INSERT alone), we can save the model query definition as a view instead, for faster overall incremental processing. #} */ {%- set views_enabled = config.get('views_enabled', true) -%} {% if language == 'sql' and (views_enabled and (strategy in ('default', 'append', 'merge') or (unique_key is none))) %} {{ return('view') }} {% else %} {#-- play it safe -- #} {{ return('table') }} {% endif %} {% endmacro %} {% materialization incremental, adapter='trino', supported_languages=['sql'] -%} {#-- configs --#} {%- set unique_key = config.get('unique_key') -%} {%- set full_refresh_mode = (should_full_refresh()) -%} {%- set on_schema_change = incremental_validate_on_schema_change(config.get('on_schema_change'), default='ignore') -%} {%- set language = model['language'] -%} {%- set on_table_exists = config.get('on_table_exists', 'rename') -%} {% if on_table_exists not in ['rename', 'drop', 'replace'] %} {%- set log_message = 'Invalid value for on_table_exists (%s) specified. Setting default value (%s).' 
% (on_table_exists, 'rename') -%} {% do log(log_message) %} {%- set on_table_exists = 'rename' -%} {% endif %} {#-- Get the incremental_strategy and the macro to use for the strategy --#} {% set incremental_strategy = config.get('incremental_strategy') or 'default' %} {% set incremental_predicates = config.get('predicates', none) or config.get('incremental_predicates', none) %} {% set strategy_sql_macro_func = adapter.get_incremental_strategy_macro(context, incremental_strategy) %} {#-- relations --#} {%- set existing_relation = load_cached_relation(this) -%} {%- set target_relation = this.incorporate(type='table') -%} {#-- The temp relation will be a view (faster) or temp table, depending on upsert/merge strategy --#} {%- set tmp_relation_type = get_incremental_tmp_relation_type(incremental_strategy, unique_key, language) -%} {%- set tmp_relation = make_temp_relation(this).incorporate(type=tmp_relation_type) -%} {%- set intermediate_relation = make_intermediate_relation(target_relation) -%} {%- set backup_relation_type = 'table' if existing_relation is none else existing_relation.type -%} {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%} {#-- the temp_ and backup_ relation should not already exist in the database; get_relation -- will return None in that case. Otherwise, we get a relation that we can drop -- later, before we try to use this name for the current operation.#} {%- set preexisting_tmp_relation = load_cached_relation(tmp_relation)-%} {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation)-%} {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%} {#--- grab the current table's grants config for comparison later on #} {% set grant_config = config.get('grants') %} -- drop the temp relations if they exist already in the database {{ drop_relation_if_exists(preexisting_tmp_relation) }} {{ drop_relation_if_exists(preexisting_intermediate_relation) }} {{ drop_relation_if_exists(preexisting_backup_relation) }} {{ run_hooks(pre_hooks) }} {% if existing_relation is none %} {%- call statement('main', language=language) -%} {{ create_table_as(False, target_relation, compiled_code, language) }} {%- endcall -%} {% elif existing_relation.is_view %} {#-- Can't overwrite a view with a table - we must drop --#} {{ log("Dropping relation " ~ target_relation ~ " because it is a view and this model is a table.") }} {% do adapter.drop_relation(existing_relation) %} {%- call statement('main', language=language) -%} {{ create_table_as(False, target_relation, compiled_code, language) }} {%- endcall -%} {% elif full_refresh_mode %} {#-- Create table with given `on_table_exists` mode #} {% do on_table_exists_logic(on_table_exists, existing_relation, intermediate_relation, backup_relation, target_relation) %} {% else %} {#-- Create the temp relation, either as a view or as a temp table --#} {% if tmp_relation_type == 'view' %} {%- call statement('create_tmp_relation') -%} {{ create_view_as(tmp_relation, compiled_code) }} {%- endcall -%} {% else %} {%- call statement('create_tmp_relation', language=language) -%} {{ create_table_as(True, tmp_relation, compiled_code, language) }} {%- endcall -%} {% endif %} {% do adapter.expand_target_column_types( from_relation=tmp_relation, to_relation=target_relation) %} {#-- Process schema changes. Returns dict of changes if successful.
Use source columns for upserting/merging --#} {% set dest_columns = process_schema_changes(on_schema_change, tmp_relation, existing_relation) %} {% if not dest_columns %} {% set dest_columns = adapter.get_columns_in_relation(existing_relation) %} {% endif %} {#-- Build the sql --#} {% set strategy_arg_dict = ({'target_relation': target_relation, 'temp_relation': tmp_relation, 'unique_key': unique_key, 'dest_columns': dest_columns, 'incremental_predicates': incremental_predicates }) %} {%- call statement('main') -%} {{ strategy_sql_macro_func(strategy_arg_dict) }} {%- endcall -%} {% endif %} {% do drop_relation_if_exists(tmp_relation) %} {{ run_hooks(post_hooks) }} {% set should_revoke = should_revoke(existing_relation.is_table, full_refresh_mode) %} {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %} {% do persist_docs(target_relation, model) %} {{ return({'relations': [target_relation]}) }} {%- endmaterialization %} {% macro trino__get_delete_insert_merge_sql(target, source, unique_key, dest_columns, incremental_predicates) -%} {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute="name")) -%} {% if unique_key %} {% if unique_key is sequence and unique_key is not string %} delete from {{ target }} where exists ( select 1 from {{ source }} where {% for key in unique_key %} {{ target }}.{{ key }} = {{ source }}.{{ key }} {{ "and " if not loop.last }} {% endfor %} ) {% if incremental_predicates %} {% for predicate in incremental_predicates %} and {{ predicate }} {% endfor %} {% endif %} ; {% else %} delete from {{ target }} where ( {{ unique_key }}) in ( select {{ unique_key }} from {{ source }} ) {%- if incremental_predicates %} {% for predicate in incremental_predicates %} and {{ predicate }} {% endfor %} {%- endif -%}; {% endif %} {% endif %} insert into {{ target }} ({{ dest_cols_csv }}) ( select {{ dest_cols_csv }} from {{ source }} ) {%- endmacro %} {% macro trino__get_merge_sql(target, source, unique_key, dest_columns, incremental_predicates) -%} {%- set predicates = [] if incremental_predicates is none else [] + incremental_predicates -%} {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute="name")) -%} {%- set dest_cols_csv_source = dest_cols_csv.split(', ') -%} {%- set merge_update_columns = config.get('merge_update_columns') -%} {%- set merge_exclude_columns = config.get('merge_exclude_columns') -%} {%- set update_columns = get_merge_update_columns(merge_update_columns, merge_exclude_columns, dest_columns) -%} {%- set sql_header = config.get('sql_header', none) -%} {% if unique_key %} {% if unique_key is sequence and unique_key is not mapping and unique_key is not string %} {% for key in unique_key %} {% set this_key_match %} DBT_INTERNAL_SOURCE.{{ key }} = DBT_INTERNAL_DEST.{{ key }} {% endset %} {% do predicates.append(this_key_match) %} {% endfor %} {% else %} {% set unique_key_match %} DBT_INTERNAL_SOURCE.{{ unique_key }} = DBT_INTERNAL_DEST.{{ unique_key }} {% endset %} {% do predicates.append(unique_key_match) %} {% endif %} {{ sql_header if sql_header is not none }} merge into {{ target }} as DBT_INTERNAL_DEST using {{ source }} as DBT_INTERNAL_SOURCE on {{"(" ~ predicates | join(") and (") ~ ")"}} {% if unique_key %} when matched then update set {% for column_name in update_columns -%} {{ column_name }} = DBT_INTERNAL_SOURCE.{{ column_name }} {%- if not loop.last %}, {%- endif %} {%- endfor %} {% endif %} when not matched then insert ({{ dest_cols_csv }}) values ({% for dest_cols in dest_cols_csv_source -%} 
DBT_INTERNAL_SOURCE.{{ dest_cols }} {%- if not loop.last %}, {% endif %} {%- endfor %}) {% else %} insert into {{ target }} ({{ dest_cols_csv }}) ( select {{ dest_cols_csv }} from {{ source }} ) {% endif %} {% endmacro %} {% macro trino__get_incremental_microbatch_sql(arg_dict) %} {%- set target = arg_dict["target_relation"] -%} {%- set source = arg_dict["temp_relation"] -%} {%- set dest_columns = arg_dict["dest_columns"] -%} {%- set incremental_predicates = [] if arg_dict.get('incremental_predicates') is none else arg_dict.get('incremental_predicates') -%} {#-- Add additional incremental_predicates to filter for batch --#} {% if model.config.get("__dbt_internal_microbatch_event_time_start") -%} {% do incremental_predicates.append(model.config.event_time ~ " >= TIMESTAMP '" ~ model.config.__dbt_internal_microbatch_event_time_start ~ "'") %} {% endif %} {% if model.config.get("__dbt_internal_microbatch_event_time_end") -%} {% do incremental_predicates.append(model.config.event_time ~ " < TIMESTAMP '" ~ model.config.__dbt_internal_microbatch_event_time_end ~ "'") %} {% endif %} {% do arg_dict.update({'incremental_predicates': incremental_predicates}) %} delete from {{ target }} where ( {% for predicate in incremental_predicates %} {%- if not loop.first %}and {% endif -%} {{ predicate }} {% endfor %} ); {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute="name")) -%} insert into {{ target }} ({{ dest_cols_csv }}) ( select {{ dest_cols_csv }} from {{ source }} ) {% endmacro %} ================================================ FILE: dbt/include/trino/macros/materializations/materialized_view.sql ================================================ {%- macro trino__get_create_materialized_view_as_sql(target_relation, sql) -%} create materialized view {{ target_relation }} {%- set grace_period = config.get('grace_period') %} {%- if grace_period is not none %} grace period {{ grace_period }} {%- endif %} {{ properties() }} as {{ sql }} ; {%- endmacro -%} {% macro trino__get_replace_materialized_view_as_sql(relation, sql, existing_relation, backup_relation, intermediate_relation) %} {{- trino__get_create_materialized_view_as_sql(intermediate_relation, sql) }} {% if existing_relation is not none %} {{ log("Found a " ~ existing_relation.type ~ " with the same name. Will drop it", info=true) }} alter {{ existing_relation.type|replace("_", " ") }} {{ existing_relation }} rename to {{ backup_relation }}; {% endif %} alter materialized view {{ intermediate_relation }} rename to {{ relation }}; {% endmacro %} {#-- Applying materialized view configuration changes via alter is not supported. --#} {#-- Return None, so `refresh_materialized_view` macro is invoked even --#} {#-- if materialized view configuration changes are made. --#} {#-- After a configuration change, a full refresh needs to be performed on the materialized view.
--#} {% macro trino__get_materialized_view_configuration_changes(existing_relation, new_config) %} {% do return(None) %} {% endmacro %} {%- macro trino__refresh_materialized_view(relation) -%} refresh materialized view {{ relation }} {%- endmacro -%} ================================================ FILE: dbt/include/trino/macros/materializations/seeds/helpers.sql ================================================ {% macro trino__get_batch_size() %} {{ return(1000) }} {% endmacro %} {% macro create_bindings(row, types) %} {% set values = [] %} {% set re = modules.re %} {%- for item in row -%} {%- set type = types[loop.index0] -%} {%- set match_type = re.match("(\w+)(\(.*\))?", type) -%} {%- if item is not none and item is string and 'interval' in match_type.group(1) -%} {%- do values.append((none, match_type.group(1).upper() ~ " " ~ item)) -%} {%- elif item is not none and item is string and 'varchar' not in type.lower() -%} {%- do values.append((none, match_type.group(1).upper() ~ " '" ~ item ~ "'")) -%} {%- elif item is not none and 'varchar' in type.lower() -%} {%- do values.append((get_binding_char(), item|string())) -%} {%- else -%} {%- do values.append((get_binding_char(), item)) -%} {% endif -%} {%- endfor -%} {{ return(values) }} {% endmacro %} {# We need to override the default__load_csv_rows macro as Trino requires values to be typed according to the column type as in following example: create table "memory"."default"."string_type" ("varchar_example" varchar,"varchar_n_example" varchar(10),"char_example" char,"char_n_example" char(10),"varbinary_example" varbinary,"json_example" json) insert into "memory"."default"."string_type" ("varchar_example", "varchar_n_example", "char_example", "char_n_example", "varbinary_example", "json_example") values ('test','abc',CHAR 'd',CHAR 'ghi',VARBINARY '65683F',JSON '{"k1":1,"k2":23,"k3":456}'),(NULL,NULL,NULL,NULL,NULL,NULL) Usually seed row's values through agate_table's data type detection and come through as python types, in this case typing is handled by using bindings in `ConnectionWrapper.execute`. However dbt also allows you to override the data types of the created table through setting `column_types`, this case is handled here where we have the type information of the seed table. 
#} {% macro trino__load_csv_rows(model, agate_table) %} {% set column_override = model['config'].get('column_types', {}) %} {% set types = [] %} {%- for col_name in agate_table.column_names -%} {%- set inferred_type = adapter.convert_type(agate_table, loop.index0) -%} {%- set type = column_override.get(col_name, inferred_type) -%} {%- do types.append(type) -%} {%- endfor -%} {% set batch_size = get_batch_size() %} {% set cols_sql = get_seed_column_quoted_csv(model, agate_table.column_names) %} {% set bindings = [] %} {% set statements = [] %} {% for chunk in agate_table.rows | batch(batch_size) %} {% set bindings = [] %} {% set sql %} insert into {{ this.render() }} ({{ cols_sql }}) values {% for row in chunk -%} ({%- for tuple in create_bindings(row, types) -%} {%- if tuple.0 is not none -%} {{ tuple.0 }} {%- do bindings.append(tuple.1) -%} {%- else -%} {{ tuple.1 }} {%- endif -%} {%- if not loop.last%},{%- endif %} {%- endfor -%}) {%- if not loop.last%},{%- endif %} {%- endfor %} {% endset %} {% do adapter.add_query(sql, bindings=bindings, abridge_sql_log=True) %} {% if loop.index0 == 0 %} {% do statements.append(sql) %} {% endif %} {% endfor %} {# Return SQL so we can render it out into the compiled files #} {{ return(statements[0]) }} {% endmacro %} ================================================ FILE: dbt/include/trino/macros/materializations/snapshot.sql ================================================ {% materialization snapshot, adapter='trino' %} {% if config.get('properties') %} {% if config.get('properties').get('location') %} {%- do exceptions.raise_compiler_error("Specifying 'location' property in snapshots is not supported.") -%} {% endif %} {% endif %} {{ return(materialization_snapshot_default()) }} {% endmaterialization %} {% macro trino__snapshot_hash_arguments(args) -%} lower(to_hex(md5(to_utf8(concat({%- for arg in args -%} coalesce(cast({{ arg }} as varchar), ''){% if not loop.last %}, '|',{% endif -%} {%- endfor -%} ))))) {%- endmacro %} {% macro trino__post_snapshot(staging_relation) %} -- Clean up the snapshot temp table {% do drop_relation(staging_relation) %} {% endmacro %} {% macro trino__snapshot_merge_sql(target, source, insert_cols) -%} {%- set insert_cols_csv = insert_cols | join(', ') -%} {%- set columns = config.get("snapshot_table_column_names") or get_snapshot_table_column_names() -%} merge into {{ target.render() }} as DBT_INTERNAL_DEST using {{ source }} as DBT_INTERNAL_SOURCE on DBT_INTERNAL_SOURCE.{{ columns.dbt_scd_id }} = DBT_INTERNAL_DEST.{{ columns.dbt_scd_id }} when matched {% if config.get("dbt_valid_to_current") %} and (DBT_INTERNAL_DEST.{{ columns.dbt_valid_to }} = {{ config.get('dbt_valid_to_current') }} or DBT_INTERNAL_DEST.{{ columns.dbt_valid_to }} is null) {% else %} and DBT_INTERNAL_DEST.{{ columns.dbt_valid_to }} is null {% endif %} and DBT_INTERNAL_SOURCE.dbt_change_type in ('update', 'delete') then update set {{ columns.dbt_valid_to }} = DBT_INTERNAL_SOURCE.{{ columns.dbt_valid_to }} when not matched and DBT_INTERNAL_SOURCE.dbt_change_type = 'insert' then insert ({{ insert_cols_csv }}) values ({% for insert_col in insert_cols -%} DBT_INTERNAL_SOURCE.{{ insert_col }} {%- if not loop.last %}, {% endif %} {%- endfor %}) {% endmacro %} ================================================ FILE: dbt/include/trino/macros/materializations/table.sql ================================================ {% materialization table, adapter = 'trino' %} {%- set on_table_exists = config.get('on_table_exists', 'rename') -%} {% if on_table_exists not in 
['rename', 'drop', 'replace', 'skip'] %} {%- set log_message = 'Invalid value for on_table_exists (%s) specified. Setting default value (%s).' % (on_table_exists, 'rename') -%} {% do log(log_message) %} {%- set on_table_exists = 'rename' -%} {% endif %} {%- set existing_relation = load_cached_relation(this) -%} {%- set target_relation = this.incorporate(type='table') %} {% if on_table_exists == 'rename' %} {%- set intermediate_relation = make_intermediate_relation(target_relation) -%} -- the intermediate_relation should not already exist in the database; get_relation -- will return None in that case. Otherwise, we get a relation that we can drop -- later, before we try to use this name for the current operation {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) -%} {%- set backup_relation_type = 'table' if existing_relation is none else existing_relation.type -%} {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%} -- as above, the backup_relation should not already exist {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%} -- drop the temp relations if they exist already in the database {{ drop_relation_if_exists(preexisting_intermediate_relation) }} {{ drop_relation_if_exists(preexisting_backup_relation) }} {% endif %} {{ run_hooks(pre_hooks) }} -- grab current tables grants config for comparision later on {% set grant_config = config.get('grants') %} {#-- Create table with given `on_table_exists` mode #} {% do on_table_exists_logic(on_table_exists, existing_relation, intermediate_relation, backup_relation, target_relation) %} {% do persist_docs(target_relation, model) %} {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %} {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %} {{ run_hooks(post_hooks) }} {{ return({'relations': [target_relation]}) }} {% endmaterialization %} {% macro on_table_exists_logic(on_table_exists, existing_relation, intermediate_relation, backup_relation, target_relation) -%} {#-- Create table with given `on_table_exists` mode #} {% if on_table_exists == 'rename' %} {#-- table does not exists #} {% if existing_relation is none %} {% call statement('main') -%} {{ create_table_as(False, target_relation, sql) }} {%- endcall %} {#-- table does exists #} {% else %} {#-- build modeldock #} {% call statement('main') -%} {{ create_table_as(False, intermediate_relation, sql) }} {%- endcall %} {#-- cleanup #} {{ adapter.rename_relation(existing_relation, backup_relation) }} {{ adapter.rename_relation(intermediate_relation, target_relation) }} {#-- finally, drop the existing/backup relation after the commit #} {{ drop_relation_if_exists(backup_relation) }} {% endif %} {% elif on_table_exists == 'drop' %} {#-- cleanup #} {%- if existing_relation is not none -%} {{ adapter.drop_relation(existing_relation) }} {%- endif -%} {#-- build model #} {% call statement('main') -%} {{ create_table_as(False, target_relation, sql) }} {%- endcall %} {% elif on_table_exists == 'replace' %} {#-- build model #} {% call statement('main') -%} {{ create_table_as(False, target_relation, sql, 'replace') }} {%- endcall %} {% elif on_table_exists == 'skip' %} {#-- build model #} {% call statement('main') -%} {{ create_table_as(False, target_relation, sql, 'skip') }} {%- endcall %} {% endif %} {% endmacro %} ================================================ FILE: dbt/include/trino/macros/materializations/view.sql 
================================================ {% materialization view, adapter='trino' -%} {% set to_return = create_or_replace_view() %} {% set target_relation = this.incorporate(type='view') %} {% do persist_docs(target_relation, model) %} {% do return(to_return) %} {%- endmaterialization %} ================================================ FILE: dbt/include/trino/macros/utils/any_value.sql ================================================ {% macro trino__any_value(expression) -%} min({{ expression }}) {%- endmacro %} ================================================ FILE: dbt/include/trino/macros/utils/array_append.sql ================================================ {% macro trino__array_append(array, new_element) -%} {{ array_concat(array, array_construct([new_element])) }} {%- endmacro %} ================================================ FILE: dbt/include/trino/macros/utils/array_concat.sql ================================================ {% macro trino__array_concat(array_1, array_2) -%} concat({{ array_1 }}, {{ array_2 }}) {%- endmacro %} ================================================ FILE: dbt/include/trino/macros/utils/array_construct.sql ================================================ {% macro trino__array_construct(inputs, data_type) -%} {%- if not inputs -%} null {%- else -%} array[ {{ inputs|join(' , ') }} ] {%- endif -%} {%- endmacro %} ================================================ FILE: dbt/include/trino/macros/utils/bool_or.sql ================================================ {% macro trino__bool_or(expression) -%} bool_or({{ expression }}) {%- endmacro %} ================================================ FILE: dbt/include/trino/macros/utils/datatypes.sql ================================================ {% macro trino__type_float() -%} double {%- endmacro %} {% macro trino__type_string() -%} varchar {%- endmacro %} {% macro trino__type_numeric() -%} decimal(28, 6) {%- endmacro %} {%- macro trino__type_int() -%} integer {%- endmacro -%} ================================================ FILE: dbt/include/trino/macros/utils/date_spine.sql ================================================ {% macro trino__date_spine(datepart, start_date, end_date) %} {# call as follows: date_spine( "day", "to_date('01/01/2016', 'mm/dd/yyyy')", "dbt.dateadd(week, 1, current_date)" ) #} with rawdata as ( {{dbt.generate_series( dbt.get_intervals_between(start_date, end_date, datepart) )}} ), all_periods as ( select ( {{ dbt.dateadd( datepart, "row_number() over (order by 1) - 1", "cast(" ~ start_date ~ " as date)" ) }} ) as date_{{datepart}} from rawdata ), filtered as ( select * from all_periods where date_{{datepart}} <= cast({{ end_date }} as date) ) select * from filtered {% endmacro %} ================================================ FILE: dbt/include/trino/macros/utils/date_trunc.sql ================================================ {% macro trino__date_trunc(datepart, date) -%} date_trunc('{{datepart}}', {{date}}) {%- endmacro %} ================================================ FILE: dbt/include/trino/macros/utils/dateadd.sql ================================================ {% macro trino__dateadd(datepart, interval, from_date_or_timestamp) -%} date_add('{{ datepart }}', {{ interval }}, {{ from_date_or_timestamp }}) {%- endmacro %} ================================================ FILE: dbt/include/trino/macros/utils/datediff.sql ================================================ {% macro trino__datediff(first_date, second_date, datepart) -%} {%- if datepart == 'year' -%} (year(CAST({{ 
second_date }} AS TIMESTAMP)) - year(CAST({{ first_date }} AS TIMESTAMP))) {%- elif datepart == 'quarter' -%} ({{ datediff(first_date, second_date, 'year') }} * 4) + quarter(CAST({{ second_date }} AS TIMESTAMP)) - quarter(CAST({{ first_date }} AS TIMESTAMP)) {%- elif datepart == 'month' -%} ({{ datediff(first_date, second_date, 'year') }} * 12) + month(CAST({{ second_date }} AS TIMESTAMP)) - month(CAST({{ first_date }} AS TIMESTAMP)) {%- elif datepart == 'day' -%} ((to_milliseconds((CAST(CAST({{ second_date }} AS TIMESTAMP) AS DATE) - CAST(CAST({{ first_date }} AS TIMESTAMP) AS DATE)))) / 86400000) {%- elif datepart == 'week' -%} ({{ datediff(first_date, second_date, 'day') }} / 7 + case when dow(CAST({{first_date}} AS TIMESTAMP)) <= dow(CAST({{second_date}} AS TIMESTAMP)) then case when {{first_date}} <= {{second_date}} then 0 else -1 end else case when {{first_date}} <= {{second_date}} then 1 else 0 end end) {%- elif datepart == 'hour' -%} ({{ datediff(first_date, second_date, 'day') }} * 24 + hour(CAST({{ second_date }} AS TIMESTAMP)) - hour(CAST({{ first_date }} AS TIMESTAMP))) {%- elif datepart == 'minute' -%} ({{ datediff(first_date, second_date, 'hour') }} * 60 + minute(CAST({{ second_date }} AS TIMESTAMP)) - minute(CAST({{ first_date }} AS TIMESTAMP))) {%- elif datepart == 'second' -%} ({{ datediff(first_date, second_date, 'minute') }} * 60 + second(CAST({{ second_date }} AS TIMESTAMP)) - second(CAST({{ first_date }} AS TIMESTAMP))) {%- elif datepart == 'millisecond' -%} (to_milliseconds((CAST({{ second_date }} AS TIMESTAMP) - CAST({{ first_date }} AS TIMESTAMP)))) {%- else -%} {% if execute %}{{ exceptions.raise_compiler_error("Unsupported datepart for macro datediff in Trino: {!r}".format(datepart)) }}{% endif %} {%- endif -%} {%- endmacro %} ================================================ FILE: dbt/include/trino/macros/utils/hash.sql ================================================ {% macro trino__hash(field) -%} lower(to_hex(md5(to_utf8(cast({{field}} as varchar))))) {%- endmacro %} ================================================ FILE: dbt/include/trino/macros/utils/listagg.sql ================================================ {% macro trino__listagg(measure, delimiter_text, order_by_clause, limit_num) -%} {% set collect_list %} array_agg({{ measure }} {% if order_by_clause -%}{{ order_by_clause }}{%- endif %}) {% endset %} {% set limited %} slice({{ collect_list }}, 1, {{ limit_num }}) {% endset %} {% set collected = limited if limit_num else collect_list %} {% set final %} array_join({{ collected }}, {{ delimiter_text }}) {% endset %} {% do return(final) %} {%- endmacro %} ================================================ FILE: dbt/include/trino/macros/utils/right.sql ================================================ {% macro trino__right(string_text, length_expression) %} case when {{ length_expression }} = 0 then '' else substr({{ string_text }}, -1 * ({{ length_expression }})) end {%- endmacro -%} ================================================ FILE: dbt/include/trino/macros/utils/safe_cast.sql ================================================ {% macro trino__safe_cast(field, type) -%} try_cast({{field}} as {{type}}) {%- endmacro %} ================================================ FILE: dbt/include/trino/macros/utils/split_part.sql ================================================ {% macro trino__split_part(string_text, delimiter_text, part_number) %} {% if part_number >= 0 %} {{ dbt.default__split_part(string_text, delimiter_text, part_number) }} {% else %} {{ 
dbt._split_part_negative(string_text, delimiter_text, part_number) }} {% endif %} {% endmacro %} ================================================ FILE: dbt/include/trino/macros/utils/timestamps.sql ================================================ {% macro trino__current_timestamp() -%} current_timestamp {%- endmacro %} {% macro trino__snapshot_string_as_time(timestamp) %} {%- set result = "timestamp '" ~ timestamp ~ "'" -%} {{ return(result) }} {% endmacro %} ================================================ FILE: dbt/include/trino/sample_profiles.yml ================================================ default: outputs: dev: type: trino method: none # optional, one of {none | ldap | kerberos} user: [dev_user] password: [password] # required if method is ldap or kerberos database: [database name] host: [hostname] port: [port number] schema: [dev_schema] threads: [1 or more] prod: type: trino method: none # optional, one of {none | ldap | kerberos} user: [prod_user] password: [prod_password] # required if method is ldap or kerberos database: [database name] host: [hostname] port: [port number] schema: [prod_schema] threads: [1 or more] target: dev ================================================ FILE: dev_requirements.txt ================================================ dbt-tests-adapter~=1.19.1 mypy==1.19.1 # patch updates have historically introduced breaking changes pre-commit~=4.3 pytest~=8.4 tox~=4.30 ================================================ FILE: docker/init_starburst.bash ================================================ #!/bin/bash # move to wherever we are so docker things work cd "$(dirname "${BASH_SOURCE[0]}")" cd .. set -exo pipefail docker compose -f docker-compose-starburst.yml build docker compose -f docker-compose-starburst.yml up -d --quiet-pull timeout 5m bash -c -- 'while ! docker compose -f docker-compose-starburst.yml logs trino 2>&1 | tail -n 1 | grep "SERVER STARTED"; do sleep 2; done' ================================================ FILE: docker/init_trino.bash ================================================ #!/bin/bash # move to wherever we are so docker things work cd "$(dirname "${BASH_SOURCE[0]}")" cd .. set -exo pipefail docker compose -f docker-compose-trino.yml build docker compose -f docker-compose-trino.yml up -d --quiet-pull timeout 5m bash -c -- 'while ! docker compose -f docker-compose-trino.yml logs trino 2>&1 | tail -n 1 | grep "SERVER STARTED"; do sleep 2; done' ================================================ FILE: docker/remove_starburst.bash ================================================ #!/bin/bash # move to wherever we are so docker things work cd "$(dirname "${BASH_SOURCE[0]}")" cd .. docker compose -f docker-compose-starburst.yml down ================================================ FILE: docker/remove_trino.bash ================================================ #!/bin/bash # move to wherever we are so docker things work cd "$(dirname "${BASH_SOURCE[0]}")" cd .. 
docker compose -f docker-compose-trino.yml down ================================================ FILE: docker/starburst/catalog/delta.properties ================================================ connector.name=delta-lake delta.enable-non-concurrent-writes=true fs.native-s3.enabled=true s3.region=us-east-1 s3.endpoint=http://minio:9000 s3.path-style-access=true hive.metastore.uri=thrift://hive-metastore:9083 s3.aws-access-key=minio s3.aws-secret-key=minio123 hive.metastore-cache-ttl=0s hive.metastore-refresh-interval=5s delta.security=allow-all ================================================ FILE: docker/starburst/catalog/hive.properties ================================================ connector.name=hive hive.metastore.uri=thrift://hive-metastore:9083 fs.native-s3.enabled=true s3.region=us-east-1 s3.endpoint=http://minio:9000 s3.path-style-access=true s3.aws-access-key=minio s3.aws-secret-key=minio123 hive.metastore-cache-ttl=0s hive.metastore-refresh-interval=5s hive.security=sql-standard ================================================ FILE: docker/starburst/catalog/iceberg.properties ================================================ connector.name=iceberg hive.metastore.uri=thrift://hive-metastore:9083 fs.native-s3.enabled=true s3.region=us-east-1 s3.endpoint=http://minio:9000 s3.path-style-access=true s3.aws-access-key=minio s3.aws-secret-key=minio123 hive.metastore-cache-ttl=0s hive.metastore-refresh-interval=5s iceberg.unique-table-location=true ================================================ FILE: docker/starburst/catalog/memory.properties ================================================ connector.name=memory memory.max-data-per-node=128MB ================================================ FILE: docker/starburst/catalog/postgresql.properties ================================================ connector.name=postgresql connection-url=jdbc:postgresql://postgres:5432/dbt-trino connection-user=dbt-trino connection-password=dbt-trino ================================================ FILE: docker/starburst/catalog/tpch.properties ================================================ connector.name=tpch ================================================ FILE: docker/starburst/etc/config.properties ================================================ coordinator=true node-scheduler.include-coordinator=true http-server.http.port=8080 discovery.uri=http://localhost:8080 ================================================ FILE: docker/starburst/etc/jvm.config ================================================ -server -XX:InitialRAMPercentage=80 -XX:MaxRAMPercentage=80 -XX:G1HeapRegionSize=32M -XX:+ExplicitGCInvokesConcurrent -XX:+HeapDumpOnOutOfMemoryError -XX:+ExitOnOutOfMemoryError -XX:-OmitStackTraceInFastThrow -XX:ReservedCodeCacheSize=256M -XX:PerMethodRecompilationCutoff=10000 -XX:PerBytecodeRecompilationCutoff=10000 -Djdk.attach.allowAttachSelf=true -Djdk.nio.maxCachedBufferSize=2000000 ================================================ FILE: docker/starburst/etc/node.properties ================================================ node.environment=docker node.data-dir=/data/starburst ================================================ FILE: docker/trino/catalog/delta.properties ================================================ connector.name=delta-lake delta.enable-non-concurrent-writes=true fs.native-s3.enabled=true s3.region=us-east-1 s3.endpoint=http://minio:9000 s3.path-style-access=true hive.metastore.uri=thrift://hive-metastore:9083 s3.aws-access-key=minio s3.aws-secret-key=minio123 hive.metastore-cache-ttl=0s 
hive.metastore-refresh-interval=5s ================================================ FILE: docker/trino/catalog/hive.properties ================================================ connector.name=hive hive.metastore.uri=thrift://hive-metastore:9083 fs.native-s3.enabled=true s3.region=us-east-1 s3.endpoint=http://minio:9000 s3.path-style-access=true s3.aws-access-key=minio s3.aws-secret-key=minio123 hive.metastore-cache-ttl=0s hive.metastore-refresh-interval=5s hive.security=sql-standard ================================================ FILE: docker/trino/catalog/iceberg.properties ================================================ connector.name=iceberg hive.metastore.uri=thrift://hive-metastore:9083 fs.native-s3.enabled=true s3.region=us-east-1 s3.endpoint=http://minio:9000 s3.path-style-access=true s3.aws-access-key=minio s3.aws-secret-key=minio123 hive.metastore-cache-ttl=0s hive.metastore-refresh-interval=5s ================================================ FILE: docker/trino/catalog/memory.properties ================================================ connector.name=memory memory.max-data-per-node=128MB ================================================ FILE: docker/trino/catalog/postgresql.properties ================================================ connector.name=postgresql connection-url=jdbc:postgresql://postgres:5432/dbt-trino connection-user=dbt-trino connection-password=dbt-trino ================================================ FILE: docker/trino/catalog/tpch.properties ================================================ connector.name=tpch ================================================ FILE: docker/trino/etc/config.properties ================================================ coordinator=true node-scheduler.include-coordinator=true http-server.http.port=8080 discovery.uri=http://localhost:8080 ================================================ FILE: docker/trino/etc/jvm.config ================================================ -server -XX:InitialRAMPercentage=80 -XX:MaxRAMPercentage=80 -XX:G1HeapRegionSize=32M -XX:+ExplicitGCInvokesConcurrent -XX:+HeapDumpOnOutOfMemoryError -XX:+ExitOnOutOfMemoryError -XX:-OmitStackTraceInFastThrow -XX:ReservedCodeCacheSize=256M -XX:PerMethodRecompilationCutoff=10000 -XX:PerBytecodeRecompilationCutoff=10000 -Djdk.attach.allowAttachSelf=true -Djdk.nio.maxCachedBufferSize=2000000 ================================================ FILE: docker/trino/etc/node.properties ================================================ node.environment=docker node.data-dir=/data/trino ================================================ FILE: docker-compose-starburst.yml ================================================ services: trino: ports: - "8080:8080" image: "starburstdata/starburst-enterprise:477-e.1" volumes: - ./docker/starburst/etc:/etc/starburst - ./docker/starburst/catalog:/etc/starburst/catalog environment: - _JAVA_OPTIONS=-Dfile.encoding=UTF-8 postgres: ports: - "5432:5432" image: postgres:18 environment: POSTGRES_USER: dbt-trino POSTGRES_PASSWORD: dbt-trino metastore_db: image: postgres:18 hostname: metastore_db environment: POSTGRES_USER: hive POSTGRES_PASSWORD: hive POSTGRES_DB: metastore hive-metastore: hostname: hive-metastore image: 'starburstdata/hive:3.1.3-e.15' ports: - '9083:9083' # Metastore Thrift environment: HIVE_METASTORE_DRIVER: org.postgresql.Driver HIVE_METASTORE_JDBC_URL: jdbc:postgresql://metastore_db:5432/metastore HIVE_METASTORE_USER: hive HIVE_METASTORE_PASSWORD: hive HIVE_METASTORE_WAREHOUSE_DIR: s3://datalake/ S3_ENDPOINT: http://minio:9000 S3_ACCESS_KEY: 
minio S3_SECRET_KEY: minio123 S3_PATH_STYLE_ACCESS: "true" REGION: "" GOOGLE_CLOUD_KEY_FILE_PATH: "" AZURE_ADL_CLIENT_ID: "" AZURE_ADL_CREDENTIAL: "" AZURE_ADL_REFRESH_URL: "" AZURE_ABFS_STORAGE_ACCOUNT: "" AZURE_ABFS_ACCESS_KEY: "" AZURE_WASB_STORAGE_ACCOUNT: "" AZURE_ABFS_OAUTH: "" AZURE_ABFS_OAUTH_TOKEN_PROVIDER: "" AZURE_ABFS_OAUTH_CLIENT_ID: "" AZURE_ABFS_OAUTH_SECRET: "" AZURE_ABFS_OAUTH_ENDPOINT: "" AZURE_WASB_ACCESS_KEY: "" HIVE_METASTORE_USERS_IN_ADMIN_ROLE: "admin" depends_on: - metastore_db minio: hostname: minio image: 'minio/minio:RELEASE.2025-09-07T16-13-09Z' container_name: minio ports: - '9000:9000' - '9001:9001' environment: MINIO_ACCESS_KEY: minio MINIO_SECRET_KEY: minio123 command: server /data --console-address ":9001" # This job will create the "datalake" bucket on Minio mc-job: image: 'minio/mc:RELEASE.2025-04-16T18-13-26Z' entrypoint: | /bin/bash -c " sleep 5; /usr/bin/mc config --quiet host add myminio http://minio:9000 minio minio123; /usr/bin/mc mb --quiet myminio/datalake " depends_on: - minio networks: default: name: dbt-net external: true ================================================ FILE: docker-compose-trino.yml ================================================ services: trino: ports: - "8080:8080" image: "trinodb/trino:478" volumes: - ./docker/trino/etc:/usr/lib/trino/etc:ro - ./docker/trino/catalog:/etc/trino/catalog postgres: ports: - "5432:5432" image: postgres:18 container_name: postgres environment: POSTGRES_USER: dbt-trino POSTGRES_PASSWORD: dbt-trino metastore_db: image: postgres:18 hostname: metastore_db environment: POSTGRES_USER: hive POSTGRES_PASSWORD: hive POSTGRES_DB: metastore hive-metastore: hostname: hive-metastore image: 'starburstdata/hive:3.1.3-e.15' ports: - '9083:9083' # Metastore Thrift environment: HIVE_METASTORE_DRIVER: org.postgresql.Driver HIVE_METASTORE_JDBC_URL: jdbc:postgresql://metastore_db:5432/metastore HIVE_METASTORE_USER: hive HIVE_METASTORE_PASSWORD: hive HIVE_METASTORE_WAREHOUSE_DIR: s3://datalake/ S3_ENDPOINT: http://minio:9000 S3_ACCESS_KEY: minio S3_SECRET_KEY: minio123 S3_PATH_STYLE_ACCESS: "true" REGION: "" GOOGLE_CLOUD_KEY_FILE_PATH: "" AZURE_ADL_CLIENT_ID: "" AZURE_ADL_CREDENTIAL: "" AZURE_ADL_REFRESH_URL: "" AZURE_ABFS_STORAGE_ACCOUNT: "" AZURE_ABFS_ACCESS_KEY: "" AZURE_WASB_STORAGE_ACCOUNT: "" AZURE_ABFS_OAUTH: "" AZURE_ABFS_OAUTH_TOKEN_PROVIDER: "" AZURE_ABFS_OAUTH_CLIENT_ID: "" AZURE_ABFS_OAUTH_SECRET: "" AZURE_ABFS_OAUTH_ENDPOINT: "" AZURE_WASB_ACCESS_KEY: "" HIVE_METASTORE_USERS_IN_ADMIN_ROLE: "admin" depends_on: - metastore_db minio: hostname: minio image: 'minio/minio:RELEASE.2025-09-07T16-13-09Z' container_name: minio ports: - '9000:9000' - '9001:9001' environment: MINIO_ACCESS_KEY: minio MINIO_SECRET_KEY: minio123 command: server /data --console-address ":9001" # This job will create the "datalake" bucket on Minio mc-job: image: 'minio/mc:RELEASE.2025-04-16T18-13-26Z' entrypoint: | /bin/bash -c " sleep 5; /usr/bin/mc config --quiet host add myminio http://minio:9000 minio minio123; /usr/bin/mc mb --quiet myminio/datalake " depends_on: - minio networks: default: name: dbt-net external: true ================================================ FILE: mypy.ini ================================================ [mypy] namespace_packages = True explicit_package_bases = True ================================================ FILE: pytest.ini ================================================ [pytest] filterwarnings = ignore:.*'soft_unicode' has been renamed to 'soft_str'*:DeprecationWarning ignore:unclosed file 
.*:ResourceWarning testpaths = tests/unit tests/functional markers = delta iceberg hive postgresql prepared_statements_disabled skip_profile(profile) ================================================ FILE: setup.py ================================================ #!/usr/bin/env python import os import re import sys # require python 3.9 or newer if sys.version_info < (3, 9): print("Error: dbt does not support this version of Python.") print("Please upgrade to Python 3.9 or higher.") sys.exit(1) # require version of setuptools that supports find_namespace_packages from setuptools import setup try: from setuptools import find_namespace_packages except ImportError: # the user has a downlevel version of setuptools. print("Error: dbt requires setuptools v40.1.0 or higher.") print('Please upgrade setuptools with "pip install --upgrade setuptools" ' "and try again") sys.exit(1) this_directory = os.path.abspath(os.path.dirname(__file__)) with open(os.path.join(this_directory, "README.md")) as f: long_description = f.read() package_name = "dbt-trino" # get this package's version from dbt/adapters//__version__.py def _get_plugin_version_dict(): _version_path = os.path.join(this_directory, "dbt", "adapters", "trino", "__version__.py") _semver = r"""(?P\d+)\.(?P\d+)\.(?P\d+)""" _pre = r"""((?Pa|b|rc)(?P
\d+))?"""
    _version_pattern = rf"""version\s*=\s*["']{_semver}{_pre}["']"""
    with open(_version_path) as f:
        match = re.search(_version_pattern, f.read().strip())
        if match is None:
            raise ValueError(f"invalid version at {_version_path}")
        return match.groupdict()


def _dbt_trino_version():
    parts = _get_plugin_version_dict()
    trino_version = "{major}.{minor}.{patch}".format(**parts)
    if parts["prekind"] and parts["pre"]:
        trino_version += parts["prekind"] + parts["pre"]
    return trino_version
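

# Illustrative sketch (not part of the build): how the two helpers above combine, shown
# against a hypothetical __version__.py containing `version = "1.10.1rc1"`. The inline
# sample string and the function name `_example_parse_version` are assumptions added for
# demonstration only; the regex and reassembly logic mirror the functions above.
def _example_parse_version():
    sample = 'version = "1.10.1rc1"'
    semver = r"""(?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)"""
    pre = r"""((?P<prekind>a|b|rc)(?P<pre>\d+))?"""
    parts = re.search(rf"""version\s*=\s*["']{semver}{pre}["']""", sample).groupdict()
    # parts == {"major": "1", "minor": "10", "patch": "1", "prekind": "rc", "pre": "1"}
    version = "{major}.{minor}.{patch}".format(**parts)
    if parts["prekind"] and parts["pre"]:
        version += parts["prekind"] + parts["pre"]
    return version  # -> "1.10.1rc1"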


package_version = _dbt_trino_version()
description = """The trino adapter plugin for dbt (data build tool)"""

setup(
    name=package_name,
    version=package_version,
    description=description,
    long_description=long_description,
    long_description_content_type="text/markdown",
    platforms="any",
    license="Apache License 2.0",
    license_files=("LICENSE.txt",),
    author="Starburst Data",
    author_email="info@starburstdata.com",
    url="https://github.com/starburstdata/dbt-trino",
    packages=find_namespace_packages(include=["dbt", "dbt.*"]),
    package_data={
        "dbt": [
            "include/trino/dbt_project.yml",
            "include/trino/sample_profiles.yml",
            "include/trino/macros/*.sql",
            "include/trino/macros/*/*.sql",
            "include/trino/macros/*/*/*.sql",
        ]
    },
    install_requires=[
        "dbt-common>=1.25.0,<2.0",
        "dbt-adapters>=1.16,<2.0",
        "trino~=0.331",
        # add dbt-core to ensure backwards compatibility of installation; this is not a functional dependency
        "dbt-core>=1.8.0",
    ],
    zip_safe=False,
    classifiers=[
        "Development Status :: 5 - Production/Stable",
        "License :: OSI Approved :: Apache Software License",
        "Operating System :: Microsoft :: Windows",
        "Operating System :: MacOS :: MacOS X",
        "Operating System :: POSIX :: Linux",
        "Programming Language :: Python :: 3.9",
        "Programming Language :: Python :: 3.10",
        "Programming Language :: Python :: 3.11",
        "Programming Language :: Python :: 3.12",
        "Programming Language :: Python :: 3.13",
    ],
    python_requires=">=3.9",
)


================================================
FILE: tests/conftest.py
================================================
import os

import pytest
import trino

# Import the functional fixtures as a plugin
# Note: fixtures with session scope need to be local

pytest_plugins = ["dbt.tests.fixtures.project"]


def pytest_addoption(parser):
    parser.addoption("--profile", action="store", default="trino_starburst", type=str)


# Skip tests for profiles marked with @pytest.mark.skip_profile
# See pytest docs for skipping based on command-line options:
# https://docs.pytest.org/en/latest/example/simple.html#control-skipping-of-tests-according-to-command-line-option
def pytest_collection_modifyitems(config, items):
    profile_type = config.getoption("--profile")
    for item in items:
        if skip_profile_marker := item.get_closest_marker("skip_profile"):
            if profile_type in skip_profile_marker.args:
                skip_profile = pytest.mark.skip(reason=f"skipped on {profile_type} profile")
                item.add_marker(skip_profile)
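
# Illustrative usage (hypothetical test, not part of this suite): with the hook above, a
# test marked as below is skipped whenever the suite runs with
# `pytest --profile starburst_galaxy`, and runs normally under the default
# `trino_starburst` profile.
#
#     @pytest.mark.skip_profile("starburst_galaxy")
#     def test_only_on_local_trino(project):
#         ...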


# The profile dictionary, used to write out profiles.yml
@pytest.fixture(scope="class")
def dbt_profile_target(request):
    profile_type = request.config.getoption("--profile")
    if profile_type == "trino_starburst":
        target = get_trino_starburst_target()
    elif profile_type == "starburst_galaxy":
        target = get_galaxy_target()
    else:
        raise ValueError(f"Invalid profile type '{profile_type}'")

    prepared_statements_disabled = request.node.get_closest_marker("prepared_statements_disabled")
    if prepared_statements_disabled:
        target.update({"prepared_statements_enabled": False})

    postgresql = request.node.get_closest_marker("postgresql")
    iceberg = request.node.get_closest_marker("iceberg")
    delta = request.node.get_closest_marker("delta")
    hive = request.node.get_closest_marker("hive")

    if sum(bool(x) for x in (postgresql, iceberg, delta)) > 1:
        raise ValueError("Only one of postgresql, iceberg, delta can be specified as a marker")

    if postgresql:
        target.update({"catalog": "postgresql"})

    if delta:
        target.update({"catalog": "delta"})

    if iceberg:
        target.update({"catalog": "iceberg"})

    if hive:
        target.update({"catalog": "hive"})

    return target
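
# For example (an assumed illustration, not an additional fixture): a test class marked
# with `@pytest.mark.iceberg` under the default profile receives
# get_trino_starburst_target() with only the catalog rewritten, i.e.
# {..., "catalog": "iceberg", "schema": "default", ...}; adding
# `@pytest.mark.prepared_statements_disabled` would also merge in
# {"prepared_statements_enabled": False}.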


def get_trino_starburst_target():
    return {
        "type": "trino",
        "method": "none",
        "threads": 4,
        "host": "localhost",
        "port": 8080,
        "user": "admin",
        "password": "",
        "roles": {
            "hive": "admin",
        },
        "catalog": "memory",
        "schema": "default",
        "timezone": "UTC",
    }


def get_galaxy_target():
    return {
        "type": "trino",
        "method": "ldap",
        "threads": 4,
        "retries": 5,
        "host": os.environ.get("DBT_TESTS_STARBURST_GALAXY_HOST"),
        "port": 443,
        "user": os.environ.get("DBT_TESTS_STARBURST_GALAXY_USER"),
        "password": os.environ.get("DBT_TESTS_STARBURST_GALAXY_PASSWORD"),
        "catalog": "iceberg",
        "schema": "default",
        "timezone": "UTC",
    }


@pytest.fixture(scope="class")
def trino_connection(dbt_profile_target):
    if dbt_profile_target["method"] == "ldap":
        return trino.dbapi.connect(
            host=dbt_profile_target["host"],
            port=dbt_profile_target["port"],
            auth=trino.auth.BasicAuthentication(
                dbt_profile_target["user"], dbt_profile_target["password"]
            ),
            catalog=dbt_profile_target["catalog"],
            schema=dbt_profile_target["schema"],
            http_scheme="https",
        )
    else:
        return trino.dbapi.connect(
            host=dbt_profile_target["host"],
            port=dbt_profile_target["port"],
            user=dbt_profile_target["user"],
            catalog=dbt_profile_target["catalog"],
            schema=dbt_profile_target["schema"],
        )
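
# Illustrative usage (hypothetical test, not part of this suite): the fixture returns a
# DB-API connection, so a test can issue ad-hoc SQL directly, much as get_engine_type
# does below:
#
#     def test_server_reachable(trino_connection):
#         cur = trino_connection.cursor()
#         cur.execute("SELECT 1")
#         assert cur.fetchone()[0] == 1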


def get_engine_type(trino_connection):
    conn = trino_connection
    if "galaxy.starburst.io" in conn.host:
        return "starburst_galaxy"
    cur = conn.cursor()
    cur.execute("SELECT version()")
    version = cur.fetchone()
    if "-e" in version[0]:
        return "starburst_enterprise"
    else:
        return "trino"


@pytest.fixture(autouse=True)
def skip_by_engine_type(request, trino_connection):
    engine_type = get_engine_type(trino_connection)
    if request.node.get_closest_marker("skip_engine"):
        for skip_engine_type in request.node.get_closest_marker("skip_engine").args:
            if skip_engine_type == engine_type:
                pytest.skip(f"skipped on {engine_type} engine")


================================================
FILE: tests/functional/adapter/behavior_flags/test_require_certificate_validation.py
================================================
import warnings

import pytest
from dbt.tests.util import run_dbt, run_dbt_and_capture
from urllib3.exceptions import InsecureRequestWarning


class TestRequireCertificateValidationDefault:
    @pytest.fixture(scope="class")
    def project_config_update(self):
        return {"flags": {}}

    def test_cert_default_value(self, project):
        assert project.adapter.connections.profile.credentials.cert is None

    def test_require_certificate_validation_logs(self, project):
        dbt_args = ["show", "--inline", "select 1"]
        _, logs = run_dbt_and_capture(dbt_args)
        assert "It is strongly advised to enable `require_certificate_validation` flag" in logs

    @pytest.mark.skip_profile("trino_starburst")
    def test_require_certificate_validation_insecure_request_warning(self, project):
        with warnings.catch_warnings(record=True) as w:
            dbt_args = ["show", "--inline", "select 1"]
            run_dbt(dbt_args)

            # Check if any InsecureRequestWarning was raised
            assert any(
                issubclass(warning.category, InsecureRequestWarning) for warning in w
            ), "InsecureRequestWarning was not raised"


class TestRequireCertificateValidationFalse:
    @pytest.fixture(scope="class")
    def project_config_update(self):
        return {"flags": {"require_certificate_validation": False}}

    def test_cert_default_value(self, project):
        assert project.adapter.connections.profile.credentials.cert is None

    def test_require_certificate_validation_logs(self, project):
        dbt_args = ["show", "--inline", "select 1"]
        _, logs = run_dbt_and_capture(dbt_args)
        assert "It is strongly advised to enable `require_certificate_validation` flag" in logs

    @pytest.mark.skip_profile("trino_starburst")
    def test_require_certificate_validation_insecure_request_warning(self, project):
        with warnings.catch_warnings(record=True) as w:
            dbt_args = ["show", "--inline", "select 1"]
            run_dbt(dbt_args)

            # Check if any InsecureRequestWarning was raised
            assert any(
                issubclass(warning.category, InsecureRequestWarning) for warning in w
            ), "InsecureRequestWarning was not raised"


class TestRequireCertificateValidationTrue:
    @pytest.fixture(scope="class")
    def project_config_update(self):
        return {"flags": {"require_certificate_validation": True}}

    def test_cert_default_value(self, project):
        assert project.adapter.connections.profile.credentials.cert is True

    def test_require_certificate_validation_logs(self, project):
        dbt_args = ["show", "--inline", "select 1"]
        _, logs = run_dbt_and_capture(dbt_args)
        assert "It is strongly advised to enable `require_certificate_validation` flag" not in logs

    @pytest.mark.skip_profile("trino_starburst")
    def test_require_certificate_validation_insecure_request_warning(self, project):
        with warnings.catch_warnings(record=True) as w:
            dbt_args = ["show", "--inline", "select 1"]
            run_dbt(dbt_args)

            # Check that no InsecureRequestWarning was raised
            assert not any(
                issubclass(warning.category, InsecureRequestWarning) for warning in w
            ), "InsecureRequestWarning was unexpectedly raised"


================================================
FILE: tests/functional/adapter/catalog_integrations/fixtures.py
================================================
MODEL_WITHOUT_CATALOG = """
{{ config(
    materialized='table',
) }}

select 1 as id, 'test' as name
"""

MODEL_WITH_CATALOG = """
{{ config(
    materialized='table',
    catalog_name='test_trino_catalog'
) }}

select 1 as id, 'test' as name
"""

MODEL_WITH_CATALOG_CONFIGS_TABLE_FORMAT = """
{{ config(
    materialized='table',
    catalog_name='test_trino_catalog',
    table_format='delta',
) }}

select 1 as id, 'test' as name
"""

MODEL_WITH_CATALOG_CONFIGS_FILE_FORMAT = """
{{ config(
    materialized='table',
    catalog_name='test_trino_catalog',
    file_format='parquet',
) }}

select 1 as id, 'test' as name
"""

MODEL_WITH_CATALOG_CONFIGS_LOCATION = """
{{ config(
    materialized='table',
    catalog_name='test_trino_catalog',
    storage_uri='s3://datalake/storage_uri',
    properties= {
        'location': "'s3://datalake/location'",
    }
) }}

select 1 as id, 'test' as name
"""

MODEL_WITH_CATALOG_CONFIGS_STORAGE_URI = """
{{ config(
    materialized='table',
    catalog_name='test_trino_catalog',
    storage_uri='s3://datalake/storage_uri',
) }}

select 1 as id, 'test' as name
"""

MODEL_WITH_CATALOG_CONFIGS_BASE_LOCATION = """
{{ config(
    materialized='table',
    catalog_name='test_trino_catalog',
    base_location_root='foo',
    base_location_subpath='bar',
) }}

select 1 as id, 'test' as name
"""

MODEL_WITH_CATALOG_CONFIGS_BASE_LOCATION_NONE = """
{{ config(
    materialized='table',
    catalog_name='test_trino_catalog',
    base_location_root=None,
) }}

select 1 as id, 'test' as name
"""

MODEL_WITH_CATALOG_CONFIGS_BASE_LOCATION_NONE_OMIT_BASE_LOCATION_ROOT = """
{{ config(
    materialized='table',
    catalog_name='test_trino_catalog',
    base_location_root=None,
    omit_base_location_root=true,
) }}

select 1 as id, 'test' as name
"""


================================================
FILE: tests/functional/adapter/catalog_integrations/test_catalog_integration.py
================================================
import pytest
from dbt.tests.adapter.catalog_integrations.test_catalog_integration import (
    BaseCatalogIntegrationValidation,
)
from dbt.tests.util import run_dbt_and_capture, write_file

from tests.functional.adapter.catalog_integrations.fixtures import (
    MODEL_WITH_CATALOG,
    MODEL_WITH_CATALOG_CONFIGS_BASE_LOCATION,
    MODEL_WITH_CATALOG_CONFIGS_BASE_LOCATION_NONE,
    MODEL_WITH_CATALOG_CONFIGS_BASE_LOCATION_NONE_OMIT_BASE_LOCATION_ROOT,
    MODEL_WITH_CATALOG_CONFIGS_FILE_FORMAT,
    MODEL_WITH_CATALOG_CONFIGS_LOCATION,
    MODEL_WITH_CATALOG_CONFIGS_STORAGE_URI,
    MODEL_WITH_CATALOG_CONFIGS_TABLE_FORMAT,
    MODEL_WITHOUT_CATALOG,
)


@pytest.mark.iceberg
class TestTrinoCatalogIntegrationFileFormat(BaseCatalogIntegrationValidation):
    @pytest.fixture(scope="class")
    def catalogs(self):
        return {
            "catalogs": [
                {
                    "name": "test_trino_catalog",
                    "active_write_integration": "trino_integration",
                    "write_integrations": [
                        {
                            "name": "trino_integration",
                            "catalog_type": "trino",
                            "file_format": "orc",
                        }
                    ],
                }
            ]
        }

    def test_model_without_catalog(self, project):
        # Create model without catalog configuration
        write_file(MODEL_WITHOUT_CATALOG, project.project_root, "models", "test_model.sql")
        results, logs = run_dbt_and_capture(["--debug", "run"], expect_pass=True)
        assert len(results) == 1
        assert "CREATE TABLE" in logs
        assert "WITH (" not in logs

    def test_model_with_catalog(self, project):
        # Create model with catalog configuration
        write_file(MODEL_WITH_CATALOG, project.project_root, "models", "test_model.sql")
        results, logs = run_dbt_and_capture(["--debug", "run"], expect_pass=True)
        assert len(results) == 1
        assert "CREATE TABLE" in logs
        assert "WITH (" in logs
        assert "format = 'orc'" in logs

    def test_model_with_catalog_configs_file_format(self, project):
        # Create model with catalog configuration
        write_file(
            MODEL_WITH_CATALOG_CONFIGS_FILE_FORMAT,
            project.project_root,
            "models",
            "test_model.sql",
        )
        results, logs = run_dbt_and_capture(["--debug", "run"], expect_pass=True)
        assert len(results) == 1
        assert "CREATE TABLE" in logs
        assert "WITH (" in logs
        assert "format = 'parquet'" in logs


@pytest.mark.iceberg
# Setting the `type` property is available only in Starburst Galaxy
# https://docs.starburst.io/starburst-galaxy/data-engineering/working-with-data-lakes/table-formats/gl-iceberg.html
@pytest.mark.skip_profile("trino_starburst")
class TestMyAdapterCatalogIntegration(BaseCatalogIntegrationValidation):
    @pytest.fixture(scope="class")
    def catalogs(self):
        return {
            "catalogs": [
                {
                    "name": "test_trino_catalog",
                    "active_write_integration": "trino_integration",
                    "write_integrations": [
                        {
                            "name": "trino_integration",
                            "catalog_type": "trino",
                            "table_format": "iceberg",
                        }
                    ],
                }
            ]
        }

    def test_model_with_catalog(self, project):
        # Create model with catalog configuration
        write_file(MODEL_WITH_CATALOG, project.project_root, "models", "test_model.sql")
        results, logs = run_dbt_and_capture(["--debug", "run"], expect_pass=True)
        assert len(results) == 1
        assert "CREATE TABLE" in logs
        assert "WITH (" in logs
        assert "type = 'iceberg'" in logs

    def test_model_with_catalog_configs_table_format(self, project):
        # Create model with catalog configuration
        write_file(
            MODEL_WITH_CATALOG_CONFIGS_TABLE_FORMAT,
            project.project_root,
            "models",
            "test_model.sql",
        )
        results, logs = run_dbt_and_capture(["--debug", "run"], expect_pass=True)
        assert len(results) == 1
        assert "CREATE TABLE" in logs
        assert "WITH (" in logs
        assert "type = 'delta'" in logs


@pytest.mark.iceberg
@pytest.mark.skip_profile("starburst_galaxy")
class TestTrinoCatalogIntegrationLocation(BaseCatalogIntegrationValidation):
    @pytest.fixture(scope="class")
    def catalogs(self):
        return {
            "catalogs": [
                {
                    "name": "test_trino_catalog",
                    "active_write_integration": "trino_integration",
                    "write_integrations": [
                        {
                            "name": "trino_integration",
                            "catalog_type": "trino",
                            "external_volume": "s3://datalake",
                        }
                    ],
                }
            ]
        }

    def test_model_with_catalog(self, project):
        # Create model with catalog configuration
        write_file(MODEL_WITH_CATALOG, project.project_root, "models", "test_model.sql")
        results, logs = run_dbt_and_capture(["--debug", "run"], expect_pass=True)
        assert len(results) == 1
        assert "CREATE TABLE" in logs
        assert "WITH (" in logs
        assert f"location = 's3://datalake/_dbt/{project.test_schema}/test_model'" in logs

    def test_model_with_catalog_configs_location(self, project):
        # Create model with catalog configuration
        write_file(
            MODEL_WITH_CATALOG_CONFIGS_LOCATION, project.project_root, "models", "test_model.sql"
        )
        results, logs = run_dbt_and_capture(["--debug", "run"], expect_pass=True)
        assert len(results) == 1
        assert "CREATE TABLE" in logs
        assert "WITH (" in logs
        assert "location = 's3://datalake/location'" in logs

    def test_model_with_catalog_configs_storage_uri(self, project):
        # Create model with catalog configuration
        write_file(
            MODEL_WITH_CATALOG_CONFIGS_STORAGE_URI,
            project.project_root,
            "models",
            "test_model.sql",
        )
        results, logs = run_dbt_and_capture(["--debug", "run"], expect_pass=True)
        assert len(results) == 1
        assert "CREATE TABLE" in logs
        assert "WITH (" in logs
        assert "location = 's3://datalake/storage_uri'" in logs

    def test_model_with_catalog_configs_base_location(self, project):
        # Create model with catalog configuration
        write_file(
            MODEL_WITH_CATALOG_CONFIGS_BASE_LOCATION,
            project.project_root,
            "models",
            "test_model.sql",
        )
        results, logs = run_dbt_and_capture(["--debug", "run"], expect_pass=True)
        assert len(results) == 1
        assert "CREATE TABLE" in logs
        assert "WITH (" in logs
        assert f"location = 's3://datalake/foo/{project.test_schema}/test_model/bar'" in logs

    def test_model_with_catalog_configs_base_location_none(self, project):
        # Create model with catalog configuration
        write_file(
            MODEL_WITH_CATALOG_CONFIGS_BASE_LOCATION_NONE,
            project.project_root,
            "models",
            "test_model.sql",
        )
        results, logs = run_dbt_and_capture(["--debug", "run"], expect_pass=True)
        assert len(results) == 1
        assert "CREATE TABLE" in logs
        assert "WITH (" in logs
        assert f"location = 's3://datalake/_dbt/{project.test_schema}/test_model'" in logs

    def test_model_with_catalog_configs_base_location_none_omit_base_location_root(self, project):
        # Create model with catalog configuration
        write_file(
            MODEL_WITH_CATALOG_CONFIGS_BASE_LOCATION_NONE_OMIT_BASE_LOCATION_ROOT,
            project.project_root,
            "models",
            "test_model.sql",
        )
        results, logs = run_dbt_and_capture(["--debug", "run"], expect_pass=True)
        assert len(results) == 1
        assert "CREATE TABLE" in logs
        assert "WITH (" in logs
        assert f"location = 's3://datalake/{project.test_schema}/test_model'" in logs


================================================
FILE: tests/functional/adapter/column_types/fixtures.py
================================================
model_sql = """
select
    cast(0 as tinyint) as tinyint_col,
    cast(1 as smallint) as smallint_col,
    cast(2 as integer) as integer_col,
    cast(2 as int) as int_col,
    cast(3 as bigint) as bigint_col,
    cast(4.0 as real) as real_col,
    cast(5.0 as double) as double_col,
    cast(5.5 as double precision) as double_precision_col,
    cast(6.0 as decimal) as decimal_col,
    cast('7' as char) as char_col,
    cast('8' as varchar(20)) as varchar_col
"""

schema_yml = """
version: 2
models:
  - name: model
    tests:
      - is_type:
          column_map:
            tinyint_col: ['integer', 'number']
            smallint_col: ['integer', 'number']
            integer_col: ['integer', 'number']
            int_col: ['integer', 'number']
            bigint_col: ['integer', 'number']
            real_col: ['float', 'number']
            double_col: ['float', 'number']
            double_precision_col: ['float', 'number']
            decimal_col: ['numeric', 'number']
            char_col: ['string', 'not number']
            varchar_col: ['string', 'not number']
"""


================================================
FILE: tests/functional/adapter/column_types/test_column_types.py
================================================
import pytest
from dbt.tests.adapter.column_types.test_column_types import BaseColumnTypes

from tests.functional.adapter.column_types.fixtures import model_sql, schema_yml


class TestTrinoColumnTypes(BaseColumnTypes):
    @pytest.fixture(scope="class")
    def models(self):
        return {"model.sql": model_sql, "schema.yml": schema_yml}

    def test_run_and_test(self, project):
        self.run_and_test()


================================================
FILE: tests/functional/adapter/constraints/fixtures.py
================================================
trino_model_contract_sql_header_sql = """
{{
  config(
    materialized = "table"
  )
}}

{% call set_sql_header(config) %}
set time zone 'Asia/Kolkata';
{%- endcall %}
select current_timezone() as column_name
"""

trino_model_incremental_contract_sql_header_sql = """
{{
  config(
    materialized = "incremental",
    on_schema_change="append_new_columns"
  )
}}

{% call set_sql_header(config) %}
set time zone 'Asia/Kolkata';
{%- endcall %}
select current_timezone() as column_name
"""

trino_model_schema_yml = """
version: 2
models:
  - name: my_model
    config:
      contract:
        enforced: true
    columns:
      - name: id
        quote: true
        data_type: integer
        description: hello
        constraints:
          - type: not_null
          - type: check
            expression: (id > 0)
        tests:
          - unique
      - name: color
        data_type: varchar
      - name: date_day
        data_type: varchar
  - name: my_model_error
    config:
      contract:
        enforced: true
    columns:
      - name: id
        data_type: integer
        description: hello
        constraints:
          - type: not_null
          - type: check
            expression: (id > 0)
        tests:
          - unique
      - name: color
        data_type: varchar
      - name: date_day
        data_type: varchar
  - name: my_model_wrong_order
    config:
      contract:
        enforced: true
    columns:
      - name: id
        data_type: integer
        description: hello
        constraints:
          - type: not_null
          - type: check
            expression: (id > 0)
        tests:
          - unique
      - name: color
        data_type: varchar
      - name: date_day
        data_type: varchar
  - name: my_model_wrong_name
    config:
      contract:
        enforced: true
    columns:
      - name: id
        data_type: integer
        description: hello
        constraints:
          - type: not_null
          - type: check
            expression: (id > 0)
        tests:
          - unique
      - name: color
        data_type: varchar
      - name: date_day
        data_type: varchar
"""

trino_constrained_model_schema_yml = """
version: 2
models:
  - name: my_model
    config:
      contract:
        enforced: true
    constraints:
      - type: check
        expression: (id > 0)
      - type: primary_key
        columns: [ id ]
      - type: unique
        columns: [ color, date_day ]
        name: strange_uniqueness_requirement
    columns:
      - name: id
        quote: true
        data_type: integer
        description: hello
        constraints:
          - type: not_null
        tests:
          - unique
      - name: color
        data_type: varchar
      - name: date_day
        data_type: varchar
"""

trino_model_quoted_column_schema_yml = """
version: 2
models:
  - name: my_model
    config:
      contract:
        enforced: true
      materialized: table
    constraints:
      - type: check
        # quoting the reserved word in this expression is on the user
        expression: ("from" = 'blue')
        columns: [ '"from"' ]
    columns:
      - name: id
        data_type: integer
        description: hello
        constraints:
          - type: not_null
        tests:
          - unique
      - name: from  # reserved word
        quote: true
        data_type: varchar
        constraints:
          - type: not_null
      - name: date_day
        data_type: varchar
"""

trino_model_contract_header_schema_yml = """
version: 2
models:
  - name: my_model_contract_sql_header
    config:
      contract:
        enforced: true
    columns:
      - name: column_name
        data_type: varchar
"""


================================================
FILE: tests/functional/adapter/constraints/test_constraints.py
================================================
import pytest
from dbt.tests.adapter.constraints.fixtures import (
    my_incremental_model_sql,
    my_model_incremental_wrong_name_sql,
    my_model_incremental_wrong_order_sql,
    my_model_sql,
    my_model_view_wrong_name_sql,
    my_model_view_wrong_order_sql,
    my_model_with_quoted_column_name_sql,
    my_model_wrong_name_sql,
    my_model_wrong_order_sql,
)
from dbt.tests.adapter.constraints.test_constraints import (
    BaseConstraintQuotedColumn,
    BaseConstraintsRollback,
    BaseConstraintsRuntimeDdlEnforcement,
    BaseIncrementalConstraintsColumnsEqual,
    BaseIncrementalConstraintsRollback,
    BaseIncrementalConstraintsRuntimeDdlEnforcement,
    BaseIncrementalContractSqlHeader,
    BaseModelConstraintsRuntimeEnforcement,
    BaseTableConstraintsColumnsEqual,
    BaseTableContractSqlHeader,
    BaseViewConstraintsColumnsEqual,
)

from tests.functional.adapter.constraints.fixtures import (
    trino_constrained_model_schema_yml,
    trino_model_contract_header_schema_yml,
    trino_model_contract_sql_header_sql,
    trino_model_incremental_contract_sql_header_sql,
    trino_model_quoted_column_schema_yml,
    trino_model_schema_yml,
)
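
# Expected create/insert sequence for the runtime DDL-enforcement tests below;
# the blank relation-name slots are intentional, matching the normalized SQL
# (relation identifiers removed) that the base tests compare against.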

_expected_sql_trino = """
create table  (
    "id" integer not null,
    color varchar,
    date_day varchar
) ;
insert into 
(
    select
        "id",
        color,
        date_day from
    (
        select
            'blue' as color,
            1 as id,
            '2019-01-01' as date_day
    ) as model_subq
)
;
"""


class TrinoColumnEqualSetup:
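    """Mixin providing the Trino-specific type fixtures shared by the
    *ConstraintsColumnsEqual tests below."""
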
    @pytest.fixture
    def string_type(self):
        return "VARCHAR"

    @pytest.fixture
    def data_types(self, schema_int_type, int_type, string_type):
        # sql_column_value, schema_data_type, error_data_type
        return [
            ["1", schema_int_type, int_type],
            ["'1'", string_type, string_type],
            ["cast('2019-01-01' as date)", "date", "DATE"],
            ["true", "boolean", "BOOLEAN"],
            ["cast('2013-11-03 00:00:00-07' as TIMESTAMP)", "timestamp(6)", "TIMESTAMP"],
            [
                "cast('2013-11-03 00:00:00-07' as TIMESTAMP WITH TIME ZONE)",
                "timestamp(6)",
                "TIMESTAMP",
            ],
            ["ARRAY['a','b','c']", "ARRAY(VARCHAR)", "ARRAY"],
            ["ARRAY[1,2,3]", "ARRAY(INTEGER)", "ARRAY"],
            ["cast('1' as DECIMAL)", "DECIMAL", "DECIMAL"],
        ]


@pytest.mark.iceberg
class TestTrinoTableConstraintsColumnsEqual(
    TrinoColumnEqualSetup, BaseTableConstraintsColumnsEqual
):
    @pytest.fixture(scope="class")
    def models(self):
        return {
            "my_model_wrong_order.sql": my_model_wrong_order_sql,
            "my_model_wrong_name.sql": my_model_wrong_name_sql,
            "constraints_schema.yml": trino_model_schema_yml,
        }


class TestTrinoViewConstraintsColumnsEqual(TrinoColumnEqualSetup, BaseViewConstraintsColumnsEqual):
    @pytest.fixture(scope="class")
    def models(self):
        return {
            "my_model_wrong_order.sql": my_model_view_wrong_order_sql,
            "my_model_wrong_name.sql": my_model_view_wrong_name_sql,
            "constraints_schema.yml": trino_model_schema_yml,
        }


@pytest.mark.iceberg
class TestTrinoIncrementalConstraintsColumnsEqual(
    TrinoColumnEqualSetup, BaseIncrementalConstraintsColumnsEqual
):
    @pytest.fixture(scope="class")
    def models(self):
        return {
            "my_model_wrong_order.sql": my_model_incremental_wrong_order_sql,
            "my_model_wrong_name.sql": my_model_incremental_wrong_name_sql,
            "constraints_schema.yml": trino_model_schema_yml,
        }


@pytest.mark.iceberg
class TestTrinoTableConstraintsRuntimeDdlEnforcement(BaseConstraintsRuntimeDdlEnforcement):
    @pytest.fixture(scope="class")
    def models(self):
        return {
            "my_model.sql": my_model_wrong_order_sql,
            "constraints_schema.yml": trino_model_schema_yml,
        }

    @pytest.fixture(scope="class")
    def expected_sql(self):
        return _expected_sql_trino


@pytest.mark.iceberg
class TestTrinoTableConstraintsRollback(BaseConstraintsRollback):
    @pytest.fixture(scope="class")
    def models(self):
        return {
            "my_model.sql": my_model_sql,
            "constraints_schema.yml": trino_model_schema_yml,
        }

    @pytest.fixture(scope="class")
    def expected_error_messages(self):
        return ["NULL value not allowed for NOT NULL column: id"]


@pytest.mark.iceberg
class TestTrinoIncrementalConstraintsRuntimeDdlEnforcement(
    BaseIncrementalConstraintsRuntimeDdlEnforcement
):
    @pytest.fixture(scope="class")
    def models(self):
        return {
            "my_model.sql": my_model_incremental_wrong_order_sql,
            "constraints_schema.yml": trino_model_schema_yml,
        }

    @pytest.fixture(scope="class")
    def expected_sql(self):
        return _expected_sql_trino


@pytest.mark.iceberg
class TestTrinoIncrementalConstraintsRollback(BaseIncrementalConstraintsRollback):
    @pytest.fixture(scope="class")
    def models(self):
        return {
            "my_model.sql": my_incremental_model_sql,
            "constraints_schema.yml": trino_model_schema_yml,
        }

    @pytest.fixture(scope="class")
    def expected_error_messages(self):
        return ["NULL value not allowed for NOT NULL column: id"]


class TestTrinoTableContractSqlHeader(BaseTableContractSqlHeader):
    @pytest.fixture(scope="class")
    def models(self):
        return {
            "my_model_contract_sql_header.sql": trino_model_contract_sql_header_sql,
            "constraints_schema.yml": trino_model_contract_header_schema_yml,
        }


class TestTrinoIncrementalContractSqlHeader(BaseIncrementalContractSqlHeader):
    @pytest.fixture(scope="class")
    def models(self):
        return {
            "my_model_contract_sql_header.sql": trino_model_incremental_contract_sql_header_sql,
            "constraints_schema.yml": trino_model_contract_header_schema_yml,
        }


@pytest.mark.iceberg
class TestTrinoModelConstraintsRuntimeEnforcement(BaseModelConstraintsRuntimeEnforcement):
    @pytest.fixture(scope="class")
    def models(self):
        return {
            "my_model.sql": my_model_sql,
            "constraints_schema.yml": trino_constrained_model_schema_yml,
        }

    @pytest.fixture(scope="class")
    def expected_sql(self):
        return """
create table  (
    "id" integer not null,
    color varchar,
    date_day varchar
) ;
insert into 
(
    select
        "id",
        color,
        date_day from
    (
        select
            1 as id,
            'blue' as color,
            '2019-01-01' as date_day
    ) as model_subq
)
;
"""


@pytest.mark.iceberg
class TestTrinoConstraintQuotedColumn(BaseConstraintQuotedColumn):
    @pytest.fixture(scope="class")
    def models(self):
        return {
            "my_model.sql": my_model_with_quoted_column_name_sql,
            "constraints_schema.yml": trino_model_quoted_column_schema_yml,
        }

    @pytest.fixture(scope="class")
    def expected_sql(self):
        return """
create table  (
    id integer not null,
    "from" varchar not null,
    date_day varchar
) ;
insert into 
(
    select id, "from", date_day
    from (
        select
          'blue' as "from",
          1 as id,
          '2019-01-01' as date_day
    ) as model_subq
);
"""


================================================
FILE: tests/functional/adapter/dbt_clone/test_dbt_clone.py
================================================
import pytest
from dbt.tests.adapter.dbt_clone.fixtures import (
    custom_can_clone_tables_false_macros_sql,
    get_schema_name_sql,
    infinite_macros_sql,
    macros_sql,
)
from dbt.tests.adapter.dbt_clone.test_dbt_clone import BaseCloneNotPossible
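
# Trino's Iceberg connector stores timestamps at microsecond precision, so the
# override below pins current_timestamp to an explicit precision of 6.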

iceberg_macro_override_sql = """
{% macro trino__current_timestamp() -%}
    current_timestamp(6)
{%- endmacro %}
"""


class TestTrinoCloneNotPossible(BaseCloneNotPossible):
    @pytest.fixture(scope="class")
    def macros(self):
        return {
            "macros.sql": macros_sql,
            "my_can_clone_tables.sql": custom_can_clone_tables_false_macros_sql,
            "infinite_macros.sql": infinite_macros_sql,
            "get_schema_name.sql": get_schema_name_sql,
            "iceberg.sql": iceberg_macro_override_sql,
        }

    # TODO: the fixture below should probably be implemented in the base class (on the dbt-core side)
    @pytest.fixture(autouse=True)
    def clean_up(self, project):
        yield
        with project.adapter.connection_named("__test"):
            relation = project.adapter.Relation.create(
                database=project.database, schema=f"{project.test_schema}_seeds"
            )
            project.adapter.drop_schema(relation)

            relation = project.adapter.Relation.create(
                database=project.database, schema=project.test_schema
            )
            project.adapter.drop_schema(relation)


================================================
FILE: tests/functional/adapter/dbt_debug/test_dbt_debug.py
================================================
import pytest
from dbt.tests.adapter.dbt_debug.test_dbt_debug import (
    BaseDebug,
    BaseDebugProfileVariable,
)
from dbt.tests.util import run_dbt


class TestDebugTrino(BaseDebug):
    # TODO: the teardown method below should probably be implemented in the base class (on the dbt-core side)
    @pytest.fixture(scope="function", autouse=True)
    def teardown_method(self, project):
        yield
        project.run_sql(f"drop schema if exists {project.test_schema}")

    def test_ok_trino(self, project):
        run_dbt(["debug"])
        assert "ERROR" not in self.capsys.readouterr().out


class TestDebugProfileVariableTrino(BaseDebugProfileVariable):
    # TODO: the teardown method below should probably be implemented in the base class (on the dbt-core side)
    @pytest.fixture(scope="function", autouse=True)
    def teardown_method(self, project):
        yield
        project.run_sql(f"drop schema if exists {project.test_schema}")

    def test_ok_trino(self, project):
        run_dbt(["debug"])
        assert "ERROR" not in self.capsys.readouterr().out


================================================
FILE: tests/functional/adapter/dbt_show/test_dbt_show.py
================================================
from dbt.tests.adapter.dbt_show.test_dbt_show import BaseShowLimit, BaseShowSqlHeader


class TestTrinoShowSqlHeader(BaseShowSqlHeader):
    pass


class TestTrinoShowLimit(BaseShowLimit):
    pass


================================================
FILE: tests/functional/adapter/empty/test_empty.py
================================================
from dbt.tests.adapter.empty.test_empty import (
    BaseTestEmpty,
    BaseTestEmptyInlineSourceRef,
)


class TestTrinoEmpty(BaseTestEmpty):
    pass


class TestTrinoEmptyInlineSourceRef(BaseTestEmptyInlineSourceRef):
    pass


================================================
FILE: tests/functional/adapter/fixture_datediff.py
================================================
seeds__data_datediff_csv = """first_date,second_date,datepart,result
2018-01-01 01:00:00,2018-01-02 01:00:00,day,1
2018-01-01 01:00:00,2018-02-01 01:00:00,month,1
2018-01-01 01:00:00,2019-01-01 01:00:00,year,1
2018-01-01 01:00:00,2018-01-01 02:00:00,hour,1
2018-01-01 01:00:00,2018-01-01 02:01:00,minute,61
2018-01-01 01:00:00,2018-01-01 02:00:01,second,3601
2019-12-31 00:00:00,2019-12-27 00:00:00,week,-1
2019-12-31 00:00:00,2019-12-30 00:00:00,week,0
2019-12-31 00:00:00,2020-01-02 00:00:00,week,0
2019-12-31 00:00:00,2020-01-06 02:00:00,week,1
,2018-01-01 02:00:00,hour,
2018-01-01 02:00:00,,hour,
"""


models__test_datediff_sql = """
with data as (

    select * from {{ ref('data_datediff') }}

)

select

    case
        when datepart = 'second' then {{ datediff('first_date', 'second_date', 'second') }}
        when datepart = 'minute' then {{ datediff('first_date', 'second_date', 'minute') }}
        when datepart = 'hour' then {{ datediff('first_date', 'second_date', 'hour') }}
        when datepart = 'day' then {{ datediff('first_date', 'second_date', 'day') }}
        when datepart = 'week' then {{ datediff('first_date', 'second_date', 'week') }}
        when datepart = 'month' then {{ datediff('first_date', 'second_date', 'month') }}
        when datepart = 'year' then {{ datediff('first_date', 'second_date', 'year') }}
        else null
    end as actual,
    result as expected

from data

-- Also test correct casting of literal values.

union all select {{ datediff("'1999-12-31 23:59:59.999000'", "'2000-01-01 00:00:00.000000'", "millisecond") }} as actual, 1 as expected
union all select {{ datediff("'1999-12-31 23:59:59.999000'", "'2000-01-01 00:00:00.000000'", "second") }} as actual, 1 as expected
union all select {{ datediff("'1999-12-31 23:59:59.999000'", "'2000-01-01 00:00:00.000000'", "minute") }} as actual, 1 as expected
union all select {{ datediff("'1999-12-31 23:59:59.999000'", "'2000-01-01 00:00:00.000000'", "hour") }} as actual, 1 as expected
union all select {{ datediff("'1999-12-31 23:59:59.999000'", "'2000-01-01 00:00:00.000000'", "day") }} as actual, 1 as expected
union all select {{ datediff("'1999-12-31 23:59:59.999000'", "'2000-01-03 00:00:00.000000'", "week") }} as actual, 1 as expected
union all select {{ datediff("'1999-12-31 23:59:59.999000'", "'2000-01-01 00:00:00.000000'", "month") }} as actual, 1 as expected
union all select {{ datediff("'1999-12-31 23:59:59.999000'", "'2000-01-01 00:00:00.000000'", "quarter") }} as actual, 1 as expected
union all select {{ datediff("'1999-12-31 23:59:59.999000'", "'2000-01-01 00:00:00.000000'", "year") }} as actual, 1 as expected
"""


================================================
FILE: tests/functional/adapter/hooks/data/seed_model.sql
================================================
drop table if exists {schema}.on_model_hook;

create table {schema}.on_model_hook (
    test_state       VARCHAR, -- start|end
    target_dbname    VARCHAR,
    target_host      VARCHAR,
    target_name      VARCHAR,
    target_schema    VARCHAR,
    target_type      VARCHAR,
    target_user      VARCHAR,
    target_pass      VARCHAR,
    target_threads   INTEGER,
    run_started_at   VARCHAR,
    invocation_id    VARCHAR,
    thread_id        VARCHAR
);


================================================
FILE: tests/functional/adapter/hooks/data/seed_run.sql
================================================
drop table if exists {schema}.on_run_hook;

create table {schema}.on_run_hook (
    test_state       VARCHAR, -- start|end
    target_dbname    VARCHAR,
    target_host      VARCHAR,
    target_name      VARCHAR,
    target_schema    VARCHAR,
    target_type      VARCHAR,
    target_user      VARCHAR,
    target_pass      VARCHAR,
    target_threads   INTEGER,
    run_started_at   VARCHAR,
    invocation_id    VARCHAR,
    thread_id        VARCHAR
);


================================================
FILE: tests/functional/adapter/hooks/test_hooks_delete.py
================================================
# Test hooks with DELETE statement
import pytest
from dbt.tests.util import run_dbt, run_sql_with_adapter

seed = """
id,name,some_date
1,Easton,1981-05-20
2,Lillian,1978-09-03
3,Jeremiah,1982-03-11
4,Nolan,1976-05-06
5,Hannah,1982-06-23
6,Eleanor,1991-08-10
7,Lily,1971-03-29
8,Jonathan,1988-02-26
9,Adrian,1994-02-09
10,Nora,1976-03-01
""".lstrip()

model = """
  {{ config(
        materialized="table",
        on_table_exists = 'drop'
     )
  }}
  select * from {{ ref('seed') }}
"""


class BaseTestHooksDelete:
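    """Shared checks that model pre- and post-hooks can run DELETE statements;
    the connector-specific subclasses below select Delta or Iceberg via
    pytest marks."""
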
    @pytest.fixture(scope="class")
    def seeds(self):
        return {
            "seed.csv": seed,
        }

    @pytest.fixture(scope="class")
    def models(self):
        return {
            "model.sql": model,
        }

    @pytest.fixture(scope="class")
    def project_config_update(self):
        return {
            "models": {
                "pre-hook": "DELETE FROM seed WHERE name IN ('Jeremiah','Eleanor');",
                "post-hook": "DELETE FROM seed WHERE name IN ('Nolan','Jonathan','Nora');",
            }
        }

    def test_pre_and_post_run_hooks(self, project, dbt_profile_target):
        # Run seed
        results = run_dbt(["seed"], expect_pass=True)
        assert len(results) == 1

        # Check if table has all rows
        sql_seed = "SELECT COUNT(*) from seed"
        query_results = run_sql_with_adapter(project.adapter, sql_seed, fetch="all")
        assert query_results[0][0] == 10

        # Run model, hooks should run DELETE statements
        results = run_dbt(["run"], expect_pass=True)
        assert len(results) == 1

        # 2 rows were deleted in pre-hook
        sql_model = "SELECT COUNT(*) from model"
        query_results = run_sql_with_adapter(project.adapter, sql_model, fetch="all")
        assert query_results[0][0] == 8

        # 2 rows were deleted in pre-hook, and 3 in post-hook
        query_results = run_sql_with_adapter(project.adapter, sql_seed, fetch="all")
        assert query_results[0][0] == 5


@pytest.mark.delta
class TestBaseTestHooksDeleteDelta(BaseTestHooksDelete):
    pass


@pytest.mark.iceberg
class TestBaseTestHooksDeleteIceberg(BaseTestHooksDelete):
    pass


================================================
FILE: tests/functional/adapter/hooks/test_model_hooks.py
================================================
import pytest
from dbt.tests.adapter.hooks import test_model_hooks as core_base
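
# The check_hooks overrides below drop the base class's host-specific checks
# and rely on get_ctx_vars(...) to validate the captured hook context instead.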


class TestTrinoPrePostModelHooks(core_base.TestPrePostModelHooks):
    def check_hooks(self, state, project, host, count=1):
        self.get_ctx_vars(state, count=count, project=project)


class TestTrinoPrePostModelHooksUnderscores(core_base.TestPrePostModelHooksUnderscores):
    def check_hooks(self, state, project, host, count=1):
        self.get_ctx_vars(state, count=count, project=project)


class TestTrinoHookRefs(core_base.TestHookRefs):
    def check_hooks(self, state, project, host, count=1):
        self.get_ctx_vars(state, count=count, project=project)


@pytest.mark.iceberg
class TestTrinoPrePostModelHooksOnSeeds(core_base.TestPrePostModelHooksOnSeeds):
    def check_hooks(self, state, project, host, count=1):
        self.get_ctx_vars(state, count=count, project=project)

    @pytest.fixture(scope="class")
    def project_config_update(self):
        return {
            "seed-paths": ["seeds"],
            "models": {},
            "seeds": {
                "+post-hook": [
                    "alter table {{ this }} add column new_col int",
                    "update {{ this }} set new_col = 1 where 1=1",
                ],
                "quote_columns": True,
            },
        }


================================================
FILE: tests/functional/adapter/hooks/test_run_hooks.py
================================================
import pytest
from dbt.tests.adapter.hooks.test_run_hooks import (
    BaseAfterRunHooks,
    BasePrePostRunHooks,
)


class TestPrePostRunHooksTrino(BasePrePostRunHooks):
    @pytest.fixture(scope="class")
    def project_config_update(self):
        return {
            # The create and drop table statements here validate that these hooks run
            # in the same order that they are defined. Drop before create is an error.
            # Also check that the table does not exist below.
            "on-run-start": [
                "{{ custom_run_hook('start', target, run_started_at, invocation_id) }}",
                "create table {{ target.schema }}.start_hook_order_test ( id int )",
                "drop table {{ target.schema }}.start_hook_order_test",
                "{{ log(env_var('TERM_TEST'), info=True) }}",
            ],
            "on-run-end": [
                "{{ custom_run_hook('end', target, run_started_at, invocation_id) }}",
                "create table {{ target.schema }}.end_hook_order_test ( id int )",
                "drop table {{ target.schema }}.end_hook_order_test",
                "create table {{ target.schema }}.schemas ( schema varchar )",
                "insert into {{ target.schema }}.schemas (schema) values {% for schema in schemas %}( '{{ schema }}' ){% if not loop.last %},{% endif %}{% endfor %}",
                "create table {{ target.schema }}.db_schemas ( db varchar, schema varchar )",
                "insert into {{ target.schema }}.db_schemas (db, schema) values {% for db, schema in database_schemas %}('{{ db }}', '{{ schema }}' ){% if not loop.last %},{% endif %}{% endfor %}",
            ],
            "seeds": {
                "quote_columns": False,
            },
        }

    def check_hooks(self, state, project, host):
        ctx = self.get_ctx_vars(state, project)

        assert ctx["test_state"] == state
        assert ctx["target_dbname"] == ""
        assert ctx["target_host"] == host
        assert ctx["target_name"] == "default"
        assert ctx["target_schema"] == project.test_schema
        assert ctx["target_threads"] == 4
        assert ctx["target_type"] == project.adapter_type
        assert "admin" in ctx["target_user"]
        assert ctx["target_pass"] == ""

        assert (
            ctx["run_started_at"] is not None and len(ctx["run_started_at"]) > 0
        ), "run_started_at was not set"
        assert (
            ctx["invocation_id"] is not None and len(ctx["invocation_id"]) > 0
        ), "invocation_id was not set"


class TestAfterRunHooksTrino(BaseAfterRunHooks):
    pass


================================================
FILE: tests/functional/adapter/materialization/fixtures.py
================================================
seed_csv = """
id,name,some_date
1,Easton,1981-05-20 06:46:51
2,Lillian,1978-09-03 18:10:33
3,Jeremiah,1982-03-11 03:59:51
4,Nolan,1976-05-06 20:21:35
""".lstrip()

model_sql = """
select * from {{ ref('seed') }}
"""

model_cte_sql = """
with source_data as (
    select 1 as id, 'aaa' as field1, 'bbb' as field2, 111 as field3, 'TTT' as field4
)
select id
       ,field1
       ,field2
       ,field3
       ,field4
from source_data
"""


profile_yml = """
version: 2
models:
  - name: materialization
    columns:
      - name: id
        tests:
          - unique
          - not_null
      - name: name
        tests:
          - not_null
"""


schema_base_yml = """\
version: 2

models:
  - name: model_a
    columns:
      - name: id
        tests:
          - unique

  - name: incremental_ignore
    columns:
      - name: id
        tests:
          - unique

  - name: incremental_ignore_target
    columns:
      - name: id
        tests:
          - unique

  - name: incremental_append_new_columns
    columns:
      - name: id
        tests:
          - unique

  - name: incremental_append_new_columns_target
    columns:
      - name: id
        tests:
          - unique

  - name: incremental_append_new_columns_remove_one
    columns:
      - name: id
        tests:
          - unique

  - name: incremental_append_new_columns_remove_one_target
    columns:
      - name: id
        tests:
          - unique

  - name: incremental_sync_all_columns
    columns:
      - name: id
        tests:
          - unique

  - name: incremental_sync_all_columns_target
    columns:
      - name: id
        tests:
          - unique

  - name: incremental_sync_all_columns_quoted
    columns:
      - name: id
        tests:
          - unique

  - name: incremental_sync_all_columns_quoted_target
    columns:
      - name: id
        tests:
          - unique

  - name: incremental_sync_all_columns_diff_data_types
    columns:
      - name: id
        tests:
          - unique

  - name: incremental_sync_all_columns_diff_data_types_target
    columns:
      - name: id
        tests:
          - unique
"""

model_a_sql = """\
{{
    config(materialized='table')
}}

with source_data as (

    select 1 as id, 'aaa' as field1, 'bbb' as field2, 111 as field3, 'TTT' as field4
    union all select 2 as id, 'ccc' as field1, 'ddd' as field2, 222 as field3, 'UUU' as field4
    union all select 3 as id, 'eee' as field1, 'fff' as field2, 333 as field3, 'VVV' as field4
    union all select 4 as id, 'ggg' as field1, 'hhh' as field2, 444 as field3, 'WWW' as field4
    union all select 5 as id, 'iii' as field1, 'jjj' as field2, 555 as field3, 'XXX' as field4
    union all select 6 as id, 'kkk' as field1, 'lll' as field2, 666 as field3, 'YYY' as field4

)

select id
       ,field1
       ,field2
       ,field3
       ,field4

from source_data
"""

incremental_ignore_sql = """\
{{
    config(
        materialized='incremental',
        unique_key='id',
        on_schema_change='ignore'
    )
}}

WITH source_data AS (SELECT * FROM {{ ref('model_a') }} )

{% if is_incremental() %}

SELECT id, field1, field2, field3, field4 FROM source_data WHERE id NOT IN (SELECT id from {{ this }} )

{% else %}

SELECT id, field1, field2 FROM source_data LIMIT 3

{% endif %}
"""

incremental_ignore_target_sql = """\
{{
    config(materialized='table')
}}

with source_data as (

    select * from {{ ref('model_a') }}

)

select id
       ,field1
       ,field2

from source_data
"""

incremental_append_new_columns = """\
{{
    config(
        materialized='incremental',
        unique_key='id',
        on_schema_change='append_new_columns'
    )
}}

WITH source_data AS (SELECT * FROM {{ ref('model_a') }} )

{% if is_incremental()  %}

SELECT id,
       cast(field1 as varchar) as field1,
       cast(field2 as varchar) as field2,
       cast(field3 as varchar) as field3,
       cast(field4 as varchar) as field4
FROM source_data WHERE id NOT IN (SELECT id from {{ this }} )

{% else %}

SELECT id,
       cast(field1 as varchar) as field1,
       cast(field2 as varchar) as field2
FROM source_data where id <= 3

{% endif %}
"""

incremental_append_new_columns_target_sql = """\
{{
    config(materialized='table')
}}

with source_data as (

    select * from {{ ref('model_a') }}

)

select id
       ,cast(field1 as varchar) as field1
       ,cast(field2 as varchar) as field2
       ,cast(CASE WHEN id <= 3 THEN NULL ELSE field3 END as varchar) AS field3
       ,cast(CASE WHEN id <= 3 THEN NULL ELSE field4 END as varchar) AS field4

from source_data
"""

incremental_append_new_columns_remove_one_sql = """\
{{
    config(
        materialized='incremental',
        unique_key='id',
        on_schema_change='append_new_columns'
    )
}}

WITH source_data AS (SELECT * FROM {{ ref('model_a') }} )

{% if is_incremental()  %}

SELECT id,
       cast(field1 as varchar) as field1,
       cast(field3 as varchar) as field3,
       cast(field4 as varchar) as field4
FROM source_data WHERE id NOT IN (SELECT id from {{ this }} )

{% else %}

SELECT id,
       cast(field1 as varchar) as field1,
       cast(field2 as varchar) as field2
FROM source_data where id <= 3

{% endif %}
"""

incremental_append_new_columns_remove_one_target_sql = """\
{{
    config(materialized='table')
}}
with source_data as (

    select * from {{ ref('model_a') }}

)

select id,
       cast(field1 as varchar) as field1,
       cast(CASE WHEN id >  3 THEN NULL ELSE field2 END as varchar) AS field2,
       cast(CASE WHEN id <= 3 THEN NULL ELSE field3 END as varchar) AS field3,
       cast(CASE WHEN id <= 3 THEN NULL ELSE field4 END as varchar) AS field4

from source_data
"""


incremental_fail_sql = """\
{{
    config(
        materialized='incremental',
        unique_key='id',
        on_schema_change='fail'
    )
}}

WITH source_data AS (SELECT * FROM {{ ref('model_a') }} )

{% if is_incremental()  %}

SELECT id, field1, field2 FROM source_data

{% else %}

SELECT id, field1, field3 FROM source_data

{% endif %}
"""

incremental_sync_all_columns_sql = """\
{{
    config(
        materialized='incremental',
        unique_key='id',
        on_schema_change='sync_all_columns'

    )
}}

WITH source_data AS (SELECT * FROM {{ ref('model_a') }} )

{% if is_incremental() %}

SELECT id,
       cast(field1 as varchar) as field1,
       cast(field3 as varchar) as field3, -- to validate new fields
       cast(field4 as varchar) AS field4 -- to validate new fields

FROM source_data WHERE id NOT IN (SELECT id from {{ this }} )

{% else %}

select id,
       cast(field1 as varchar) as field1,
       cast(field2 as varchar) as field2

from source_data where id <= 3

{% endif %}
"""

incremental_sync_all_columns_target_sql = """\
{{
    config(materialized='table')
}}

with source_data as (

    select * from {{ ref('model_a') }}

)
select id
       ,cast(field1 as varchar) as field1
       --,field2
       ,cast(case when id <= 3 then null else field3 end as varchar) as field3
       ,cast(case when id <= 3 then null else field4 end as varchar) as field4

from source_data
order by id
"""

incremental_sync_all_columns_quoted_sql = """\
{{
    config(
        materialized='incremental',
        unique_key='id',
        on_schema_change='sync_all_columns'

    )
}}

WITH source_data AS (SELECT * FROM {{ ref('model_a') }} )

{% if is_incremental() %}

SELECT id,
       cast(field1 as varchar) as field1,
       cast(field3 as varchar) as "3field3", -- to validate new fields
       cast(field4 as varchar) AS "4field4" -- to validate new fields

FROM source_data WHERE id NOT IN (SELECT id from {{ this }} )

{% else %}

select id,
       cast(field1 as varchar) as field1,
       cast(field2 as varchar) as "2field2"

from source_data where id <= 3

{% endif %}
"""

incremental_sync_all_columns_quoted_target_sql = """\
{{
    config(materialized='table')
}}

with source_data as (

    select * from {{ ref('model_a') }}

)
select id
       ,cast(field1 as varchar) as field1
       --,field2
       ,cast(case when id <= 3 then null else field3 end as varchar) as "3field3"
       ,cast(case when id <= 3 then null else field4 end as varchar) as "4field4"

from source_data
order by id
"""

incremental_sync_all_columns_diff_data_types_sql = """\
{{
    config(
        materialized='incremental',
        unique_key='id',
        on_schema_change='sync_all_columns'
    )
}}

WITH source_data AS (SELECT * FROM {{ ref('model_a') }} )

{% if is_incremental() %}

SELECT id,
       cast(id as varchar) "field1" -- to validate data type changes

FROM source_data WHERE id NOT IN (SELECT id from {{ this }} )

{% else %}

select id,
       id "field1"

from source_data where id <= 3
order by id
{% endif %}
"""

incremental_sync_all_columns_diff_data_types_target_sql = """\
{{
    config(
        materialized='table'
    )
}}

WITH source_data AS (SELECT * FROM {{ ref('model_a') }} )

select id,
       cast(id as varchar) "field1"

from source_data
order by id
"""

select_from_a_sql = "select * from {{ ref('model_a') }} where false"

select_from_incremental_append_new_columns_sql = (
    "select * from {{ ref('incremental_append_new_columns') }} where false"
)

select_from_incremental_append_new_columns_remove_one_sql = (
    "select * from {{ ref('incremental_append_new_columns_remove_one') }} where false"
)

select_from_incremental_append_new_columns_remove_one_target_sql = (
    "select * from {{ ref('incremental_append_new_columns_remove_one_target') }} where false"
)

select_from_incremental_append_new_columns_target_sql = (
    "select * from {{ ref('incremental_append_new_columns_target') }} where false"
)

select_from_incremental_ignore_sql = "select * from {{ ref('incremental_ignore') }} where false"

select_from_incremental_ignore_target_sql = (
    "select * from {{ ref('incremental_ignore_target') }} where false"
)

select_from_incremental_sync_all_columns_sql = (
    "select * from {{ ref('incremental_sync_all_columns') }} where false"
)

select_from_incremental_sync_all_columns_target_sql = (
    "select * from {{ ref('incremental_sync_all_columns_target') }} where false"
)

select_from_incremental_sync_all_columns_quoted_sql = (
    "select * from {{ ref('incremental_sync_all_columns_quoted') }} where false"
)

select_from_incremental_sync_all_columns_quoted_target_sql = (
    "select * from {{ ref('incremental_sync_all_columns_quoted_target') }} where false"
)

select_from_incremental_sync_all_columns_diff_data_types_sql = (
    "select * from {{ ref('incremental_sync_all_columns_diff_data_types') }} where false"
)

select_from_incremental_sync_all_columns_diff_data_types_target_sql = (
    "select * from {{ ref('incremental_sync_all_columns_diff_data_types_target') }} where false"
)


================================================
FILE: tests/functional/adapter/materialization/test_incremental_delete_insert.py
================================================
import pytest
from dbt.tests.adapter.incremental.test_incremental_predicates import (
    BaseIncrementalPredicates,
    models__delete_insert_incremental_predicates_sql,
    seeds__expected_delete_insert_incremental_predicates_csv,
)
from dbt.tests.adapter.incremental.test_incremental_unique_id import (
    BaseIncrementalUniqueKey,
    models__duplicated_unary_unique_key_list_sql,
    models__empty_str_unique_key_sql,
    models__empty_unique_key_list_sql,
    models__no_unique_key_sql,
    models__nontyped_trinary_unique_key_list_sql,
    models__not_found_unique_key_list_sql,
    models__not_found_unique_key_sql,
    models__str_unique_key_sql,
    models__trinary_unique_key_list_sql,
    models__unary_unique_key_list_sql,
    seeds__seed_csv,
)
from dbt.tests.util import run_dbt_and_capture

seeds__duplicate_insert_sql = """
-- Insert statement which, when applied to seed.csv, triggers the in-place
--   overwrite strategy of incremental models. Seed and incremental model
--   diverge.

-- insert new row, which should not be in incremental model
--  with primary or first three columns unique
insert into {schema}.seed
    (state, county, city, last_visit_date)
values ('CT','Hartford','Hartford',DATE '2022-02-14');

"""

seeds__add_new_rows_sql = """
-- Insert statement which, when applied to seed.csv, makes the incremental
--   model grow in size while not (necessarily) diverging from the seed itself.

-- insert two new rows, both of which should be in incremental model
--   with any unique columns
insert into {schema}.seed
    (state, county, city, last_visit_date)
values ('WA','King','Seattle',DATE '2022-02-01');

insert into {schema}.seed
    (state, county, city, last_visit_date)
values ('CA','Los Angeles','Los Angeles',DATE '2022-02-01');

"""

models__expected__one_str__overwrite_sql = """
{{
    config(
        materialized='table'
    )
}}

select
    'CT' as state,
    'Hartford' as county,
    'Hartford' as city,
    cast('2022-02-14' as date) as last_visit_date
union all
select 'MA','Suffolk','Boston',DATE '2020-02-12'
union all
select 'NJ','Mercer','Trenton',DATE '2022-01-01'
union all
select 'NY','Kings','Brooklyn',DATE '2021-04-02'
union all
select 'NY','New York','Manhattan',DATE '2021-04-01'
union all
select 'PA','Philadelphia','Philadelphia',DATE '2021-05-21'
union all
select 'CO','Denver',null,DATE '2021-06-18'

"""

models__expected__unique_key_list__inplace_overwrite_sql = """
{{
    config(
        materialized='table'
    )
}}

select
    'CT' as state,
    'Hartford' as county,
    'Hartford' as city,
    cast('2022-02-14' as date) as last_visit_date
union all
select 'MA','Suffolk','Boston',DATE '2020-02-12'
union all
select 'NJ','Mercer','Trenton',DATE '2022-01-01'
union all
select 'NY','Kings','Brooklyn',DATE '2021-04-02'
union all
select 'NY','New York','Manhattan',DATE '2021-04-01'
union all
select 'PA','Philadelphia','Philadelphia',DATE '2021-05-21'
union all
select 'CO','Denver',null,DATE '2021-06-18'

"""

models__location_specified = """
{{
    config(
        materialized='incremental',
        incremental_strategy='delete+insert',
        unique_key=['state', 'county', 'city'],
        properties= {
            "location": "'s3a://datalake/model'"
        }
    )
}}

select
    'CT' as state,
    'Hartford' as county,
    'Hartford' as city,
    cast('2022-02-14' as date) as last_visit_date
union all
select 'MA','Suffolk','Boston',DATE '2020-02-12'
union all
select 'NJ','Mercer','Trenton',DATE '2022-01-01'
union all
select 'NY','Kings','Brooklyn',DATE '2021-04-02'
union all
select 'NY','New York','Manhattan',DATE '2021-04-01'
union all
select 'PA','Philadelphia','Philadelphia',DATE '2021-05-21'

"""

models__delete_insert_composite_keys_sql = """
{{
    config(
        materialized='incremental',
        incremental_strategy='delete+insert',
        unique_key=['id', 'col']
    )
}}
select 1 as id, 1 as col
union all
select 1 as id, 3 as col
union all
select 3 as id, 1 as col
union all
select 3 as id, 3 as col

{% if is_incremental() %}

except
(select 1 as id, 1 as col
union all
select 3 as id, 3 as col)

{% endif %}
"""

seeds__expected_delete_insert_composite_keys_csv = """id,col
1,1
1,3
3,1
3,3
"""


class TrinoIncrementalUniqueKey(BaseIncrementalUniqueKey):
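    """Binds the upstream unique-key models and seeds into this project; the
    marked subclasses below run them with the delete+insert strategy."""
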
    @pytest.fixture(scope="class")
    def seeds(self):
        return {
            "duplicate_insert.sql": seeds__duplicate_insert_sql,
            "seed.csv": seeds__seed_csv,
            "add_new_rows.sql": seeds__add_new_rows_sql,
        }

    @pytest.fixture(scope="class")
    def models(self):
        return {
            "trinary_unique_key_list.sql": models__trinary_unique_key_list_sql,
            "nontyped_trinary_unique_key_list.sql": models__nontyped_trinary_unique_key_list_sql,
            "unary_unique_key_list.sql": models__unary_unique_key_list_sql,
            "not_found_unique_key.sql": models__not_found_unique_key_sql,
            "empty_unique_key_list.sql": models__empty_unique_key_list_sql,
            "no_unique_key.sql": models__no_unique_key_sql,
            "empty_str_unique_key.sql": models__empty_str_unique_key_sql,
            "str_unique_key.sql": models__str_unique_key_sql,
            "duplicated_unary_unique_key_list.sql": models__duplicated_unary_unique_key_list_sql,
            "not_found_unique_key_list.sql": models__not_found_unique_key_list_sql,
            "expected": {
                "one_str__overwrite.sql": models__expected__one_str__overwrite_sql,
                "unique_key_list__inplace_overwrite.sql": models__expected__unique_key_list__inplace_overwrite_sql,
            },
        }


@pytest.mark.iceberg
class TestIcebergIncrementalDeleteInsert(TrinoIncrementalUniqueKey):
    @pytest.fixture(scope="class")
    def project_config_update(self):
        return {
            "name": "incremental",
            "models": {"+incremental_strategy": "delete+insert"},
            "seeds": {"incremental": {"seed": {"+column_types": {"some_date": "date"}}}},
        }


@pytest.mark.delta
class TestDeltaIncrementalDeleteInsert(TrinoIncrementalUniqueKey):
    def test__no_unique_keys(self, project):
        super().test__no_unique_keys(project)

    @pytest.fixture(scope="class")
    def project_config_update(self):
        return {
            "name": "incremental",
            "models": {"+on_table_exists": "drop", "+incremental_strategy": "delete+insert"},
            "seeds": {"incremental": {"seed": {"+column_types": {"some_date": "date"}}}},
        }


@pytest.mark.iceberg
@pytest.mark.skip_profile("starburst_galaxy")
class TestIcebergIncrementalDeleteInsertWithLocation:
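    """Checks that a user-supplied `location` table property is rewritten with
    the `__dbt_tmp` suffix for the temporary relation created on incremental
    re-runs."""
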
    @pytest.fixture(scope="class")
    def models(self):
        return {
            "model.sql": models__location_specified,
        }

    def test_temporary_table_location(self, project):
        # Create model with properties
        results, logs = run_dbt_and_capture(["--debug", "run"], expect_pass=True)
        assert len(results) == 1
        assert f'create table "{project.database}"."{project.test_schema}"."model"' in logs
        assert "location = 's3a://datalake/model'" in logs

        # Temporary table is created on the second run
        # So, now we check if the second run is successful and location
        # is patched correctly
        results, logs = run_dbt_and_capture(["--debug", "run"], expect_pass=True)
        assert len(results) == 1
        assert (
            f'create table "{project.database}"."{project.test_schema}"."model__dbt_tmp"' in logs
        )
        assert "location = 's3a://datalake/model__dbt_tmp'" in logs


@pytest.mark.iceberg
class TestIcebergCompositeUniqueKeys(BaseIncrementalPredicates):
    @pytest.fixture(scope="class")
    def seeds(self):
        return {
            "expected_delete_insert_incremental_predicates.csv": seeds__expected_delete_insert_incremental_predicates_csv,
            "expected_delete_insert_composite_keys.csv": seeds__expected_delete_insert_composite_keys_csv,
        }

    @pytest.fixture(scope="class")
    def models(self):
        return {
            "delete_insert_incremental_predicates.sql": models__delete_insert_incremental_predicates_sql,
            "delete_insert_composite_keys.sql": models__delete_insert_composite_keys_sql,
        }

    def test__incremental_predicates_composite_keys(self, project):
        """seed should match model after two incremental runs"""

        expected_fields = self.get_expected_fields(
            relation="expected_delete_insert_composite_keys", seed_rows=4
        )
        test_case_fields = self.get_test_fields(
            project,
            seed="expected_delete_insert_composite_keys",
            incremental_model="delete_insert_composite_keys",
            update_sql_file=None,
        )
        self.check_scenario_correctness(expected_fields, test_case_fields, project)


================================================
FILE: tests/functional/adapter/materialization/test_incremental_merge.py
================================================
import pytest
from dbt.tests.adapter.incremental.test_incremental_unique_id import (
    BaseIncrementalUniqueKey,
    models__duplicated_unary_unique_key_list_sql,
    models__empty_str_unique_key_sql,
    models__empty_unique_key_list_sql,
    models__no_unique_key_sql,
    models__nontyped_trinary_unique_key_list_sql,
    models__not_found_unique_key_list_sql,
    models__not_found_unique_key_sql,
    models__str_unique_key_sql,
    models__trinary_unique_key_list_sql,
    models__unary_unique_key_list_sql,
    seeds__seed_csv,
)

seeds__duplicate_insert_sql = """
-- Insert statement which, when applied to seed.csv, triggers the in-place
--   overwrite strategy of incremental models. Seed and incremental model
--   diverge.

-- insert new row, which should not be in incremental model
--  with primary or first three columns unique
insert into {schema}.seed
    (state, county, city, last_visit_date)
values ('CT','Hartford','Hartford',DATE '2022-02-14');

"""

seeds__add_new_rows_sql = """
-- Insert statement which, when applied to seed.csv, makes the incremental
--   model grow in size while not (necessarily) diverging from the seed itself.

-- insert two new rows, both of which should be in incremental model
--   with any unique columns
insert into {schema}.seed
    (state, county, city, last_visit_date)
values ('WA','King','Seattle',DATE '2022-02-01');

insert into {schema}.seed
    (state, county, city, last_visit_date)
values ('CA','Los Angeles','Los Angeles',DATE '2022-02-01');

"""

models__expected__one_str__overwrite_sql = """
{{
    config(
        materialized='table'
    )
}}

select
    'CT' as state,
    'Hartford' as county,
    'Hartford' as city,
    cast('2022-02-14' as date) as last_visit_date
union all
select 'MA','Suffolk','Boston',DATE '2020-02-12'
union all
select 'NJ','Mercer','Trenton',DATE '2022-01-01'
union all
select 'NY','Kings','Brooklyn',DATE '2021-04-02'
union all
select 'NY','New York','Manhattan',DATE '2021-04-01'
union all
select 'PA','Philadelphia','Philadelphia',DATE '2021-05-21'
union all
select 'CO','Denver',null,DATE '2021-06-18'

"""

models__expected__unique_key_list__inplace_overwrite_sql = """
{{
    config(
        materialized='table'
    )
}}

select
    'CT' as state,
    'Hartford' as county,
    'Hartford' as city,
    cast('2022-02-14' as date) as last_visit_date
union all
select 'MA','Suffolk','Boston',DATE '2020-02-12'
union all
select 'NJ','Mercer','Trenton',DATE '2022-01-01'
union all
select 'NY','Kings','Brooklyn',DATE '2021-04-02'
union all
select 'NY','New York','Manhattan',DATE '2021-04-01'
union all
select 'PA','Philadelphia','Philadelphia',DATE '2021-05-21'
union all
select 'CO','Denver',null,DATE '2021-06-18'

"""


class TrinoIncrementalUniqueKey(BaseIncrementalUniqueKey):
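    """Binds the upstream unique-key models and seeds into this project; the
    marked subclasses below run them with the merge strategy."""
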
    @pytest.fixture(scope="class")
    def seeds(self):
        return {
            "duplicate_insert.sql": seeds__duplicate_insert_sql,
            "seed.csv": seeds__seed_csv,
            "add_new_rows.sql": seeds__add_new_rows_sql,
        }

    @pytest.fixture(scope="class")
    def models(self):
        return {
            "trinary_unique_key_list.sql": models__trinary_unique_key_list_sql,
            "nontyped_trinary_unique_key_list.sql": models__nontyped_trinary_unique_key_list_sql,
            "unary_unique_key_list.sql": models__unary_unique_key_list_sql,
            "not_found_unique_key.sql": models__not_found_unique_key_sql,
            "empty_unique_key_list.sql": models__empty_unique_key_list_sql,
            "no_unique_key.sql": models__no_unique_key_sql,
            "empty_str_unique_key.sql": models__empty_str_unique_key_sql,
            "str_unique_key.sql": models__str_unique_key_sql,
            "duplicated_unary_unique_key_list.sql": models__duplicated_unary_unique_key_list_sql,
            "not_found_unique_key_list.sql": models__not_found_unique_key_list_sql,
            "expected": {
                "one_str__overwrite.sql": models__expected__one_str__overwrite_sql,
                "unique_key_list__inplace_overwrite.sql": models__expected__unique_key_list__inplace_overwrite_sql,
            },
        }


@pytest.mark.iceberg
class TestIcebergIncrementalMerge(TrinoIncrementalUniqueKey):
    @pytest.fixture(scope="class")
    def project_config_update(self):
        return {
            "name": "incremental",
            "models": {"+incremental_strategy": "merge"},
            "seeds": {"incremental": {"seed": {"+column_types": {"some_date": "date"}}}},
        }


@pytest.mark.delta
class TestDeltaIncrementalMerge(TrinoIncrementalUniqueKey):
    @pytest.fixture(scope="class")
    def project_config_update(self):
        return {
            "name": "incremental",
            "models": {
                "+on_table_exists": "drop",
                "+incremental_strategy": "merge",
            },
            "seeds": {"incremental": {"seed": {"+column_types": {"some_date": "date"}}}},
        }


================================================
FILE: tests/functional/adapter/materialization/test_incremental_microbatch.py
================================================
import pytest
from dbt.tests.adapter.incremental.test_incremental_microbatch import BaseMicrobatch


@pytest.mark.iceberg
class TestTrinoMicrobatchIceberg(BaseMicrobatch):
    pass


================================================
FILE: tests/functional/adapter/materialization/test_incremental_predicates.py
================================================
import pytest
from dbt.tests.adapter.incremental.test_incremental_predicates import (
    BaseIncrementalPredicates,
)


@pytest.mark.iceberg
class TestIcebergPredicatesDeleteInsertTrino(BaseIncrementalPredicates):
    @pytest.fixture(scope="class")
    def project_config_update(self):
        return {"models": {"+predicates": ["id != 2"], "+incremental_strategy": "delete+insert"}}


@pytest.mark.delta
class TestDeltaPredicatesDeleteInsertTrino(BaseIncrementalPredicates):
    @pytest.fixture(scope="class")
    def project_config_update(self):
        return {"models": {"+predicates": ["id != 2"], "+incremental_strategy": "delete+insert"}}


@pytest.mark.iceberg
class TestIcebergIncrementalPredicatesMergeTrino(BaseIncrementalPredicates):
    @pytest.fixture(scope="class")
    def project_config_update(self):
        return {
            "models": {
                "+incremental_predicates": ["dbt_internal_dest.id != 2"],
                "+incremental_strategy": "merge",
            }
        }


@pytest.mark.delta
class TestDeltaIncrementalPredicatesMergeTrino(BaseIncrementalPredicates):
    @pytest.fixture(scope="class")
    def project_config_update(self):
        return {
            "models": {
                "+incremental_predicates": ["dbt_internal_dest.id != 2"],
                "+incremental_strategy": "merge",
            }
        }


@pytest.mark.iceberg
class TestIcebergPredicatesMergeTrino(BaseIncrementalPredicates):
    @pytest.fixture(scope="class")
    def project_config_update(self):
        return {
            "models": {
                "+predicates": ["dbt_internal_dest.id != 2"],
                "+incremental_strategy": "merge",
            }
        }


@pytest.mark.delta
class TestDeltaPredicatesMergeTrino(BaseIncrementalPredicates):
    @pytest.fixture(scope="class")
    def project_config_update(self):
        return {
            "models": {
                "+predicates": ["dbt_internal_dest.id != 2"],
                "+incremental_strategy": "merge",
            }
        }


================================================
FILE: tests/functional/adapter/materialization/test_incremental_schema.py
================================================
import pytest
from dbt.tests.util import check_relations_equal, run_dbt

from tests.functional.adapter.materialization.fixtures import (
    incremental_append_new_columns,
    incremental_append_new_columns_remove_one_sql,
    incremental_append_new_columns_remove_one_target_sql,
    incremental_append_new_columns_target_sql,
    incremental_fail_sql,
    incremental_ignore_sql,
    incremental_ignore_target_sql,
    incremental_sync_all_columns_diff_data_types_sql,
    incremental_sync_all_columns_diff_data_types_target_sql,
    incremental_sync_all_columns_quoted_sql,
    incremental_sync_all_columns_quoted_target_sql,
    incremental_sync_all_columns_sql,
    incremental_sync_all_columns_target_sql,
    model_a_sql,
    schema_base_yml,
    select_from_a_sql,
    select_from_incremental_append_new_columns_remove_one_sql,
    select_from_incremental_append_new_columns_remove_one_target_sql,
    select_from_incremental_append_new_columns_sql,
    select_from_incremental_append_new_columns_target_sql,
    select_from_incremental_ignore_sql,
    select_from_incremental_ignore_target_sql,
    select_from_incremental_sync_all_columns_diff_data_types_sql,
    select_from_incremental_sync_all_columns_diff_data_types_target_sql,
    select_from_incremental_sync_all_columns_quoted_sql,
    select_from_incremental_sync_all_columns_quoted_target_sql,
    select_from_incremental_sync_all_columns_sql,
    select_from_incremental_sync_all_columns_target_sql,
)


class OnSchemaChangeBase:
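    """Shared driver for the on_schema_change scenarios: each run_* helper runs
    the selected models twice, executes the singular tests, and compares the
    incremental relation against its *_target counterpart."""
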
    # configuration in dbt_project.yml
    @pytest.fixture(scope="class")
    def project_config_update(self):
        return {"name": "on_schema_change"}

    # everything that goes in the "models" directory
    @pytest.fixture(scope="class")
    def models(self):
        return {
            "model_a.sql": model_a_sql,
            "incremental_ignore.sql": incremental_ignore_sql,
            "incremental_ignore_target.sql": incremental_ignore_target_sql,
            "incremental_append_new_columns.sql": incremental_append_new_columns,
            "incremental_append_new_columns_target.sql": incremental_append_new_columns_target_sql,
            "incremental_append_new_columns_remove_one.sql": incremental_append_new_columns_remove_one_sql,
            "incremental_append_new_columns_remove_one_target.sql": incremental_append_new_columns_remove_one_target_sql,
            "incremental_fail.sql": incremental_fail_sql,
            "incremental_sync_all_columns.sql": incremental_sync_all_columns_sql,
            "incremental_sync_all_columns_target.sql": incremental_sync_all_columns_target_sql,
            "incremental_sync_all_columns_quoted.sql": incremental_sync_all_columns_quoted_sql,
            "incremental_sync_all_columns_quoted_target.sql": incremental_sync_all_columns_quoted_target_sql,
            "incremental_sync_all_columns_diff_data_types.sql": incremental_sync_all_columns_diff_data_types_sql,
            "incremental_sync_all_columns_diff_data_types_target.sql": incremental_sync_all_columns_diff_data_types_target_sql,
            "schema.yml": schema_base_yml,
        }

    @pytest.fixture(scope="class")
    def tests(self):
        return {
            "select_from_a.sql": select_from_a_sql,
            "select_from_incremental_append_new_columns.sql": select_from_incremental_append_new_columns_sql,
            "select_from_incremental_append_new_columns_remove_one.sql": select_from_incremental_append_new_columns_remove_one_sql,
            "select_from_incremental_append_new_columns_remove_one_target.sql": select_from_incremental_append_new_columns_remove_one_target_sql,
            "select_from_incremental_append_new_columns_target.sql": select_from_incremental_append_new_columns_target_sql,
            "select_from_incremental_ignore.sql": select_from_incremental_ignore_sql,
            "select_from_incremental_ignore_target.sql": select_from_incremental_ignore_target_sql,
            "select_from_incremental_sync_all_columns.sql": select_from_incremental_sync_all_columns_sql,
            "select_from_incremental_sync_all_columns_target.sql": select_from_incremental_sync_all_columns_target_sql,
            "select_from_incremental_sync_all_columns_quoted.sql": select_from_incremental_sync_all_columns_quoted_sql,
            "select_from_incremental_sync_all_columns_quoted_target.sql": select_from_incremental_sync_all_columns_quoted_target_sql,
            "select_from_incremental_sync_all_columns_diff_data_types.sql": select_from_incremental_sync_all_columns_diff_data_types_sql,
            "select_from_incremental_sync_all_columns_diff_data_types_target.sql": select_from_incremental_sync_all_columns_diff_data_types_target_sql,
        }

    def list_tests_and_assert(self, include, exclude, expected_tests):
        list_args = ["ls", "--resource-type", "test"]
        if include:
            list_args.extend(("--select", include))
        if exclude:
            list_args.extend(("--exclude", exclude))
        listed = run_dbt(list_args)
        print(listed)
        assert len(listed) == len(expected_tests)
        test_names = [name.split(".")[-1] for name in listed]
        assert sorted(test_names) == sorted(expected_tests)

    def run_tests_and_assert(
        self, project, include, exclude, expected_tests, compare_source, compare_target
    ):
        run_args = ["run"]
        if include:
            run_args.extend(("--models", include))
        results_one = run_dbt(run_args)
        results_two = run_dbt(run_args)

        assert len(results_one) == 3
        assert len(results_two) == 3

        test_args = ["test"]
        if include:
            test_args.extend(("--models", include))
        if exclude:
            test_args.extend(("--exclude", exclude))

        results = run_dbt(test_args)
        tests_run = [r.node.name for r in results]
        assert len(tests_run) == len(expected_tests)
        assert sorted(tests_run) == sorted(expected_tests)
        check_relations_equal(project.adapter, [compare_source, compare_target])

    def run_incremental_ignore(self, project):
        select = "model_a incremental_ignore incremental_ignore_target"
        compare_source = "incremental_ignore"
        compare_target = "incremental_ignore_target"
        exclude = None
        expected = [
            "select_from_a",
            "select_from_incremental_ignore",
            "select_from_incremental_ignore_target",
            "unique_model_a_id",
            "unique_incremental_ignore_id",
            "unique_incremental_ignore_target_id",
        ]

        self.list_tests_and_assert(select, exclude, expected)
        self.run_tests_and_assert(
            project, select, exclude, expected, compare_source, compare_target
        )

    def run_incremental_append_new_columns(self, project):
        select = "model_a incremental_append_new_columns incremental_append_new_columns_target"
        compare_source = "incremental_append_new_columns"
        compare_target = "incremental_append_new_columns_target"
        exclude = None
        expected = [
            "select_from_a",
            "select_from_incremental_append_new_columns",
            "select_from_incremental_append_new_columns_target",
            "unique_model_a_id",
            "unique_incremental_append_new_columns_id",
            "unique_incremental_append_new_columns_target_id",
        ]
        self.list_tests_and_assert(select, exclude, expected)
        self.run_tests_and_assert(
            project, select, exclude, expected, compare_source, compare_target
        )

    def run_incremental_append_new_columns_remove_one(self, project):
        select = "model_a incremental_append_new_columns_remove_one incremental_append_new_columns_remove_one_target"
        compare_source = "incremental_append_new_columns_remove_one"
        compare_target = "incremental_append_new_columns_remove_one_target"
        exclude = None
        expected = [
            "select_from_a",
            "select_from_incremental_append_new_columns_remove_one",
            "select_from_incremental_append_new_columns_remove_one_target",
            "unique_model_a_id",
            "unique_incremental_append_new_columns_remove_one_id",
            "unique_incremental_append_new_columns_remove_one_target_id",
        ]
        self.run_tests_and_assert(
            project, select, exclude, expected, compare_source, compare_target
        )

    def run_incremental_sync_all_columns(self, project):
        select = "model_a incremental_sync_all_columns incremental_sync_all_columns_target"
        compare_source = "incremental_sync_all_columns"
        compare_target = "incremental_sync_all_columns_target"
        exclude = None
        expected = [
            "select_from_a",
            "select_from_incremental_sync_all_columns",
            "select_from_incremental_sync_all_columns_target",
            "unique_model_a_id",
            "unique_incremental_sync_all_columns_id",
            "unique_incremental_sync_all_columns_target_id",
        ]
        self.list_tests_and_assert(select, exclude, expected)
        self.run_tests_and_assert(
            project, select, exclude, expected, compare_source, compare_target
        )

    def run_incremental_sync_all_columns_quoted(self, project):
        select = "model_a incremental_sync_all_columns_quoted incremental_sync_all_columns_quoted_target"
        compare_source = "incremental_sync_all_columns_quoted"
        compare_target = "incremental_sync_all_columns_quoted_target"
        exclude = None
        expected = [
            "select_from_a",
            "select_from_incremental_sync_all_columns_quoted",
            "select_from_incremental_sync_all_columns_quoted_target",
            "unique_model_a_id",
            "unique_incremental_sync_all_columns_quoted_id",
            "unique_incremental_sync_all_columns_quoted_target_id",
        ]
        self.list_tests_and_assert(select, exclude, expected)
        self.run_tests_and_assert(
            project, select, exclude, expected, compare_source, compare_target
        )

    def run_incremental_sync_all_columns_data_type_change(self, project):
        select = "model_a incremental_sync_all_columns_diff_data_types incremental_sync_all_columns_diff_data_types_target"
        compare_source = "incremental_sync_all_columns_diff_data_types"
        compare_target = "incremental_sync_all_columns_diff_data_types_target"
        exclude = None
        expected = [
            "select_from_a",
            "select_from_incremental_sync_all_columns_diff_data_types",
            "select_from_incremental_sync_all_columns_diff_data_types_target",
            "unique_model_a_id",
            "unique_incremental_sync_all_columns_diff_data_types_id",
            "unique_incremental_sync_all_columns_diff_data_types_target_id",
        ]
        self.list_tests_and_assert(select, exclude, expected)
        self.run_tests_and_assert(
            project, select, exclude, expected, compare_source, compare_target
        )

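    # With on_schema_change = 'fail', the first (full-refresh) run succeeds,
    # while the second run detects the changed schema and is expected to
    # surface a Compilation Error for the incremental model.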
    def run_incremental_fail_on_schema_change(self, _):
        select = "model_a incremental_fail"
        run_dbt(["run", "--models", select, "--full-refresh"])
        results_two = run_dbt(["run", "--models", select], expect_pass=False)
        assert "Compilation Error" in results_two[1].message

    def test_run_incremental_ignore(self, project):
        self.run_incremental_ignore(project)

    def test_run_incremental_append_new_columns(self, project):
        self.run_incremental_append_new_columns(project)
        self.run_incremental_append_new_columns_remove_one(project)

    def test_run_incremental_sync_all_columns(self, project):
        self.run_incremental_sync_all_columns(project)
        self.run_incremental_sync_all_columns_quoted(project)

    def test_run_incremental_sync_all_columns_data_type_change(self, project):
        self.run_incremental_sync_all_columns_data_type_change(project)

    def test_run_incremental_fail_on_schema_change(self, project):
        self.run_incremental_fail_on_schema_change(project)


@pytest.mark.iceberg
class TestIcebergOnSchemaChange(OnSchemaChangeBase):
    @pytest.fixture(scope="class")
    def project_config_update(self):
        return {
            "name": "on_schema_change_iceberg",
            "models": {"+incremental_strategy": "merge"},
        }


@pytest.mark.delta
class TestDeltaOnSchemaChange(OnSchemaChangeBase):
    @pytest.fixture(scope="class")
    def project_config_update(self):
        return {
            "name": "on_schema_change_delta",
            "models": {
                "+on_table_exists": "drop",
                "+incremental_strategy": "merge",
            },
        }

    @pytest.mark.xfail(reason="This connector does not support dropping columns")
    def test_run_incremental_sync_all_columns(self, project):
        super().test_run_incremental_sync_all_columns(project)

    @pytest.mark.xfail(reason="This connector does not support dropping columns")
    def test_run_incremental_sync_all_columns_data_type_change(self, project):
        super().test_run_incremental_sync_all_columns_data_type_change(project)


================================================
FILE: tests/functional/adapter/materialization/test_incremental_views_enabled.py
================================================
import pytest
from dbt.tests.util import run_dbt, run_dbt_and_capture

from tests.functional.adapter.materialization.fixtures import model_sql, seed_csv


class BaseViewsEnabled:
    # everything that goes in the "seeds" directory
    @pytest.fixture(scope="class")
    def seeds(self):
        return {
            "seed.csv": seed_csv,
        }

    # everything that goes in the "models" directory
    @pytest.fixture(scope="class")
    def models(self):
        return {
            "materialization.sql": model_sql,
        }


class TestViewsEnabledTrue(BaseViewsEnabled):
    """
    Testing without the views_enabled config specified, which defaults to views_enabled = True
    """

    @pytest.fixture(scope="class")
    def project_config_update(self):
        return {
            "name": "views_enabled_true",
            "models": {"+materialized": "incremental"},
            "seeds": {
                "+column_types": {"some_date": "timestamp(6)"},
            },
        }

    # The actual sequence of dbt commands and assertions
    # pytest will take care of all "setup" + "teardown"
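    # With views_enabled left at its default (true), the incremental
    # materialization stages new rows through a temporary view, so the debug
    # log should contain the "create or replace view ... __dbt_tmp" statement
    # asserted below.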
    def test_run_seed_test(self, project):
        # seed seeds
        results = run_dbt(["seed"], expect_pass=True)
        assert len(results) == 1

        results = run_dbt(["run"], expect_pass=True)
        assert len(results) == 1

        results, logs = run_dbt_and_capture(["--debug", "run"], expect_pass=True)
        assert len(results) == 1
        assert (
            f'''create or replace view
    "{project.database}"."{project.test_schema}"."materialization__dbt_tmp"'''
            in logs
        )


class TestViewsEnabledFalse(BaseViewsEnabled):
    """
    Testing the views_enabled = False configuration for the incremental materialization
    """

    @pytest.fixture(scope="class")
    def project_config_update(self):
        return {
            "name": "views_enabled_false",
            "models": {"+materialized": "incremental", "+views_enabled": False},
            "seeds": {
                "+column_types": {"some_date": "timestamp(6)"},
            },
        }

    # The actual sequence of dbt commands and assertions
    # pytest will take care of all "setup" + "teardown"
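    # With views_enabled = false, the temporary staging relation is created
    # as a table instead of a view, hence the "create table ... __dbt_tmp"
    # statement asserted below.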
    def test_run_seed_test(self, project):
        # seed seeds
        results = run_dbt(["seed"], expect_pass=True)
        assert len(results) == 1

        results = run_dbt(["run"], expect_pass=True)
        assert len(results) == 1

        results, logs = run_dbt_and_capture(["--debug", "run"], expect_pass=True)
        assert len(results) == 1
        assert (
            f'create table "{project.database}"."{project.test_schema}"."materialization__dbt_tmp"'
            in logs
        )


================================================
FILE: tests/functional/adapter/materialization/test_materialized_view.py
================================================
import pytest
from dbt.tests.util import (
    check_relation_types,
    check_relations_equal,
    run_dbt,
    run_dbt_and_capture,
    run_sql_with_adapter,
)

from tests.functional.adapter.materialization.fixtures import (
    model_cte_sql,
    model_sql,
    seed_csv,
)


# TODO: teardown_method is needed to properly remove relations and schemas after tests.
#  It could be refactored and simplified once CASCADE is supported in the Iceberg, Delta and Hive connectors
@pytest.mark.iceberg
class TestIcebergMaterializedViewBase:
    @pytest.fixture(scope="function", autouse=True)
    def teardown_method(self, project):
        yield
        # Drop materialized views first, then drop schema
        sql = "select * from system.metadata.materialized_views"
        results = run_sql_with_adapter(project.adapter, sql, fetch="all")
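        # system.metadata.materialized_views lists the catalog, schema and
        # materialized view name in its first three columns, which is what
        # mv[0], mv[1] and mv[2] refer to below.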
        for mv in results:
            project.run_sql(f"drop materialized view {mv[0]}.{mv[1]}.{mv[2]}")

        relation = project.adapter.Relation.create(
            database=project.database, schema=project.test_schema
        )
        project.adapter.drop_schema(relation)


@pytest.mark.iceberg
class TestIcebergMaterializedViewExists(TestIcebergMaterializedViewBase):
    @pytest.fixture(scope="class")
    def project_config_update(self):
        return {
            "name": "materialized_view",
        }

    @pytest.fixture(scope="class")
    def models(self):
        return {
            "my_view.sql": "select 1 a",
            "my_table.sql": """ {{
    config(materialized='table')
}}
select 1 a""",
        }

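    # my_view and my_table are first created as materialized views outside of
    # dbt; running the project should drop them and recreate them with the
    # view and table materializations configured above.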
    def test_mv_is_dropped_when_model_runs_view(self, project):
        project.adapter.execute("CREATE OR REPLACE MATERIALIZED VIEW my_view AS SELECT 2 b")
        project.adapter.execute("CREATE OR REPLACE MATERIALIZED VIEW my_table AS SELECT 2 b")

        # check relation types
        expected = {
            "my_table": "materialized_view",
            "my_view": "materialized_view",
        }
        check_relation_types(project.adapter, expected)

        model_count = len(run_dbt(["run"]))
        assert model_count == 2

        # check relation types
        expected = {
            "my_view": "view",
            "my_table": "table",
        }
        check_relation_types(project.adapter, expected)


@pytest.mark.iceberg
class TestIcebergMaterializedViewWithCTE(TestIcebergMaterializedViewBase):
    # Configuration in dbt_project.yml
    @pytest.fixture(scope="class")
    def project_config_update(self):
        return {
            "name": "mv_cte_test",
            "models": {
                "+materialized": "materialized_view",
            },
            "seeds": {
                "+column_types": {"some_date": "timestamp(6)"},
            },
        }

    # Everything that goes in the "seeds" directory
    @pytest.fixture(scope="class")
    def seeds(self):
        return {
            "seed.csv": seed_csv,
        }

    # Everything that goes in the "models" directory
    @pytest.fixture(scope="class")
    def models(self):
        return {
            "mat_view.sql": model_cte_sql,
        }

    def test_mv_with_cte_is_created(self, project):
        # Create MV
        results = run_dbt(["run"], expect_pass=True)
        assert len(results) == 1


@pytest.mark.iceberg
class TestIcebergMaterializedViewCreate(TestIcebergMaterializedViewBase):
    # Configuration in dbt_project.yml
    @pytest.fixture(scope="class")
    def project_config_update(self):
        return {
            "name": "mv_test",
            "models": {
                "+materialized": "materialized_view",
            },
            "seeds": {
                "+column_types": {"some_date": "timestamp(6)"},
            },
        }

    # Everything that goes in the "seeds" directory
    @pytest.fixture(scope="class")
    def seeds(self):
        return {
            "seed.csv": seed_csv,
        }

    # Everything that goes in the "models" directory
    @pytest.fixture(scope="class")
    def models(self):
        return {
            "mat_view.sql": model_sql,
        }

    def test_mv_is_created_and_refreshed(self, project):
        catalog = project.adapter.config.credentials.database
        schema = project.adapter.config.credentials.schema

        # Seed seed
        results = run_dbt(["seed"], expect_pass=True)
        assert len(results) == 1

        # Create MV
        results = run_dbt(["run"], expect_pass=True)
        assert len(results) == 1

        # Check if the data was loaded correctly
        check_relations_equal(project.adapter, ["seed", "mat_view"])

        # Add one row to seed
        sql = f"""INSERT INTO {catalog}.{schema}.seed
        VALUES (5, 'Mateo', timestamp '2014-09-07 17:04:27')"""
        run_sql_with_adapter(project.adapter, sql, fetch="all")

        # Refresh MV
        results = run_dbt(["run"], expect_pass=True)
        assert len(results) == 1

        # Check if one row is added in MV
        sql = f"select * from {catalog}.{schema}.mat_view"
        results = run_sql_with_adapter(project.adapter, sql, fetch="all")
        assert len(results) == 5


@pytest.mark.iceberg
class TestIcebergMaterializedViewDropAndCreate(TestIcebergMaterializedViewBase):
    # Configuration in dbt_project.yml
    @pytest.fixture(scope="class")
    def project_config_update(self):
        return {
            "name": "mv_test",
            "models": {
                "+materialized": "materialized_view",
                "+full_refresh": True,
            },
            "seeds": {
                "+column_types": {"some_date": "timestamp(6)"},
            },
        }

    # Everything that goes in the "seeds" directory
    @pytest.fixture(scope="class")
    def seeds(self):
        return {
            "seed.csv": seed_csv,
        }

    # Everything that goes in the "models" directory
    @pytest.fixture(scope="class")
    def models(self):
        return {
            "mat_view_overrides_table.sql": model_sql,
            "mat_view_overrides_view.sql": model_sql,
            "mat_view_overrides_materialized_view.sql": model_sql,
        }

    def test_mv_overrides_relation(self, project):
        # Create relation with same name
        project.adapter.execute("CREATE VIEW mat_view_overrides_view AS SELECT 3 c")
        project.adapter.execute("CREATE TABLE mat_view_overrides_table AS SELECT 4 d")
        project.adapter.execute(
            "CREATE MATERIALIZED VIEW mat_view_overrides_materialized_view AS SELECT 5 e"
        )

        # Seed seed
        results = run_dbt(["seed"], expect_pass=True)
        assert len(results) == 1

        # Create MVs, already existing relations with same name should be dropped
        results = run_dbt(["run"], expect_pass=True)
        assert len(results) == 3

        # Check if MVs were created correctly
        expected = {
            "mat_view_overrides_view": "materialized_view",
            "mat_view_overrides_table": "materialized_view",
            "mat_view_overrides_materialized_view": "materialized_view",
        }
        check_relation_types(project.adapter, expected)

        check_relations_equal(
            project.adapter,
            [
                "seed",
                "mat_view_overrides_view",
                "mat_view_overrides_table",
                "mat_view_overrides_materialized_view",
            ],
        )


@pytest.mark.iceberg
@pytest.mark.skip_profile("starburst_galaxy")
class TestIcebergMaterializedViewProperties(TestIcebergMaterializedViewBase):
    # Configuration in dbt_project.yml
    @pytest.fixture(scope="class")
    def project_config_update(self):
        return {
            "name": "mv_test",
            "models": {
                "+materialized": "materialized_view",
                "+properties": {"format": "'PARQUET'"},
            },
            "seeds": {
                "+column_types": {"some_date": "timestamp(6)"},
            },
        }

    # Everything that goes in the "seeds" directory
    @pytest.fixture(scope="class")
    def seeds(self):
        return {
            "seed.csv": seed_csv,
        }

    # Everything that goes in the "models" directory
    @pytest.fixture(scope="class")
    def models(self):
        return {
            "mat_view.sql": model_sql,
        }

    def test_set_mv_properties(self, project):
        catalog = project.adapter.config.credentials.database
        schema = project.adapter.config.credentials.schema

        # Seed seed
        results = run_dbt(["seed"], expect_pass=True)
        assert len(results) == 1

        # Create MV
        results = run_dbt(["run"], expect_pass=True)
        assert len(results) == 1

        # Retrieve MV properties
        sql = f"SHOW CREATE MATERIALIZED VIEW {catalog}.{schema}.mat_view"
        results = run_sql_with_adapter(project.adapter, sql, fetch="all")
        assert "format = 'PARQUET'" in results[0][0]


@pytest.mark.iceberg
class TestIcebergMaterializedViewWithGracePeriod(TestIcebergMaterializedViewBase):
    # Configuration in dbt_project.yml
    @pytest.fixture(scope="class")
    def project_config_update(self):
        return {
            "name": "mv_test",
            "models": {
                "+materialized": "materialized_view",
                "+grace_period": "INTERVAL '3' SECOND",
            },
        }

    # Everything that goes in the "seeds" directory
    @pytest.fixture(scope="class")
    def seeds(self):
        return {
            "seed.csv": seed_csv,
        }

    # Everything that goes in the "models" directory
    @pytest.fixture(scope="class")
    def models(self):
        return {
            "mat_view.sql": model_sql,
        }

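    # Trino materialized views accept an optional grace period clause, e.g.
    # CREATE MATERIALIZED VIEW ... GRACE PERIOD INTERVAL '3' SECOND AS ...;
    # the +grace_period config above should surface in the generated DDL,
    # which the log assertion below checks for.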
    def test_set_mv_properties(self, project):
        # Seed seed
        results = run_dbt(["seed"], expect_pass=True)
        assert len(results) == 1

        # Create MV
        results, log_output = run_dbt_and_capture(["run", "--debug"], expect_pass=True)
        assert len(results) == 1
        assert "grace period INTERVAL '3' SECOND" in log_output

        # Check if MVs were created correctly
        check_relation_types(project.adapter, {"mat_view": "materialized_view"})


================================================
FILE: tests/functional/adapter/materialization/test_on_table_exists.py
================================================
import pytest
from dbt.tests.util import check_relations_equal, run_dbt, run_dbt_and_capture

from tests.functional.adapter.materialization.fixtures import (
    model_sql,
    profile_yml,
    seed_csv,
)


class BaseOnTableExists:
    # everything that goes in the "seeds" directory
    @pytest.fixture(scope="class")
    def seeds(self):
        return {
            "seed.csv": seed_csv,
        }

    # everything that goes in the "models" directory
    @pytest.fixture(scope="class")
    def models(self):
        return {
            "materialization.sql": model_sql,
            "materialization.yml": profile_yml,
        }


class TestOnTableExistsRename(BaseOnTableExists):
    """
    Testing the on_table_exists = `rename` configuration for the table materialization,
    using the dbt seed, run and test commands and validating data load correctness.
    """

    @pytest.fixture(scope="class")
    def project_config_update(self):
        return {
            "name": "table_rename",
            "models": {"+materialized": "table", "+on_table_exists": "rename"},
            "seeds": {
                "+column_types": {"some_date": "timestamp(6)"},
            },
        }

    # The actual sequence of dbt commands and assertions
    # pytest will take care of all "setup" + "teardown"
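    # On the second run the expected rename flow is:
    #   1. create "materialization__dbt_tmp" with the new data
    #   2. rename "materialization" to "materialization__dbt_backup"
    #   3. rename "materialization__dbt_tmp" to "materialization"
    #   4. drop "materialization__dbt_backup"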
    def test_run_seed_test(self, project):
        # seed seeds
        results = run_dbt(["seed"], expect_pass=True)
        assert len(results) == 1
        # run models two times to check on_table_exists = 'rename'
        results, logs = run_dbt_and_capture(["--debug", "run"], expect_pass=True)
        assert len(results) == 1
        assert (
            f'create table "{project.database}"."{project.test_schema}"."materialization"' in logs
        )
        assert "alter table" not in logs
        results, logs = run_dbt_and_capture(["--debug", "run"], expect_pass=True)
        assert len(results) == 1
        assert (
            f'create table "{project.database}"."{project.test_schema}"."materialization__dbt_tmp"'
            in logs
        )
        assert (
            f'alter table "{project.database}"."{project.test_schema}"."materialization" rename to "{project.database}"."{project.test_schema}"."materialization__dbt_backup"'
            in logs
        )
        assert (
            f'alter table "{project.database}"."{project.test_schema}"."materialization__dbt_tmp" rename to "{project.database}"."{project.test_schema}"."materialization"'
            in logs
        )
        assert (
            f'drop table if exists "{project.database}"."{project.test_schema}"."materialization__dbt_backup"'
            in logs
        )
        # test tests
        results = run_dbt(["test"], expect_pass=True)
        assert len(results) == 3

        # check if the data was loaded correctly
        check_relations_equal(project.adapter, ["seed", "materialization"])


class TestOnTableExistsRenameIncrementalFullRefresh(BaseOnTableExists):
    """
    Testing the on_table_exists = `rename` configuration for the incremental materialization with the full-refresh flag,
    using the dbt seed, run and test commands and validating data load correctness.
    """

    @pytest.fixture(scope="class")
    def project_config_update(self):
        return {
            "name": "table_rename",
            "models": {"+materialized": "incremental", "+on_table_exists": "rename"},
            "seeds": {
                "+column_types": {"some_date": "timestamp(6)"},
            },
        }

    # The actual sequence of dbt commands and assertions
    # pytest will take care of all "setup" + "teardown"
    def test_run_seed_test(self, project):
        # seed seeds
        results = run_dbt(["seed"], expect_pass=True)
        assert len(results) == 1
        # run models two times to check on_table_exists = 'rename'
        results, logs = run_dbt_and_capture(["--debug", "run"], expect_pass=True)
        assert (
            f'create table "{project.database}"."{project.test_schema}"."materialization"' in logs
        )
        assert "alter table" not in logs
        results, logs = run_dbt_and_capture(["--debug", "run", "--full-refresh"], expect_pass=True)
        assert len(results) == 1
        assert (
            f'create table "{project.database}"."{project.test_schema}"."materialization__dbt_tmp"'
            in logs
        )
        assert (
            f'alter table "{project.database}"."{project.test_schema}"."materialization" rename to "{project.database}"."{project.test_schema}"."materialization__dbt_backup"'
            in logs
        )
        assert (
            f'alter table "{project.database}"."{project.test_schema}"."materialization__dbt_tmp" rename to "{project.database}"."{project.test_schema}"."materialization"'
            in logs
        )
        assert (
            f'drop table if exists "{project.database}"."{project.test_schema}"."materialization__dbt_backup"'
            in logs
        )
        assert "create or replace view" not in logs
        # test tests
        results = run_dbt(["test"], expect_pass=True)
        assert len(results) == 3

        # check if the data was loaded correctly
        check_relations_equal(project.adapter, ["seed", "materialization"])


class TestOnTableExistsDrop(BaseOnTableExists):
    """
    Testing the on_table_exists = `drop` configuration for the table materialization,
    using the dbt seed, run and test commands and validating data load correctness.
    """

    @pytest.fixture(scope="class")
    def project_config_update(self):
        return {
            "name": "table_drop",
            "models": {"+materialized": "table", "+on_table_exists": "drop"},
            "seeds": {
                "+column_types": {"some_date": "timestamp(6)"},
            },
        }

    # The actual sequence of dbt commands and assertions
    # pytest will take care of all "setup" + "teardown"
    def test_run_seed_test(self, project):
        # seed seeds
        results = run_dbt(["seed"], expect_pass=True)
        assert len(results) == 1
        # run models two times to check on_table_exists = 'drop'
        results = run_dbt(["run"], expect_pass=True)
        assert len(results) == 1
        results = run_dbt(["run"], expect_pass=True)
        assert len(results) == 1
        # test tests
        results = run_dbt(["test"], expect_pass=True)
        assert len(results) == 3

        # check if the data was loaded correctly
        check_relations_equal(project.adapter, ["seed", "materialization"])


class TestOnTableExistsDropIncrementalFullRefresh(BaseOnTableExists):
    """
    Testing the on_table_exists = `drop` configuration for the incremental materialization with the full-refresh flag,
    using the dbt seed, run and test commands and validating data load correctness.
    """

    @pytest.fixture(scope="class")
    def project_config_update(self):
        return {
            "name": "table_drop",
            "models": {"+materialized": "incremental", "+on_table_exists": "drop"},
            "seeds": {
                "+column_types": {"some_date": "timestamp(6)"},
            },
        }

    # The actual sequence of dbt commands and assertions
    # pytest will take care of all "setup" + "teardown"
    def test_run_seed_test(self, project):
        # seed seeds
        results = run_dbt(["seed"], expect_pass=True)
        assert len(results) == 1
        # run models two times to check on_table_exists = 'drop'
        results, logs = run_dbt_and_capture(["--debug", "run"], expect_pass=True)
        assert len(results) == 1
        assert (
            f'drop table if exists "{project.database}"."{project.test_schema}"."materialization"'
            not in logs
        )
        results, logs = run_dbt_and_capture(["--debug", "run", "--full-refresh"], expect_pass=True)
        assert len(results) == 1
        assert (
            f'drop table if exists "{project.database}"."{project.test_schema}"."materialization"'
            in logs
        )
        # test tests
        results = run_dbt(["test"], expect_pass=True)
        assert len(results) == 3

        # check if the data was loaded correctly
        check_relations_equal(project.adapter, ["seed", "materialization"])


class BaseOnTableExistsReplace(BaseOnTableExists):
    """
    Testing the on_table_exists = `replace` configuration for the table materialization,
    using the dbt seed, run and test commands and validating data load correctness.
    """

    @pytest.fixture(scope="class")
    def project_config_update(self):
        return {
            "name": "table_drop",
            "models": {"+materialized": "table", "+on_table_exists": "replace"},
            "seeds": {
                "+column_types": {"some_date": "timestamp(6)"},
            },
        }

    # The actual sequence of dbt commands and assertions
    # pytest will take care of all "setup" + "teardown"
    def test_run_seed_test(self, project):
        # seed seeds
        results = run_dbt(["seed"], expect_pass=True)
        assert len(results) == 1
        # run models two times to check on_table_exists = 'replace'
        results, logs = run_dbt_and_capture(["--debug", "run"], expect_pass=True)
        assert len(results) == 1
        assert "create or replace table" in logs
        results, logs = run_dbt_and_capture(["--debug", "run"], expect_pass=True)
        assert len(results) == 1
        assert "create or replace table" in logs
        # test tests
        results = run_dbt(["test"], expect_pass=True)
        assert len(results) == 3

        # check if the data was loaded correctly
        check_relations_equal(project.adapter, ["seed", "materialization"])


@pytest.mark.iceberg
class TestOnTableExistsReplaceIceberg(BaseOnTableExistsReplace):
    pass


@pytest.mark.delta
class TestOnTableExistsReplaceDelta(BaseOnTableExistsReplace):
    pass


class BaseOnTableExistsReplaceIncrementalFullRefresh(BaseOnTableExists):
    """
    Testing the on_table_exists = `replace` configuration for the incremental materialization with the full-refresh flag,
    using the dbt seed, run and test commands and validating data load correctness.
    """

    @pytest.fixture(scope="class")
    def project_config_update(self):
        return {
            "name": "table_drop",
            "models": {"+materialized": "incremental", "+on_table_exists": "replace"},
            "seeds": {
                "+column_types": {"some_date": "timestamp(6)"},
            },
        }

    # The actual sequence of dbt commands and assertions
    # pytest will take care of all "setup" + "teardown"
    def test_run_seed_test(self, project):
        # seed seeds
        results = run_dbt(["seed"], expect_pass=True)
        assert len(results) == 1
        # run models two times to check on_table_exists = 'replace'
        results, logs = run_dbt_and_capture(["--debug", "run"], expect_pass=True)
        assert len(results) == 1
        assert "create or replace table" not in logs
        results, logs = run_dbt_and_capture(["--debug", "run", "--full-refresh"], expect_pass=True)
        assert len(results) == 1
        assert "create or replace table" in logs
        # test tests
        results = run_dbt(["test"], expect_pass=True)
        assert len(results) == 3

        # check if the data was loaded correctly
        check_relations_equal(project.adapter, ["seed", "materialization"])


@pytest.mark.iceberg
class TestOnTableExistsReplaceIcebergIncrementalFullRefresh(
    BaseOnTableExistsReplaceIncrementalFullRefresh
):
    pass


@pytest.mark.delta
class TestOnTableExistsReplaceDeltaIncrementalFullRefresh(
    BaseOnTableExistsReplaceIncrementalFullRefresh
):
    pass


class TestOnTableExistsSkip(BaseOnTableExists):
    """
    Testing the on_table_exists = `skip` configuration for the table materialization,
    using the dbt seed, run and test commands and validating data load correctness.
    """

    @pytest.fixture(scope="class")
    def project_config_update(self):
        return {
            "name": "table_rename",
            "models": {"+materialized": "table", "+on_table_exists": "skip"},
            "seeds": {
                "+column_types": {"some_date": "timestamp(6)"},
            },
        }

    # The actual sequence of dbt commands and assertions
    # pytest will take care of all "setup" + "teardown"
    def test_run_seed_test(self, project):
        # seed seeds
        results = run_dbt(["seed"], expect_pass=True)
        assert len(results) == 1
        # run models two times to check on_table_exists = 'skip'
        results, logs = run_dbt_and_capture(["--debug", "run"], expect_pass=True)
        assert len(results) == 1
        assert (
            f'create table if not exists "{project.database}"."{project.test_schema}"."materialization"'
            in logs
        )
        assert "alter table" not in logs
        assert "drop table" not in logs
        assert "or replace" not in logs
        results, logs = run_dbt_and_capture(["--debug", "run"], expect_pass=True)
        assert len(results) == 1
        assert (
            f'create table if not exists "{project.database}"."{project.test_schema}"."materialization"'
            in logs
        )
        assert "alter table" not in logs
        assert "drop table" not in logs
        assert "or replace" not in logs
        # test tests
        results = run_dbt(["test"], expect_pass=True)
        assert len(results) == 3

        # check if the data was loaded correctly
        check_relations_equal(project.adapter, ["seed", "materialization"])


================================================
FILE: tests/functional/adapter/materialization/test_prepared_statements.py
================================================
import pytest
from dbt.tests.util import check_relations_equal, run_dbt

from tests.functional.adapter.materialization.fixtures import (
    model_sql,
    profile_yml,
    seed_csv,
)


class PreparedStatementsBase:
    """
    Testing the prepared_statements_enabled profile configuration using the dbt
    seed, run and test commands and validating data load correctness.
    """

    # configuration in dbt_project.yml
    @pytest.fixture(scope="class")
    def project_config_update(self):
        return {
            "name": "test_prepared_statements",
            "seeds": {
                "+column_types": {"some_date": "timestamp(6)"},
            },
        }

    # everything that goes in the "seeds" directory
    @pytest.fixture(scope="class")
    def seeds(self):
        return {
            "seed.csv": seed_csv,
        }

    # everything that goes in the "models" directory
    @pytest.fixture(scope="class")
    def models(self):
        return {
            "materialization.sql": model_sql,
            "materialization.yml": profile_yml,
        }

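    # With prepared statements enabled, inserts issued by the trino client
    # show up as EXECUTE statements in system.runtime.queries, so counting
    # queries containing "EXECUTE" among the most recent entries
    # distinguishes the two modes.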
    def retrieve_num_prepared_statements(self, trino_connection):
        cur = trino_connection.cursor()
        cur.execute("select query from system.runtime.queries order by query_id desc limit 3")
        result = cur.fetchall()
        return len(list(filter(lambda rec: "EXECUTE" in rec[0], result)))

    # The actual sequence of dbt commands and assertions
    # pytest will take care of all "setup" + "teardown"
    def run_seed_with_prepared_statements(
        self, project, trino_connection, expected_num_prepared_statements
    ):
        # seed seeds
        results = run_dbt(["seed"], expect_pass=True)
        assert len(results) == 1

        # Check if the seed command is using prepared statements
        assert (
            self.retrieve_num_prepared_statements(trino_connection)
            == expected_num_prepared_statements
        )

        # run models
        results = run_dbt(["run"], expect_pass=True)
        assert len(results) == 1
        # test tests
        results = run_dbt(["test"], expect_pass=True)
        assert len(results) == 3

        # check if the data was loaded correctly
        check_relations_equal(project.adapter, ["seed", "materialization"])


@pytest.mark.prepared_statements_disabled
@pytest.mark.skip_profile("starburst_galaxy")
class TestPreparedStatementsDisabled(PreparedStatementsBase):
    def test_run_seed_with_prepared_statements_disabled(self, project, trino_connection):
        self.run_seed_with_prepared_statements(project, trino_connection, 0)


@pytest.mark.skip_profile("starburst_galaxy")
class TestPreparedStatementsEnabled(PreparedStatementsBase):
    def test_run_seed_with_prepared_statements_enabled(self, project, trino_connection):
        self.run_seed_with_prepared_statements(project, trino_connection, 1)


================================================
FILE: tests/functional/adapter/materialization/test_snapshot.py
================================================
import pytest
from dbt.tests.adapter.basic.test_snapshot_check_cols import BaseSnapshotCheckCols
from dbt.tests.adapter.basic.test_snapshot_timestamp import (
    BaseSnapshotTimestamp,
    check_relation_rows,
)
from dbt.tests.util import get_relation_columns, run_dbt

seeds_base_csv = """
id,name,some_date
1,Easton,1981-05-20 06:46:51
2,Lillian,1978-09-03 18:10:33
3,Jeremiah,1982-03-11 03:59:51
4,Nolan,1976-05-06 20:21:35
5,Hannah,1982-06-23 05:41:26
6,Eleanor,1991-08-10 23:12:21
7,Lily,1971-03-29 14:58:02
8,Jonathan,1988-02-26 02:55:24
9,Adrian,1994-02-09 13:14:23
10,Nora,1976-03-01 16:51:39
""".lstrip()


seeds_added_csv = (
    seeds_base_csv
    + """
11,Mateo,2014-09-07 17:04:27
12,Julian,2000-02-04 11:48:30
13,Gabriel,2001-07-10 07:32:52
14,Isaac,2002-11-24 03:22:28
15,Levi,2009-11-15 11:57:15
16,Elizabeth,2005-04-09 03:50:11
17,Grayson,2019-08-06 19:28:17
18,Dylan,2014-03-01 11:50:41
19,Jayden,2009-06-06 07:12:49
20,Luke,2003-12-05 21:42:18
""".lstrip()
)

seeds_newcolumns_csv = """
id,name,some_date,last_initial,new_date
1,Easton,1981-05-20 06:46:51,A,1981-05-20 06:46:51
2,Lillian,1978-09-03 18:10:33,B,1978-09-03 18:10:33
3,Jeremiah,1982-03-11 03:59:51,C,1982-03-11 03:59:51
4,Nolan,1976-05-06 20:21:35,D,1976-05-06 20:21:35
5,Hannah,1982-06-23 05:41:26,E,1982-06-23 05:41:26
6,Eleanor,1991-08-10 23:12:21,F,1991-08-10 23:12:21
7,Lily,1971-03-29 14:58:02,G,1971-03-29 14:58:02
8,Jonathan,1988-02-26 02:55:24,H,1988-02-26 02:55:24
9,Adrian,1994-02-09 13:14:23,I,1994-02-09 13:14:23
10,Nora,1976-03-01 16:51:39,J,1976-03-01 16:51:39
""".lstrip()

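# The macro override below bumps current_timestamp to microsecond precision,
# presumably to match the timestamp(6) columns used by the Iceberg snapshot
# tests configured in this module.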
iceberg_macro_override_sql = """\
{% macro trino__current_timestamp() -%}
    current_timestamp(6)
{%- endmacro %}
"""


class BaseTrinoSnapshotTimestamp(BaseSnapshotTimestamp):
    def test_snapshot_timestamp(self, project):
        super().test_snapshot_timestamp(project)

        run_dbt(["snapshot", "--vars", "seed_name: newcolumns"])

        # snapshot still has 30 rows because timestamp not updated
        check_relation_rows(project, "ts_snapshot", 30)

        # snapshot now has an additional column "last_initial"
        ts_snapshot_columns = map(
            lambda x: x[0], get_relation_columns(project.adapter, "ts_snapshot")
        )
        assert "last_initial" in ts_snapshot_columns
        assert "new_date" in ts_snapshot_columns


@pytest.mark.iceberg
class TestIcebergSnapshotCheckColsTrino(BaseSnapshotCheckCols):
    @pytest.fixture(scope="class")
    def project_config_update(self):
        return {
            "name": "snapshot_strategy_check_cols",
            "seeds": {
                "+column_types": {"some_date": "timestamp(6)"},
            },
        }

    @pytest.fixture(scope="class")
    def seeds(self):
        return {
            "base.csv": seeds_base_csv,
            "added.csv": seeds_added_csv,
        }

    @pytest.fixture(scope="class")
    def macros(self):
        return {"iceberg.sql": iceberg_macro_override_sql}


@pytest.mark.iceberg
class TestIcebergSnapshotTimestampTrino(BaseTrinoSnapshotTimestamp):
    @pytest.fixture(scope="class")
    def seeds(self):
        return {
            "base.csv": seeds_base_csv,
            "newcolumns.csv": seeds_newcolumns_csv,
            "added.csv": seeds_added_csv,
        }

    @pytest.fixture(scope="class")
    def project_config_update(self):
        return {
            "name": "snapshot_strategy_timestamp",
            "seeds": {
                "+column_types": {
                    "some_date": "timestamp(6)",
                    "new_date": "timestamp(6)",
                },
            },
        }


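# Note: the Delta classes below override the seed column types to
# "timestamp(3) with time zone", the timestamp type supported by Trino's
# Delta Lake connector.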
@pytest.mark.delta
class TestDeltaSnapshotCheckColsTrino(BaseSnapshotCheckCols):
    @pytest.fixture(scope="class")
    def project_config_update(self):
        return {
            "name": "snapshot_strategy_check_cols",
            "seeds": {
                "+column_types": {
                    "some_date": "timestamp(3) with time zone",
                    "new_date": "timestamp(3) with time zone",
                },
            },
        }

    @pytest.fixture(scope="class")
    def seeds(self):
        return {
            "base.csv": seeds_base_csv,
            "added.csv": seeds_added_csv,
        }


@pytest.mark.delta
class TestDeltaSnapshotTimestampTrino(BaseTrinoSnapshotTimestamp):
    @pytest.fixture(scope="class")
    def project_config_update(self):
        return {
            "name": "snapshot_strategy_timestamp",
            "seeds": {
                "+column_types": {
                    "some_date": "timestamp(3) with time zone",
                    "new_date": "timestamp(3) with time zone",
                },
            },
        }

    @pytest.fixture(scope="class")
    def seeds(self):
        return {
            "base.csv": seeds_base_csv,
            "newcolumns.csv": seeds_newcolumns_csv,
            "added.csv": seeds_added_csv,
        }


class TestSnapshotLocationPropertyExceptionTrino(BaseSnapshotCheckCols):
    """
    Specifying 'location' property in snapshots is not supported.
    """

    @pytest.fixture(scope="class")
    def project_config_update(self):
        return {
            "name": "snapshot_location_property_exception",
            "snapshots": {
                "+properties": {
                    "location": "some_location",
                },
            },
        }

    def test_snapshot_check_cols(self, project):
        results = run_dbt(["snapshot"], expect_pass=False)
        for result in results:
            assert result.status == "error"
            assert "Specifying 'location' property in snapshots is not supported" in result.message


================================================
FILE: tests/functional/adapter/materialization/test_view_security.py
================================================
import pytest
from dbt.tests.util import check_relations_equal, run_dbt

from tests.functional.adapter.materialization.fixtures import (
    model_sql,
    profile_yml,
    seed_csv,
)


class TestViewSecurity:
    """
    Testing the view_security = 'invoker' configuration for the view materialization,
    using the dbt seed, run and test commands and validating data load correctness.
    """

    # configuration in dbt_project.yml
    @pytest.fixture(scope="class")
    def project_config_update(self):
        return {
            "name": "view_security",
            "models": {"+materialized": "view", "+view_security": "invoker"},
            "seeds": {
                "+column_types": {"some_date": "timestamp(6)"},
            },
        }

    # everything that goes in the "seeds" directory
    @pytest.fixture(scope="class")
    def seeds(self):
        return {
            "seed.csv": seed_csv,
        }

    # everything that goes in the "models" directory
    @pytest.fixture(scope="class")
    def models(self):
        return {
            "materialization.sql": model_sql,
            "materialization.yml": profile_yml,
        }

    # The actual sequence of dbt commands and assertions
    # pytest will take care of all "setup" + "teardown"
    def test_run_seed_test(self, project):
        # seed seeds
        results = run_dbt(["seed"], expect_pass=True)
        assert len(results) == 1
        # run models
        results = run_dbt(["run"], expect_pass=True)
        assert len(results) == 1
        # test tests
        results = run_dbt(["test"], expect_pass=True)
        assert len(results) == 3

        # check if the data was loaded correctly
        check_relations_equal(project.adapter, ["seed", "materialization"])


================================================
FILE: tests/functional/adapter/materialized_view_tests/test_materialized_view_dbt_core.py
================================================
from typing import Optional, Tuple

import pytest
from dbt.adapters.base.relation import BaseRelation
from dbt.tests.adapter.materialized_view.basic import MaterializedViewBasic
from dbt.tests.util import get_model_file, run_dbt, run_sql_with_adapter, set_model_file

from tests.functional.adapter.materialized_view_tests.utils import query_relation_type


@pytest.mark.iceberg
class TestTrinoMaterializedViewsBasic(MaterializedViewBasic):
    @staticmethod
    def insert_record(project, table: BaseRelation, record: Tuple[int, int]):
        my_id, value = record
        project.run_sql(f"insert into {table} (id, value) values ({my_id}, {value})")

    @staticmethod
    def refresh_materialized_view(project, materialized_view: BaseRelation):
        sql = f"refresh materialized view {materialized_view}"
        project.run_sql(sql)

    @staticmethod
    def query_row_count(project, relation: BaseRelation) -> int:
        sql = f"select count(*) from {relation}"
        return project.run_sql(sql, fetch="one")[0]

    @staticmethod
    def query_relation_type(project, relation: BaseRelation) -> Optional[str]:
        return query_relation_type(project, relation)

    # TODO: remove `setup` fixture once CASCADE is supported in the Iceberg, Delta and Hive connectors
    @pytest.fixture(scope="function", autouse=True)
    def setup(self, project, my_materialized_view):
        run_dbt(["seed"])
        run_dbt(["run", "--models", my_materialized_view.identifier, "--full-refresh"])

        # the tests touch these files, store their contents in memory
        initial_model = get_model_file(project, my_materialized_view)

        yield

        # and then reset them after the test runs
        set_model_file(project, my_materialized_view, initial_model)

        # Drop materialized views first, then drop schema
        sql = "select * from system.metadata.materialized_views"
        results = run_sql_with_adapter(project.adapter, sql, fetch="all")
        for mv in results:
            project.run_sql(f"drop materialized view {mv[0]}.{mv[1]}.{mv[2]}")

        relation = project.adapter.Relation.create(
            database=project.database, schema=project.test_schema
        )
        project.adapter.drop_schema(relation)

    @pytest.mark.skip(
        reason="""
    on iceberg:
    If the data is outdated, the materialized view behaves like a normal view,
    and the data is queried directly from the base tables.
    https://trino.io/docs/current/connector/iceberg.html#materialized-views
    """
    )
    def test_materialized_view_only_updates_after_refresh(self):
        pass


================================================
FILE: tests/functional/adapter/materialized_view_tests/utils.py
================================================
from typing import Optional

from dbt.adapters.base.relation import BaseRelation

from dbt.adapters.trino.relation import TrinoRelation


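# A materialized view can also appear as a regular table or view in
# information_schema.tables, so the left join against
# system.metadata.materialized_views takes precedence when classifying the
# relation type.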
def query_relation_type(project, relation: BaseRelation) -> Optional[str]:
    assert isinstance(relation, TrinoRelation)
    sql = f"""
    select
      case when mv.name is not null then 'materialized_view'
           when t.table_type = 'BASE TABLE' then 'table'
           when t.table_type = 'VIEW' then 'view'
           else t.table_type
      end as table_type
    from {relation.information_schema()}.tables t
    left join system.metadata.materialized_views mv
          on mv.catalog_name = t.table_catalog and mv.schema_name = t.table_schema and mv.name = t.table_name
    where t.table_schema = '{relation.schema.lower()}'
          and (mv.catalog_name is null or mv.catalog_name = '{relation.database.lower()}')
          and (mv.schema_name is null or mv.schema_name = '{relation.schema.lower()}')
          and t.table_name = '{relation.identifier.lower()}'
    """
    results = project.run_sql(sql, fetch="all")
    if len(results) == 0:
        return None
    elif len(results) > 1:
        raise ValueError(f"More than one instance of {relation.name} found!")
    else:
        return results[0][0]


================================================
FILE: tests/functional/adapter/persist_docs/fixtures.py
================================================
seed_csv = """
id,name,date
1,Easton,1981-05-20 06:46:51
2,Lillian,1978-09-03 18:10:33
3,Jeremiah,1982-03-11 03:59:51
4,Nolan,1976-05-06 20:21:35
""".lstrip()

table_model = """
{{config(materialized = "table")}}
select * from {{ ref('seed') }}
"""

view_model = """
{{config(materialized = "view")}}
select * from {{ ref('seed') }}
"""

incremental_model = """
{{config(materialized = "incremental")}}
select * from {{ ref('seed') }}
"""

table_profile_yml = """
version: 2
models:
  - name: table_model
    description: |
      Table model description "with double quotes"
      and with 'single  quotes' as well as other;
      '''abc123'''
      reserved -- characters
      --
      /* comment */
      Some $lbl$ labeled $lbl$ and $$ unlabeled $$ dollar-quoting
    columns:
      - name: id
        description: |
          id Column description "with double quotes"
          and with 'single  quotes' as well as other;
          '''abc123'''
          reserved -- characters
          --
          /* comment */
          Some $lbl$ labeled $lbl$ and $$ unlabeled $$ dollar-quoting
        tests:
          - unique
          - not_null
      - name: name
        description: |
          Fancy column description
        tests:
          - not_null
seeds:
  - name: seed
    description: |
      Seed model description "with double quotes"
      and with 'single  quotes' as well as other;
      '''abc123'''
      reserved -- characters
      --
      /* comment */
      Some $lbl$ labeled $lbl$ and $$ unlabeled $$ dollar-quoting
    columns:
      - name: id
        description: |
          id Column description "with double quotes"
          and with 'single  quotes' as well as other;
          '''abc123'''
          reserved -- characters
          --
          /* comment */
          Some $lbl$ labeled $lbl$ and $$ unlabeled $$ dollar-quoting
      - name: name
        description: |
          Fancy column description
        tests:
          - not_null
"""


view_profile_yml = """
version: 2
models:
  - name: view_model
    description: |
      Table model description "with double quotes"
      and with 'single  quotes' as well as other;
      '''abc123'''
      reserved -- characters
      --
      /* comment */
      Some $lbl$ labeled $lbl$ and $$ unlabeled $$ dollar-quoting
    columns:
      - name: id
        tests:
          - unique
          - not_null
        description: ID Column description
      - name: name
        tests:
          - not_null
        description: Name description
seeds:
  - name: seed
    description: |
      Seed model description "with double quotes"
      and with 'single  quotes' as well as other;
      '''abc123'''
      reserved -- characters
      --
      /* comment */
      Some $lbl$ labeled $lbl$ and $$ unlabeled $$ dollar-quoting
    columns:
      - name: id
        description: |
          id Column description "with double quotes"
          and with 'single  quotes' as well as other;
          '''abc123'''
          reserved -- characters
          --
          /* comment */
          Some $lbl$ labeled $lbl$ and $$ unlabeled $$ dollar-quoting
      - name: name
        description: |
          Fancy column description
        tests:
          - not_null
"""

incremental_profile_yml = """
version: 2
models:
  - name: incremental_model
    description: |
      Table model description "with double quotes"
      and with 'single  quotes' as well as other;
      '''abc123'''
      reserved -- characters
      --
      /* comment */
      Some $lbl$ labeled $lbl$ and $$ unlabeled $$ dollar-quoting
    columns:
      - name: id
        description: |
          id Column description "with double quotes"
          and with 'single  quotes' as well as other;
          '''abc123'''
          reserved -- characters
          --
          /* comment */
          Some $lbl$ labeled $lbl$ and $$ unlabeled $$ dollar-quoting
        tests:
          - unique
          - not_null
      - name: name
        description: |
          Fancy column description
        tests:
          - not_null
seeds:
  - name: seed
    description: |
      Seed model description "with double quotes"
      and with 'single  quotes' as well as other;
      '''abc123'''
      reserved -- characters
      --
      /* comment */
      Some $lbl$ labeled $lbl$ and $$ unlabeled $$ dollar-quoting
    columns:
      - name: id
        description: |
          id Column description "with double quotes"
          and with 'single  quotes' as well as other;
          '''abc123'''
          reserved -- characters
          --
          /* comment */
          Some $lbl$ labeled $lbl$ and $$ unlabeled $$ dollar-quoting
      - name: name
        description: |
          Fancy column description
        tests:
          - not_null
"""


================================================
FILE: tests/functional/adapter/persist_docs/test_persist_docs.py
================================================
import pytest
from dbt.tests.adapter.persist_docs.test_persist_docs import (
    BasePersistDocs,
    BasePersistDocsBase,
    BasePersistDocsColumnMissing,
    BasePersistDocsCommentOnQuotedColumn,
)
from dbt.tests.util import run_dbt, run_sql_with_adapter

from tests.functional.adapter.persist_docs.fixtures import (
    incremental_model,
    incremental_profile_yml,
    seed_csv,
    table_model,
    table_profile_yml,
    view_model,
    view_profile_yml,
)


@pytest.mark.iceberg
class TestPersistDocsBase:
    """
    Testing persist_docs functionality
    """

    @property
    def schema(self):
        return "default"

    # everything that goes in the "seeds" directory
    @pytest.fixture(scope="class")
    def seeds(self):
        return {
            "seed.csv": seed_csv,
        }


class TestPersistDocsTable(TestPersistDocsBase):
    @pytest.fixture(scope="class")
    def project_config_update(self):
        return {
            "name": "persist_docs_tests",
            "models": {"+persist_docs": {"relation": True, "columns": True}},
            "seeds": {
                "+column_types": {"date": "timestamp(6)"},
                "+persist_docs": {"relation": True, "columns": True},
            },
        }

    # everything that goes in the "models" directory
    @pytest.fixture(scope="class")
    def models(self):
        return {
            "table_model.sql": table_model,
            "table_persist_docs.yml": table_profile_yml,
        }

    def test_run_seed_test(self, project):
        # seed seeds
        results = run_dbt(["seed"], expect_pass=True)
        assert len(results) == 1
        results = run_dbt(["run"], expect_pass=True)
        assert len(results) == 1
        # test tests
        results = run_dbt(["test"], expect_pass=True)
        assert len(results) == 4


class TestPersistDocsView(TestPersistDocsBase):
    @pytest.fixture(scope="class")
    def project_config_update(self):
        return {
            "name": "persist_docs_tests",
            "models": {
                "+persist_docs": {"relation": True, "columns": True},
                "+materialized": "view",
                "+view_security": "definer",
            },
            "seeds": {
                "+column_types": {"date": "timestamp(6)"},
                "+persist_docs": {"relation": True, "columns": True},
            },
        }

    # everything that goes in the "models" directory
    @pytest.fixture(scope="class")
    def models(self):
        return {
            "view_model.sql": view_model,
            "view_persist_docs.yml": view_profile_yml,
        }

    def test_run_seed_test(self, project):
        # seed seeds
        results = run_dbt(["seed"], expect_pass=True)
        assert len(results) == 1
        results = run_dbt(["run"], expect_pass=True)
        assert len(results) == 1
        # run the tests
        results = run_dbt(["test"], expect_pass=True)
        assert len(results) == 4


class TestPersistDocsIncremental(TestPersistDocsBase):
    @pytest.fixture(scope="class")
    def project_config_update(self):
        return {
            "name": "persist_docs_tests",
            "models": {"+persist_docs": {"relation": True, "columns": True}},
            "seeds": {
                "+column_types": {"date": "timestamp(6)"},
                "+persist_docs": {"relation": True, "columns": True},
            },
        }

    # everything that goes in the "models" directory
    @pytest.fixture(scope="class")
    def models(self):
        return {
            "incremental_model.sql": incremental_model,
            "incremental_persist_docs.yml": incremental_profile_yml,
        }

    def test_run_seed_test(self, project):
        # load the seed data
        results = run_dbt(["seed"], expect_pass=True)
        assert len(results) == 1
        results = run_dbt(["run"], expect_pass=True)
        assert len(results) == 1
        # run the tests
        results = run_dbt(["test"], expect_pass=True)
        assert len(results) == 4


# TODO: Trying to COMMENT ON COLUMN with a comment that includes '*/'
# results in an error. This happens only for views.
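# For illustration only (hypothetical relation and comment text, never executed
# by this suite), the failing statement has roughly this shape:
#   COMMENT ON COLUMN some_view.some_col IS 'comment ending in */'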
@pytest.mark.skip_profile("starburst_galaxy")
class TestPersistDocs(BasePersistDocs):
    pass


class TestPersistDocsColumnMissing(BasePersistDocsColumnMissing):
    pass


class TestPersistDocsCommentOnQuotedColumn(BasePersistDocsCommentOnQuotedColumn):
    pass


class BasePersistDocsDisabled(BasePersistDocsBase):
    def test_persist_docs_disabled(self, project):
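        # With persist_docs disabled, Trino's system.metadata.table_comments
        # should hold no non-null comment for the relation.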
        sql = f"""select * from system.metadata.table_comments
        where catalog_name = '{project.database}'
        and schema_name = '{project.test_schema}'
        and table_name = 'table_model'
        and comment is not null
        """
        result = run_sql_with_adapter(project.adapter, sql, fetch="all")
        assert len(result) == 0


class TestPersistDocsDisabledByDefault(BasePersistDocsDisabled):
    """
    Without providing `persist_docs` config, table comments shouldn't be added by default.
    """

    pass


# TODO: Trying to COMMENT ON COLUMN with a comment that includes '*/'
# results in an error. This happens only for views.
@pytest.mark.skip_profile("starburst_galaxy")
class TestPersistDocsRelationSetToFalse(BasePersistDocsDisabled):
    """
    With `persist_docs.relation` config set to False, table comments shouldn't be added.
    """

    @pytest.fixture(scope="class")
    def project_config_update(self):
        return {
            "models": {
                "test": {
                    "+persist_docs": {
                        "relation": False,
                        "columns": True,
                    },
                }
            }
        }


# TODO: Trying to COMMENT ON COLUMN with a comment that includes '*/'
# results in an error. This happens only for views.
@pytest.mark.skip_profile("starburst_galaxy")
class TestPersistDocsRelationNotSet(BasePersistDocsDisabled):
    """
    Without providing `persist_docs.relation` config, table comments shouldn't be added by default.
    """

    @pytest.fixture(scope="class")
    def project_config_update(self):
        return {
            "models": {
                "test": {
                    "+persist_docs": {
                        "columns": True,
                    },
                }
            }
        }


================================================
FILE: tests/functional/adapter/show/fixtures.py
================================================
models__sample_model = """
select * from {{ ref('sample_seed') }}
"""

models__second_model = """
select
    sample_num as col_one,
    sample_bool as col_two,
    42 as answer
from {{ ref('sample_model') }}
"""

models__sql_header = """
{% call set_sql_header(config) %}
set time zone 'Asia/Kolkata';
{%- endcall %}
select current_timezone() as timezone
"""

private_model_yml = """
groups:
  - name: my_cool_group
    owner: {name: me}

models:
  - name: private_model
    access: private
    config:
      group: my_cool_group
"""


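# With latest_version: 1, an unpinned ref('sample_model') resolves to v1;
# v2 must be selected explicitly (standard dbt model-versioning behaviour).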
schema_yml = """
models:
  - name: sample_model
    latest_version: 1

    # declare the versions, and fully specify them
    versions:
      - v: 2
        config:
          materialized: table
        columns:
          - name: sample_num
            data_type: int
          - name: sample_bool
            data_type: boolean
          - name: answer
            data_type: int

      - v: 1
        config:
          materialized: table
          contract: {enforced: true}
        columns:
          - name: sample_num
            data_type: int
          - name: sample_bool
            data_type: boolean
"""

models__ephemeral_model = """
{{ config(materialized = 'ephemeral') }}
select
    coalesce(sample_num, 0) + 10 as col_deci
from {{ ref('sample_model') }}
"""

models__second_ephemeral_model = """
{{ config(materialized = 'ephemeral') }}
select
    col_deci + 100 as col_hundo
from {{ ref('ephemeral_model') }}
"""

seeds__sample_seed = """sample_num,sample_bool
1,true
2,false
3,true
4,false
5,true
6,false
7,true
"""


================================================
FILE: tests/functional/adapter/show/test_show.py
================================================
import pytest
from dbt.tests.util import run_dbt, run_dbt_and_capture
from dbt_common.exceptions import DbtBaseException, DbtRuntimeError

from tests.functional.adapter.show.fixtures import (
    models__ephemeral_model,
    models__sample_model,
    models__second_ephemeral_model,
    models__second_model,
    models__sql_header,
    private_model_yml,
    schema_yml,
    seeds__sample_seed,
)


class TestShow:
    @pytest.fixture(scope="class")
    def models(self):
        return {
            "sample_model.sql": models__sample_model,
            "second_model.sql": models__second_model,
            "ephemeral_model.sql": models__ephemeral_model,
            "sql_header.sql": models__sql_header,
        }

    @pytest.fixture(scope="class")
    def seeds(self):
        return {"sample_seed.csv": seeds__sample_seed}

    def test_none(self, project):
        with pytest.raises(
            DbtRuntimeError, match="Either --select or --inline must be passed to show"
        ):
            run_dbt(["seed"])
            run_dbt(["show"])

    def test_select_model_text(self, project):
        run_dbt(["build"])
        (results, log_output) = run_dbt_and_capture(["show", "--select", "second_model"])
        assert "Previewing node 'sample_model'" not in log_output
        assert "Previewing node 'second_model'" in log_output
        assert "col_one" in log_output
        assert "col_two" in log_output
        assert "answer" in log_output

    def test_select_multiple_model_text(self, project):
        run_dbt(["build"])
        (results, log_output) = run_dbt_and_capture(
            ["show", "--select", "sample_model second_model"]
        )
        assert "Previewing node 'sample_model'" in log_output
        assert "sample_num" in log_output
        assert "sample_bool" in log_output

    def test_select_single_model_json(self, project):
        run_dbt(["build"])
        (results, log_output) = run_dbt_and_capture(
            ["show", "--select", "sample_model", "--output", "json"]
        )
        assert "Previewing node 'sample_model'" not in log_output
        assert "sample_num" in log_output
        assert "sample_bool" in log_output

    def test_inline_pass(self, project):
        run_dbt(["build"])
        (results, log_output) = run_dbt_and_capture(
            ["show", "--inline", "select * from {{ ref('sample_model') }}"]
        )
        assert "Previewing inline node" in log_output
        assert "sample_num" in log_output
        assert "sample_bool" in log_output

    def test_inline_fail(self, project):
        with pytest.raises(DbtBaseException, match="Error parsing inline query"):
            run_dbt(["show", "--inline", "select * from {{ ref('third_model') }}"])

    def test_inline_fail_database_error(self, project):
        with pytest.raises(DbtRuntimeError, match="Database Error"):
            run_dbt(["show", "--inline", "slect asdlkjfsld;j"])

    def test_ephemeral_model(self, project):
        run_dbt(["build"])
        (results, log_output) = run_dbt_and_capture(["show", "--select", "ephemeral_model"])
        assert "col_deci" in log_output

    def test_second_ephemeral_model(self, project):
        run_dbt(["build"])
        (results, log_output) = run_dbt_and_capture(
            ["show", "--inline", models__second_ephemeral_model]
        )
        assert "col_hundo" in log_output

    # test_limit tests ConnectionWrapper.fetchmany()
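    # A minimal sketch of the assumed mapping (illustrative, not the adapter's
    # actual implementation):
    #   rows = cursor.fetchall() if limit < 0 else cursor.fetchmany(limit)
    # No flag keeps dbt's default preview limit of 5; -1 fetches every row.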
    @pytest.mark.parametrize(
        "args,expected",
        [
            ([], 5),  # default limit
            (["--limit", 3], 3),  # fetch 3 rows
            (["--limit", -1], 7),  # fetch all rows
        ],
    )
    def test_limit(self, project, args, expected):
        run_dbt(["build"])
        dbt_args = ["show", "--inline", models__second_ephemeral_model, *args]
        results, log_output = run_dbt_and_capture(dbt_args)
        assert len(results.results[0].agate_table) == expected

    def test_seed(self, project):
        (results, log_output) = run_dbt_and_capture(["show", "--select", "sample_seed"])
        assert "Previewing node 'sample_seed'" in log_output

    def test_sql_header(self, project):
        run_dbt(["build"])
        (results, log_output) = run_dbt_and_capture(["show", "--select", "sql_header"])
        assert "Asia/Kolkata" in log_output


class TestShowModelVersions:
    @pytest.fixture(scope="class")
    def models(self):
        return {
            "schema.yml": schema_yml,
            "sample_model.sql": models__sample_model,
            "sample_model_v2.sql": models__second_model,
        }

    @pytest.fixture(scope="class")
    def seeds(self):
        return {"sample_seed.csv": seeds__sample_seed}

    def test_version_unspecified(self, project):
        run_dbt(["build"])
        (results, log_output) = run_dbt_and_capture(["show", "--select", "sample_model"])
        assert "Previewing node 'sample_model.v1'" in log_output
        assert "Previewing node 'sample_model.v2'" in log_output

    def test_none(self, project):
        run_dbt(["build"])
        (results, log_output) = run_dbt_and_capture(["show", "--select", "sample_model.v2"])
        assert "Previewing node 'sample_model.v1'" not in log_output
        assert "Previewing node 'sample_model.v2'" in log_output


class TestShowPrivateModel:
    @pytest.fixture(scope="class")
    def models(self):
        return {
            "schema.yml": private_model_yml,
            "private_model.sql": models__sample_model,
        }

    @pytest.fixture(scope="class")
    def seeds(self):
        return {"sample_seed.csv": seeds__sample_seed}

    def test_version_unspecified(self, project):
        run_dbt(["build"])
        run_dbt(["show", "--inline", "select * from {{ ref('private_model') }}"])


================================================
FILE: tests/functional/adapter/simple_seed/seed_bom.csv
================================================
seed_id,first_name,email,ip_address,birthday
1,Larry,lking0@miitbeian.gov.cn,69.135.206.194,2008-09-12 19:08:31
2,Larry,lperkins1@toplist.cz,64.210.133.162,1978-05-09 04:15:14
3,Anna,amontgomery2@miitbeian.gov.cn,168.104.64.114,2011-10-16 04:07:57
4,Sandra,sgeorge3@livejournal.com,229.235.252.98,1973-07-19 10:52:43
5,Fred,fwoods4@google.cn,78.229.170.124,2012-09-30 16:38:29
6,Stephen,shanson5@livejournal.com,182.227.157.105,1995-11-07 21:40:50
7,William,wmartinez6@upenn.edu,135.139.249.50,1982-09-05 03:11:59
8,Jessica,jlong7@hao123.com,203.62.178.210,1991-10-16 11:03:15
9,Douglas,dwhite8@tamu.edu,178.187.247.1,1979-10-01 09:49:48
10,Lisa,lcoleman9@nydailynews.com,168.234.128.249,2011-05-26 07:45:49
11,Ralph,rfieldsa@home.pl,55.152.163.149,1972-11-18 19:06:11
12,Louise,lnicholsb@samsung.com,141.116.153.154,2014-11-25 20:56:14
13,Clarence,cduncanc@sfgate.com,81.171.31.133,2011-11-17 07:02:36
14,Daniel,dfranklind@omniture.com,8.204.211.37,1980-09-13 00:09:04
15,Katherine,klanee@auda.org.au,176.96.134.59,1997-08-22 19:36:56
16,Billy,bwardf@wikia.com,214.108.78.85,2003-10-19 02:14:47
17,Annie,agarzag@ocn.ne.jp,190.108.42.70,1988-10-28 15:12:35
18,Shirley,scolemanh@fastcompany.com,109.251.164.84,1988-08-24 10:50:57
19,Roger,rfrazieri@scribd.com,38.145.218.108,1985-12-31 15:17:15
20,Lillian,lstanleyj@goodreads.com,47.57.236.17,1970-06-08 02:09:05
21,Aaron,arodriguezk@nps.gov,205.245.118.221,1985-10-11 23:07:49
22,Patrick,pparkerl@techcrunch.com,19.8.100.182,2006-03-29 12:53:56
23,Phillip,pmorenom@intel.com,41.38.254.103,2011-11-07 15:35:43
24,Henry,hgarcian@newsvine.com,1.191.216.252,2008-08-28 08:30:44
25,Irene,iturnero@opera.com,50.17.60.190,1994-04-01 07:15:02
26,Andrew,adunnp@pen.io,123.52.253.176,2000-11-01 06:03:25
27,David,dgutierrezq@wp.com,238.23.203.42,1988-01-25 07:29:18
28,Henry,hsanchezr@cyberchimps.com,248.102.2.185,1983-01-01 13:36:37
29,Evelyn,epetersons@gizmodo.com,32.80.46.119,1979-07-16 17:24:12
30,Tammy,tmitchellt@purevolume.com,249.246.167.88,2001-04-03 10:00:23
31,Jacqueline,jlittleu@domainmarket.com,127.181.97.47,1986-02-11 21:35:50
32,Earl,eortizv@opera.com,166.47.248.240,1996-07-06 08:16:27
33,Juan,jgordonw@sciencedirect.com,71.77.2.200,1987-01-31 03:46:44
34,Diane,dhowellx@nyu.edu,140.94.133.12,1994-06-11 02:30:05
35,Randy,rkennedyy@microsoft.com,73.255.34.196,2005-05-26 20:28:39
36,Janice,jriveraz@time.com,22.214.227.32,1990-02-09 04:16:52
37,Laura,lperry10@diigo.com,159.148.145.73,2015-03-17 05:59:25
38,Gary,gray11@statcounter.com,40.193.124.56,1970-01-27 10:04:51
39,Jesse,jmcdonald12@typepad.com,31.7.86.103,2009-03-14 08:14:29
40,Sandra,sgonzalez13@goodreads.com,223.80.168.239,1993-05-21 14:08:54
41,Scott,smoore14@archive.org,38.238.46.83,1980-08-30 11:16:56
42,Phillip,pevans15@cisco.com,158.234.59.34,2011-12-15 23:26:31
43,Steven,sriley16@google.ca,90.247.57.68,2011-10-29 19:03:28
44,Deborah,dbrown17@hexun.com,179.125.143.240,1995-04-10 14:36:07
45,Lori,lross18@ow.ly,64.80.162.180,1980-12-27 16:49:15
46,Sean,sjackson19@tumblr.com,240.116.183.69,1988-06-12 21:24:45
47,Terry,tbarnes1a@163.com,118.38.213.137,1997-09-22 16:43:19
48,Dorothy,dross1b@ebay.com,116.81.76.49,2005-02-28 13:33:24
49,Samuel,swashington1c@house.gov,38.191.253.40,1989-01-19 21:15:48
50,Ralph,rcarter1d@tinyurl.com,104.84.60.174,2007-08-11 10:21:49
51,Wayne,whudson1e@princeton.edu,90.61.24.102,1983-07-03 16:58:12
52,Rose,rjames1f@plala.or.jp,240.83.81.10,1995-06-08 11:46:23
53,Louise,lcox1g@theglobeandmail.com,105.11.82.145,2016-09-19 14:45:51
54,Kenneth,kjohnson1h@independent.co.uk,139.5.45.94,1976-08-17 11:26:19
55,Donna,dbrown1i@amazon.co.uk,19.45.169.45,2006-05-27 16:51:40
56,Johnny,jvasquez1j@trellian.com,118.202.238.23,1975-11-17 08:42:32
57,Patrick,pramirez1k@tamu.edu,231.25.153.198,1997-08-06 11:51:09
58,Helen,hlarson1l@prweb.com,8.40.21.39,1993-08-04 19:53:40
59,Patricia,pspencer1m@gmpg.org,212.198.40.15,1977-08-03 16:37:27
60,Joseph,jspencer1n@marriott.com,13.15.63.238,2005-07-23 20:22:06
61,Phillip,pschmidt1o@blogtalkradio.com,177.98.201.190,1976-05-19 21:47:44
62,Joan,jwebb1p@google.ru,105.229.170.71,1972-09-07 17:53:47
63,Phyllis,pkennedy1q@imgur.com,35.145.8.244,2000-01-01 22:33:37
64,Katherine,khunter1r@smh.com.au,248.168.205.32,1991-01-09 06:40:24
65,Laura,lvasquez1s@wiley.com,128.129.115.152,1997-10-23 12:04:56
66,Juan,jdunn1t@state.gov,44.228.124.51,2004-11-10 05:07:35
67,Judith,jholmes1u@wiley.com,40.227.179.115,1977-08-02 17:01:45
68,Beverly,bbaker1v@wufoo.com,208.34.84.59,2016-03-06 20:07:23
69,Lawrence,lcarr1w@flickr.com,59.158.212.223,1988-09-13 06:07:21
70,Gloria,gwilliams1x@mtv.com,245.231.88.33,1995-03-18 22:32:46
71,Steven,ssims1y@cbslocal.com,104.50.58.255,2001-08-05 21:26:20
72,Betty,bmills1z@arstechnica.com,103.177.214.220,1981-12-14 21:26:54
73,Mildred,mfuller20@prnewswire.com,151.158.8.130,2000-04-19 10:13:55
74,Donald,dday21@icq.com,9.178.102.255,1972-12-03 00:58:24
75,Eric,ethomas22@addtoany.com,85.2.241.227,1992-11-01 05:59:30
76,Joyce,jarmstrong23@sitemeter.com,169.224.20.36,1985-10-24 06:50:01
77,Maria,mmartinez24@amazonaws.com,143.189.167.135,2005-10-05 05:17:42
78,Harry,hburton25@youtube.com,156.47.176.237,1978-03-26 05:53:33
79,Kevin,klawrence26@hao123.com,79.136.183.83,1994-10-12 04:38:52
80,David,dhall27@prweb.com,133.149.172.153,1976-12-15 16:24:24
81,Kathy,kperry28@twitter.com,229.242.72.228,1979-03-04 02:58:56
82,Adam,aprice29@elegantthemes.com,13.145.21.10,1982-11-07 11:46:59
83,Brandon,bgriffin2a@va.gov,73.249.128.212,2013-10-30 05:30:36
84,Henry,hnguyen2b@discovery.com,211.36.214.242,1985-01-09 06:37:27
85,Eric,esanchez2c@edublogs.org,191.166.188.251,2004-05-01 23:21:42
86,Jason,jlee2d@jimdo.com,193.92.16.182,1973-01-08 09:05:39
87,Diana,drichards2e@istockphoto.com,19.130.175.245,1994-10-05 22:50:49
88,Andrea,awelch2f@abc.net.au,94.155.233.96,2002-04-26 08:41:44
89,Louis,lwagner2g@miitbeian.gov.cn,26.217.34.111,2003-08-25 07:56:39
90,Jane,jsims2h@seesaa.net,43.4.220.135,1987-03-20 20:39:04
91,Larry,lgrant2i@si.edu,97.126.79.34,2000-09-07 20:26:19
92,Louis,ldean2j@prnewswire.com,37.148.40.127,2011-09-16 20:12:14
93,Jennifer,jcampbell2k@xing.com,38.106.254.142,1988-07-15 05:06:49
94,Wayne,wcunningham2l@google.com.hk,223.28.26.187,2009-12-15 06:16:54
95,Lori,lstevens2m@icq.com,181.250.181.58,1984-10-28 03:29:19
96,Judy,jsimpson2n@marriott.com,180.121.239.219,1986-02-07 15:18:10
97,Phillip,phoward2o@usa.gov,255.247.0.175,2002-12-26 08:44:45
98,Gloria,gwalker2p@usa.gov,156.140.7.128,1997-10-04 07:58:58
99,Paul,pjohnson2q@umn.edu,183.59.198.197,1991-11-14 12:33:55
100,Frank,fgreene2r@blogspot.com,150.143.68.121,2010-06-12 23:55:39
101,Deborah,dknight2s@reverbnation.com,222.131.211.191,1970-07-08 08:54:23
102,Sandra,sblack2t@tripadvisor.com,254.183.128.254,2000-04-12 02:39:36
103,Edward,eburns2u@dailymotion.com,253.89.118.18,1993-10-10 10:54:01
104,Anthony,ayoung2v@ustream.tv,118.4.193.176,1978-08-26 17:07:29
105,Donald,dlawrence2w@wp.com,139.200.159.227,2007-07-21 20:56:20
106,Matthew,mfreeman2x@google.fr,205.26.239.92,2014-12-05 17:05:39
107,Sean,ssanders2y@trellian.com,143.89.82.108,1993-07-14 21:45:02
108,Sharon,srobinson2z@soundcloud.com,66.234.247.54,1977-04-06 19:07:03
109,Jennifer,jwatson30@t-online.de,196.102.127.7,1998-03-07 05:12:23
110,Clarence,cbrooks31@si.edu,218.93.234.73,2002-11-06 17:22:25
111,Jose,jflores32@goo.gl,185.105.244.231,1995-01-05 06:32:21
112,George,glee33@adobe.com,173.82.249.196,2015-01-04 02:47:46
113,Larry,lhill34@linkedin.com,66.5.206.195,2010-11-02 10:21:17
114,Marie,mmeyer35@mysql.com,151.152.88.107,1990-05-22 20:52:51
115,Clarence,cwebb36@skype.com,130.198.55.217,1972-10-27 07:38:54
116,Sarah,scarter37@answers.com,80.89.18.153,1971-08-24 19:29:30
117,Henry,hhughes38@webeden.co.uk,152.60.114.174,1973-01-27 09:00:42
118,Teresa,thenry39@hao123.com,32.187.239.106,2015-11-06 01:48:44
119,Billy,bgutierrez3a@sun.com,52.37.70.134,2002-03-19 03:20:19
120,Anthony,agibson3b@github.io,154.251.232.213,1991-04-19 01:08:15
121,Sandra,sromero3c@wikia.com,44.124.171.2,1998-09-06 20:30:34
122,Paula,pandrews3d@blogs.com,153.142.118.226,2003-06-24 16:31:24
123,Terry,tbaker3e@csmonitor.com,99.120.45.219,1970-12-09 23:57:21
124,Lois,lwilson3f@reuters.com,147.44.171.83,1971-01-09 22:28:51
125,Sara,smorgan3g@nature.com,197.67.192.230,1992-01-28 20:33:24
126,Charles,ctorres3h@china.com.cn,156.115.216.2,1993-10-02 19:36:34
127,Richard,ralexander3i@marriott.com,248.235.180.59,1999-02-03 18:40:55
128,Christina,charper3j@cocolog-nifty.com,152.114.116.129,1978-09-13 00:37:32
129,Steve,sadams3k@economist.com,112.248.91.98,2004-03-21 09:07:43
130,Katherine,krobertson3l@ow.ly,37.220.107.28,1977-03-18 19:28:50
131,Donna,dgibson3m@state.gov,222.218.76.221,1999-02-01 06:46:16
132,Christina,cwest3n@mlb.com,152.114.6.160,1979-12-24 15:30:35
133,Sandra,swillis3o@meetup.com,180.71.49.34,1984-09-27 08:05:54
134,Clarence,cedwards3p@smugmug.com,10.64.180.186,1979-04-16 16:52:10
135,Ruby,rjames3q@wp.com,98.61.54.20,2007-01-13 14:25:52
136,Sarah,smontgomery3r@tripod.com,91.45.164.172,2009-07-25 04:34:30
137,Sarah,soliver3s@eventbrite.com,30.106.39.146,2012-05-09 22:12:33
138,Deborah,dwheeler3t@biblegateway.com,59.105.213.173,1999-11-09 08:08:44
139,Deborah,dray3u@i2i.jp,11.108.186.217,2014-02-04 03:15:19
140,Paul,parmstrong3v@alexa.com,6.250.59.43,2009-12-21 10:08:53
141,Aaron,abishop3w@opera.com,207.145.249.62,1996-04-25 23:20:23
142,Henry,hsanders3x@google.ru,140.215.203.171,2012-01-29 11:52:32
143,Anne,aanderson3y@1688.com,74.150.102.118,1982-04-03 13:46:17
144,Victor,vmurphy3z@hugedomains.com,222.155.99.152,1987-11-03 19:58:41
145,Evelyn,ereid40@pbs.org,249.122.33.117,1977-12-14 17:09:57
146,Brian,bgonzalez41@wikia.com,246.254.235.141,1991-02-24 00:45:58
147,Sandra,sgray42@squarespace.com,150.73.28.159,1972-07-28 17:26:32
148,Alice,ajones43@a8.net,78.253.12.177,2002-12-05 16:57:46
149,Jessica,jhanson44@mapquest.com,87.229.30.160,1994-01-30 11:40:04
150,Louise,lbailey45@reuters.com,191.219.31.101,2011-09-07 21:11:45
151,Christopher,cgonzalez46@printfriendly.com,83.137.213.239,1984-10-24 14:58:04
152,Gregory,gcollins47@yandex.ru,28.176.10.115,1998-07-25 17:17:10
153,Jane,jperkins48@usnews.com,46.53.164.159,1979-08-19 15:25:00
154,Phyllis,plong49@yahoo.co.jp,208.140.88.2,1985-07-06 02:16:36
155,Adam,acarter4a@scribd.com,78.48.148.204,2005-07-20 03:31:09
156,Frank,fweaver4b@angelfire.com,199.180.255.224,2011-03-04 23:07:54
157,Ronald,rmurphy4c@cloudflare.com,73.42.97.231,1991-01-11 10:39:41
158,Richard,rmorris4d@e-recht24.de,91.9.97.223,2009-01-17 21:05:15
159,Rose,rfoster4e@woothemes.com,203.169.53.16,1991-04-21 02:09:38
160,George,ggarrett4f@uiuc.edu,186.61.5.167,1989-11-11 11:29:42
161,Victor,vhamilton4g@biblegateway.com,121.229.138.38,2012-06-22 18:01:23
162,Mark,mbennett4h@businessinsider.com,209.184.29.203,1980-04-16 15:26:34
163,Martin,mwells4i@ifeng.com,97.223.55.105,2010-05-26 14:08:18
164,Diana,dstone4j@google.ru,90.155.52.47,2013-02-11 00:14:54
165,Walter,wferguson4k@blogger.com,30.63.212.44,1986-02-20 17:46:46
166,Denise,dcoleman4l@vistaprint.com,10.209.153.77,1992-05-13 20:14:14
167,Philip,pknight4m@xing.com,15.28.135.167,2000-09-11 18:41:13
168,Russell,rcarr4n@youtube.com,113.55.165.50,2008-07-10 17:49:27
169,Donna,dburke4o@dion.ne.jp,70.0.105.111,1992-02-10 17:24:58
170,Anne,along4p@squidoo.com,36.154.58.107,2012-08-19 23:35:31
171,Clarence,cbanks4q@webeden.co.uk,94.57.53.114,1972-03-11 21:46:44
172,Betty,bbowman4r@cyberchimps.com,178.115.209.69,2013-01-13 21:34:51
173,Andrew,ahudson4s@nytimes.com,84.32.252.144,1998-09-15 14:20:04
174,Keith,kgordon4t@cam.ac.uk,189.237.211.102,2009-01-22 05:34:38
175,Patrick,pwheeler4u@mysql.com,47.22.117.226,1984-09-05 22:33:15
176,Jesse,jfoster4v@mapquest.com,229.95.131.46,1990-01-20 12:19:15
177,Arthur,afisher4w@jugem.jp,107.255.244.98,1983-10-13 11:08:46
178,Nicole,nryan4x@wsj.com,243.211.33.221,1974-05-30 23:19:14
179,Bruce,bjohnson4y@sfgate.com,17.41.200.101,1992-09-23 02:02:19
180,Terry,tcox4z@reference.com,20.189.120.106,1982-02-13 12:43:14
181,Ashley,astanley50@kickstarter.com,86.3.56.98,1976-05-09 01:27:16
182,Michael,mrivera51@about.me,72.118.249.0,1971-11-11 17:28:37
183,Steven,sgonzalez52@mozilla.org,169.112.247.47,2002-08-24 14:59:25
184,Kathleen,kfuller53@bloglovin.com,80.93.59.30,2002-03-11 13:41:29
185,Nicole,nhenderson54@usda.gov,39.253.60.30,1995-04-24 05:55:07
186,Ralph,rharper55@purevolume.com,167.147.142.189,1980-02-10 18:35:45
187,Heather,hcunningham56@photobucket.com,96.222.196.229,2007-06-15 05:37:50
188,Nancy,nlittle57@cbc.ca,241.53.255.175,2007-07-12 23:42:48
189,Juan,jramirez58@pinterest.com,190.128.84.27,1978-11-07 23:37:37
190,Beverly,bfowler59@chronoengine.com,54.144.230.49,1979-03-31 23:27:28
191,Shirley,sstevens5a@prlog.org,200.97.231.248,2011-12-06 07:08:50
192,Annie,areyes5b@squidoo.com,223.32.182.101,2011-05-28 02:42:09
193,Jack,jkelley5c@tiny.cc,47.34.118.150,1981-12-05 17:31:40
194,Keith,krobinson5d@1und1.de,170.210.209.31,1999-03-09 11:05:43
195,Joseph,jmiller5e@google.com.au,136.74.212.139,1984-10-08 13:18:20
196,Annie,aday5f@blogspot.com,71.99.186.69,1986-02-18 12:27:34
197,Nancy,nperez5g@liveinternet.ru,28.160.6.107,1983-10-20 17:51:20
198,Tammy,tward5h@ucoz.ru,141.43.164.70,1980-03-31 04:45:29
199,Doris,dryan5i@ted.com,239.117.202.188,1985-07-03 03:17:53
200,Rose,rmendoza5j@photobucket.com,150.200.206.79,1973-04-21 21:36:40
201,Cynthia,cbutler5k@hubpages.com,80.153.174.161,2001-01-20 01:42:26
202,Samuel,soliver5l@people.com.cn,86.127.246.140,1970-09-02 02:19:00
203,Carl,csanchez5m@mysql.com,50.149.237.107,1993-12-01 07:02:09
204,Kathryn,kowens5n@geocities.jp,145.166.205.201,2004-07-06 18:39:33
205,Nicholas,nnichols5o@parallels.com,190.240.66.170,2014-11-11 18:52:19
206,Keith,kwillis5p@youtube.com,181.43.206.100,1998-06-13 06:30:51
207,Justin,jwebb5q@intel.com,211.54.245.74,2000-11-04 16:58:26
208,Gary,ghicks5r@wikipedia.org,196.154.213.104,1992-12-01 19:48:28
209,Martin,mpowell5s@flickr.com,153.67.12.241,1983-06-30 06:24:32
210,Brenda,bkelley5t@xinhuanet.com,113.100.5.172,2005-01-08 20:50:22
211,Edward,eray5u@a8.net,205.187.246.65,2011-09-26 08:04:44
212,Steven,slawson5v@senate.gov,238.150.250.36,1978-11-22 02:48:09
213,Robert,rthompson5w@furl.net,70.7.89.236,2001-09-12 08:52:07
214,Jack,jporter5x@diigo.com,220.172.29.99,1976-07-26 14:29:21
215,Lisa,ljenkins5y@oakley.com,150.151.170.180,2010-03-20 19:21:16
216,Theresa,tbell5z@mayoclinic.com,247.25.53.173,2001-03-11 05:36:40
217,Jimmy,jstephens60@weather.com,145.101.93.235,1983-04-12 09:35:30
218,Louis,lhunt61@amazon.co.jp,78.137.6.253,1997-08-29 19:34:34
219,Lawrence,lgilbert62@ted.com,243.132.8.78,2015-04-08 22:06:56
220,David,dgardner63@4shared.com,204.40.46.136,1971-07-09 03:29:11
221,Charles,ckennedy64@gmpg.org,211.83.233.2,2011-02-26 11:55:04
222,Lillian,lbanks65@msu.edu,124.233.12.80,2010-05-16 20:29:02
223,Ernest,enguyen66@baidu.com,82.45.128.148,1996-07-04 10:07:04
224,Ryan,rrussell67@cloudflare.com,202.53.240.223,1983-08-05 12:36:29
225,Donald,ddavis68@ustream.tv,47.39.218.137,1989-05-27 02:30:56
226,Joe,jscott69@blogspot.com,140.23.131.75,1973-03-16 12:21:31
227,Anne,amarshall6a@google.ca,113.162.200.197,1988-12-09 03:38:29
228,Willie,wturner6b@constantcontact.com,85.83.182.249,1991-10-06 01:51:10
229,Nicole,nwilson6c@sogou.com,30.223.51.135,1977-05-29 19:54:56
230,Janet,jwheeler6d@stumbleupon.com,153.194.27.144,2011-03-13 12:48:47
231,Lois,lcarr6e@statcounter.com,0.41.36.53,1993-02-06 04:52:01
232,Shirley,scruz6f@tmall.com,37.156.39.223,2007-02-18 17:47:01
233,Patrick,pford6g@reverbnation.com,36.198.200.89,1977-03-06 15:47:24
234,Lisa,lhudson6h@usatoday.com,134.213.58.137,2014-10-28 01:56:56
235,Pamela,pmartinez6i@opensource.org,5.151.127.202,1987-11-30 16:44:47
236,Larry,lperez6j@infoseek.co.jp,235.122.96.148,1979-01-18 06:33:45
237,Pamela,pramirez6k@census.gov,138.233.34.163,2012-01-29 10:35:20
238,Daniel,dcarr6l@php.net,146.21.152.242,1984-11-17 08:22:59
239,Patrick,psmith6m@indiegogo.com,136.222.199.36,2001-05-30 22:16:44
240,Raymond,rhenderson6n@hc360.com,116.31.112.38,2000-01-05 20:35:41
241,Teresa,treynolds6o@miitbeian.gov.cn,198.126.205.220,1996-11-08 01:27:31
242,Johnny,jmason6p@flickr.com,192.8.232.114,2013-05-14 05:35:50
243,Angela,akelly6q@guardian.co.uk,234.116.60.197,1977-08-20 02:05:17
244,Douglas,dcole6r@cmu.edu,128.135.212.69,2016-10-26 17:40:36
245,Frances,fcampbell6s@twitpic.com,94.22.243.235,1987-04-26 07:07:13
246,Donna,dgreen6t@chron.com,227.116.46.107,2011-07-25 12:59:54
247,Benjamin,bfranklin6u@redcross.org,89.141.142.89,1974-05-03 20:28:18
248,Randy,rpalmer6v@rambler.ru,70.173.63.178,2011-12-20 17:40:18
249,Melissa,mmurray6w@bbb.org,114.234.118.137,1991-02-26 12:45:44
250,Jean,jlittle6x@epa.gov,141.21.163.254,1991-08-16 04:57:09
251,Daniel,dolson6y@nature.com,125.75.104.97,2010-04-23 06:25:54
252,Kathryn,kwells6z@eventbrite.com,225.104.28.249,2015-01-31 02:21:50
253,Theresa,tgonzalez70@ox.ac.uk,91.93.156.26,1971-12-11 10:31:31
254,Beverly,broberts71@bluehost.com,244.40.158.89,2013-09-21 13:02:31
255,Pamela,pmurray72@netscape.com,218.54.95.216,1985-04-16 00:34:00
256,Timothy,trichardson73@amazonaws.com,235.49.24.229,2000-11-11 09:48:28
257,Mildred,mpalmer74@is.gd,234.125.95.132,1992-05-25 02:25:02
258,Jessica,jcampbell75@google.it,55.98.30.140,2014-08-26 00:26:34
259,Beverly,bthomas76@cpanel.net,48.78.228.176,1970-08-18 10:40:05
260,Eugene,eward77@cargocollective.com,139.226.204.2,1996-12-04 23:17:00
261,Andrea,aallen78@webnode.com,160.31.214.38,2009-07-06 07:22:37
262,Justin,jruiz79@merriam-webster.com,150.149.246.122,2005-06-06 11:44:19
263,Kenneth,kedwards7a@networksolutions.com,98.82.193.128,2001-07-03 02:00:10
264,Rachel,rday7b@miibeian.gov.cn,114.15.247.221,1994-08-18 19:45:40
265,Russell,rmiller7c@instagram.com,184.130.152.253,1977-11-06 01:58:12
266,Bonnie,bhudson7d@cornell.edu,235.180.186.206,1990-12-03 22:45:24
267,Raymond,rknight7e@yandex.ru,161.2.44.252,1995-08-25 04:31:19
268,Bonnie,brussell7f@elpais.com,199.237.57.207,1991-03-29 08:32:06
269,Marie,mhenderson7g@elpais.com,52.203.131.144,2004-06-04 21:50:28
270,Alan,acarr7h@trellian.com,147.51.205.72,2005-03-03 10:51:31
271,Barbara,bturner7i@hugedomains.com,103.160.110.226,2004-08-04 13:42:40
272,Christina,cdaniels7j@census.gov,0.238.61.251,1972-10-18 12:47:33
273,Jeremy,jgomez7k@reuters.com,111.26.65.56,2013-01-13 10:41:35
274,Laura,lwood7l@icio.us,149.153.38.205,2011-06-25 09:33:59
275,Matthew,mbowman7m@auda.org.au,182.138.206.172,1999-03-05 03:25:36
276,Denise,dparker7n@icq.com,0.213.88.138,2011-11-04 09:43:06
277,Phillip,pparker7o@discuz.net,219.242.165.240,1973-10-19 04:22:29
278,Joan,jpierce7p@salon.com,63.31.213.202,1989-04-09 22:06:24
279,Irene,ibaker7q@cbc.ca,102.33.235.114,1992-09-04 13:00:57
280,Betty,bbowman7r@ted.com,170.91.249.242,2015-09-28 08:14:22
281,Teresa,truiz7s@boston.com,82.108.158.207,1999-07-18 05:17:09
282,Helen,hbrooks7t@slideshare.net,102.87.162.187,2003-01-06 15:45:29
283,Karen,kgriffin7u@wunderground.com,43.82.44.184,2010-05-28 01:56:37
284,Lisa,lfernandez7v@mtv.com,200.238.218.220,1993-04-03 20:33:51
285,Jesse,jlawrence7w@timesonline.co.uk,95.122.105.78,1990-01-05 17:28:43
286,Terry,tross7x@macromedia.com,29.112.114.133,2009-08-29 21:32:17
287,Angela,abradley7y@icq.com,177.44.27.72,1989-10-04 21:46:06
288,Maria,mhart7z@dailymotion.com,55.27.55.202,1975-01-21 01:22:57
289,Raymond,randrews80@pinterest.com,88.90.78.67,1992-03-16 21:37:40
290,Kathy,krice81@bluehost.com,212.63.196.102,2000-12-14 03:06:44
291,Cynthia,cramos82@nymag.com,107.89.190.6,2005-06-28 02:02:33
292,Kimberly,kjones83@mysql.com,86.169.101.101,2007-06-13 22:56:49
293,Timothy,thansen84@microsoft.com,108.100.254.90,2003-04-04 10:31:57
294,Carol,cspencer85@berkeley.edu,75.118.144.187,1999-03-30 14:53:21
295,Louis,lmedina86@latimes.com,141.147.163.24,1991-04-11 17:53:13
296,Margaret,mcole87@google.fr,53.184.26.83,1991-12-19 01:54:10
297,Mary,mgomez88@yellowpages.com,208.56.57.99,1976-05-21 18:05:08
298,Amanda,aanderson89@geocities.com,147.73.15.252,1987-08-22 15:05:28
299,Kathryn,kgarrett8a@nature.com,27.29.177.220,1976-07-15 04:25:04
300,Dorothy,dmason8b@shareasale.com,106.210.99.193,1990-09-03 21:39:31
301,Lois,lkennedy8c@amazon.de,194.169.29.187,2007-07-29 14:09:31
302,Irene,iburton8d@washingtonpost.com,196.143.110.249,2013-09-05 11:32:46
303,Betty,belliott8e@wired.com,183.105.222.199,1979-09-19 19:29:13
304,Bobby,bmeyer8f@census.gov,36.13.161.145,2014-05-24 14:34:39
305,Ann,amorrison8g@sfgate.com,72.154.54.137,1978-10-05 14:22:34
306,Daniel,djackson8h@wunderground.com,144.95.32.34,1990-07-27 13:23:05
307,Joe,jboyd8i@alibaba.com,187.105.86.178,2011-09-28 16:46:32
308,Ralph,rdunn8j@fc2.com,3.19.87.255,1984-10-18 08:00:40
309,Craig,ccarter8k@gizmodo.com,235.152.76.215,1998-07-04 12:15:21
310,Paula,pdean8l@hhs.gov,161.100.173.197,1973-02-13 09:38:55
311,Andrew,agarrett8m@behance.net,199.253.123.218,1991-02-14 13:36:32
312,Janet,jhowell8n@alexa.com,39.189.139.79,2012-11-24 20:17:33
313,Keith,khansen8o@godaddy.com,116.186.223.196,1987-08-23 21:22:05
314,Nicholas,nedwards8p@state.gov,142.175.142.11,1977-03-28 18:27:27
315,Jacqueline,jallen8q@oaic.gov.au,189.66.135.192,1994-10-26 11:44:26
316,Frank,fgardner8r@mapy.cz,154.77.119.169,1983-01-29 19:19:51
317,Eric,eharrison8s@google.cn,245.139.65.123,1984-02-04 09:54:36
318,Gregory,gcooper8t@go.com,171.147.0.221,2004-06-14 05:22:08
319,Jean,jfreeman8u@rakuten.co.jp,67.243.121.5,1977-01-07 18:23:43
320,Juan,jlewis8v@shinystat.com,216.181.171.189,2001-08-23 17:32:43
321,Randy,rwilliams8w@shinystat.com,105.152.146.28,1983-02-17 00:05:50
322,Stephen,shart8x@sciencedirect.com,196.131.205.148,2004-02-15 10:12:03
323,Annie,ahunter8y@example.com,63.36.34.103,2003-07-23 21:15:25
324,Melissa,mflores8z@cbc.ca,151.230.217.90,1983-11-02 14:53:56
325,Jane,jweaver90@about.me,0.167.235.217,1987-07-29 00:13:44
326,Anthony,asmith91@oracle.com,97.87.48.41,2001-05-31 18:44:11
327,Terry,tdavis92@buzzfeed.com,46.20.12.51,2015-09-12 23:13:55
328,Brandon,bmontgomery93@gravatar.com,252.101.48.186,2010-10-28 08:26:27
329,Chris,cmurray94@bluehost.com,25.158.167.97,2004-05-05 16:10:31
330,Denise,dfuller95@hugedomains.com,216.210.149.28,1979-04-20 08:57:24
331,Arthur,amcdonald96@sakura.ne.jp,206.42.36.213,2009-08-15 03:26:16
332,Jesse,jhoward97@google.cn,46.181.118.30,1974-04-18 14:08:41
333,Frank,fsimpson98@domainmarket.com,163.220.211.87,2006-06-30 14:46:52
334,Janice,jwoods99@pen.io,229.245.237.182,1988-04-06 11:52:58
335,Rebecca,rroberts9a@huffingtonpost.com,148.96.15.80,1976-10-05 08:44:16
336,Joshua,jray9b@opensource.org,192.253.12.198,1971-12-25 22:27:07
337,Joyce,jcarpenter9c@statcounter.com,125.171.46.215,2001-12-31 22:08:13
338,Andrea,awest9d@privacy.gov.au,79.101.180.201,1983-02-18 20:07:47
339,Christine,chudson9e@yelp.com,64.198.43.56,1997-09-08 08:03:43
340,Joe,jparker9f@earthlink.net,251.215.148.153,1973-11-04 05:08:18
341,Thomas,tkim9g@answers.com,49.187.34.47,1991-08-07 21:13:48
342,Janice,jdean9h@scientificamerican.com,4.197.117.16,2009-12-08 02:35:49
343,James,jmitchell9i@umich.edu,43.121.18.147,2011-04-28 17:04:09
344,Charles,cgardner9j@purevolume.com,197.78.240.240,1998-02-11 06:47:07
345,Robert,rhenderson9k@friendfeed.com,215.84.180.88,2002-05-10 15:33:14
346,Chris,cgray9l@4shared.com,249.70.192.240,1998-10-03 16:43:42
347,Gloria,ghayes9m@hibu.com,81.103.138.26,1999-12-26 11:23:13
348,Edward,eramirez9n@shareasale.com,38.136.90.136,2010-08-19 08:01:06
349,Cheryl,cbutler9o@google.ca,172.180.78.172,1995-05-27 20:03:52
350,Margaret,mwatkins9p@sfgate.com,3.20.198.6,2014-10-21 01:42:58
351,Rebecca,rwelch9q@examiner.com,45.81.42.208,2001-02-08 12:19:06
352,Joe,jpalmer9r@phpbb.com,163.202.92.190,1970-01-05 11:29:12
353,Sandra,slewis9s@dyndns.org,77.215.201.236,1974-01-05 07:04:04
354,Todd,tfranklin9t@g.co,167.125.181.82,2009-09-28 10:13:58
355,Joseph,jlewis9u@webmd.com,244.204.6.11,1990-10-21 15:49:57
356,Alan,aknight9v@nydailynews.com,152.197.95.83,1996-03-08 08:43:17
357,Sharon,sdean9w@123-reg.co.uk,237.46.40.26,1985-11-30 12:09:24
358,Annie,awright9x@cafepress.com,190.45.231.111,2000-08-24 11:56:06
359,Diane,dhamilton9y@youtube.com,85.146.171.196,2015-02-24 02:03:57
360,Antonio,alane9z@auda.org.au,61.63.146.203,2001-05-13 03:43:34
361,Matthew,mallena0@hhs.gov,29.97.32.19,1973-02-19 23:43:32
362,Bonnie,bfowlera1@soup.io,251.216.99.53,2013-08-01 15:35:41
363,Margaret,mgraya2@examiner.com,69.255.151.79,1998-01-23 22:24:59
364,Joan,jwagnera3@printfriendly.com,192.166.120.61,1973-07-13 00:30:22
365,Catherine,cperkinsa4@nytimes.com,58.21.24.214,2006-11-19 11:52:26
366,Mark,mcartera5@cpanel.net,220.33.102.142,2007-09-09 09:43:27
367,Paula,ppricea6@msn.com,36.182.238.124,2009-11-11 09:13:05
368,Catherine,cgreena7@army.mil,228.203.58.19,2005-08-09 16:52:15
369,Helen,hhamiltona8@symantec.com,155.56.194.99,2005-02-01 05:40:36
370,Jane,jmeyera9@ezinearticles.com,133.244.113.213,2013-11-06 22:10:23
371,Wanda,wevansaa@bloglovin.com,233.125.192.48,1994-12-26 23:43:42
372,Mark,mmarshallab@tumblr.com,114.74.60.47,2016-09-29 18:03:01
373,Andrew,amartinezac@google.cn,182.54.37.130,1976-06-06 17:04:17
374,Helen,hmoralesad@e-recht24.de,42.45.4.123,1977-03-28 19:06:59
375,Bonnie,bstoneae@php.net,196.149.79.137,1970-02-05 17:05:58
376,Douglas,dfreemanaf@nasa.gov,215.65.124.218,2008-11-20 21:51:55
377,Willie,wwestag@army.mil,35.189.92.118,1992-07-24 05:08:08
378,Cheryl,cwagnerah@upenn.edu,228.239.222.141,2010-01-25 06:29:01
379,Sandra,swardai@baidu.com,63.11.113.240,1985-05-23 08:07:37
380,Julie,jrobinsonaj@jugem.jp,110.58.202.50,2015-03-05 09:42:07
381,Larry,lwagnerak@shop-pro.jp,98.234.25.24,1975-07-22 22:22:02
382,Juan,jcastilloal@yelp.com,24.174.74.202,2007-01-17 09:32:43
383,Donna,dfrazieram@artisteer.com,205.26.147.45,1990-02-11 20:55:46
384,Rachel,rfloresan@w3.org,109.60.216.162,1983-05-22 22:42:18
385,Robert,rreynoldsao@theguardian.com,122.65.209.130,2009-05-01 18:02:51
386,Donald,dbradleyap@etsy.com,42.54.35.126,1997-01-16 16:31:52
387,Rachel,rfisheraq@nih.gov,160.243.250.45,2006-02-17 22:05:49
388,Nicholas,nhamiltonar@princeton.edu,156.211.37.111,1976-06-21 03:36:29
389,Timothy,twhiteas@ca.gov,36.128.23.70,1975-09-24 03:51:18
390,Diana,dbradleyat@odnoklassniki.ru,44.102.120.184,1983-04-27 09:02:50
391,Billy,bfowlerau@jimdo.com,91.200.68.196,1995-01-29 06:57:35
392,Bruce,bandrewsav@ucoz.com,48.12.101.125,1992-10-27 04:31:39
393,Linda,lromeroaw@usa.gov,100.71.233.19,1992-06-08 15:13:18
394,Debra,dwatkinsax@ucoz.ru,52.160.233.193,2001-11-11 06:51:01
395,Katherine,kburkeay@wix.com,151.156.242.141,2010-06-14 19:54:28
396,Martha,mharrisonaz@youku.com,21.222.10.199,1989-10-16 14:17:55
397,Dennis,dwellsb0@youtu.be,103.16.29.3,1985-12-21 06:05:51
398,Gloria,grichardsb1@bloglines.com,90.147.120.234,1982-08-27 01:04:43
399,Brenda,bfullerb2@t.co,33.253.63.90,2011-04-20 05:00:35
400,Larry,lhendersonb3@disqus.com,88.95.132.128,1982-08-31 02:15:12
401,Richard,rlarsonb4@wisc.edu,13.48.231.150,1979-04-15 14:08:09
402,Terry,thuntb5@usa.gov,65.91.103.240,1998-05-15 11:50:49
403,Harry,hburnsb6@nasa.gov,33.38.21.244,1981-04-12 14:02:20
404,Diana,dellisb7@mlb.com,218.229.81.135,1997-01-29 00:17:25
405,Jack,jburkeb8@tripadvisor.com,210.227.182.216,1984-03-09 17:24:03
406,Julia,jlongb9@fotki.com,10.210.12.104,2005-10-26 03:54:13
407,Lois,lscottba@msu.edu,188.79.136.138,1973-02-02 18:40:39
408,Sandra,shendersonbb@shareasale.com,114.171.220.108,2012-06-09 18:22:26
409,Irene,isanchezbc@cdbaby.com,109.255.50.119,1983-09-28 21:11:27
410,Emily,ebrooksbd@bandcamp.com,227.81.93.79,1970-08-31 21:08:01
411,Michelle,mdiazbe@businessweek.com,236.249.6.226,1993-05-22 08:07:07
412,Tammy,tbennettbf@wisc.edu,145.253.239.152,1978-12-31 20:24:51
413,Christine,cgreenebg@flickr.com,97.25.140.118,1978-07-17 12:55:30
414,Patricia,pgarzabh@tuttocitta.it,139.246.192.211,1984-02-27 13:40:08
415,Kimberly,kromerobi@aol.com,73.56.88.247,1976-09-16 14:22:04
416,George,gjohnstonbj@fda.gov,240.36.245.185,1979-07-24 14:36:02
417,Eugene,efullerbk@sciencedaily.com,42.38.105.140,2012-09-12 01:56:41
418,Andrea,astevensbl@goo.gl,31.152.207.204,1979-05-24 11:06:21
419,Shirley,sreidbm@scientificamerican.com,103.60.31.241,1984-02-23 04:07:41
420,Terry,tmorenobn@blinklist.com,92.161.34.42,1994-06-25 14:01:35
421,Christopher,cmorenobo@go.com,158.86.176.82,1973-09-05 09:18:47
422,Dennis,dhansonbp@ning.com,40.160.81.75,1982-01-20 10:19:41
423,Beverly,brussellbq@de.vu,138.32.56.204,1997-11-06 07:20:19
424,Howard,hparkerbr@163.com,103.171.134.171,2015-06-24 15:37:10
425,Helen,hmccoybs@fema.gov,61.200.4.71,1995-06-20 08:59:10
426,Ann,ahudsonbt@cafepress.com,239.187.71.125,1977-04-11 07:59:28
427,Tina,twestbu@nhs.uk,80.213.117.74,1992-08-19 05:54:44
428,Terry,tnguyenbv@noaa.gov,21.93.118.95,1991-09-19 23:22:55
429,Ashley,aburtonbw@wix.com,233.176.205.109,2009-11-10 05:01:20
430,Eric,emyersbx@1und1.de,168.91.212.67,1987-08-10 07:16:20
431,Barbara,blittleby@lycos.com,242.14.189.239,2008-08-02 12:13:04
432,Sean,sevansbz@instagram.com,14.39.177.13,2007-04-16 17:28:49
433,Shirley,sburtonc0@newsvine.com,34.107.138.76,1980-12-10 02:19:29
434,Patricia,pfreemanc1@so-net.ne.jp,219.213.142.117,1987-03-01 02:25:45
435,Paula,pfosterc2@vkontakte.ru,227.14.138.141,1972-09-22 12:59:34
436,Nicole,nstewartc3@1688.com,8.164.23.115,1998-10-27 00:10:17
437,Earl,ekimc4@ovh.net,100.26.244.177,2013-01-22 10:05:46
438,Beverly,breedc5@reuters.com,174.12.226.27,1974-09-22 07:29:36
439,Lawrence,lbutlerc6@a8.net,105.164.42.164,1992-06-05 00:43:40
440,Charles,cmoorec7@ucoz.com,252.197.131.69,1990-04-09 02:34:05
441,Alice,alawsonc8@live.com,183.73.220.232,1989-02-28 09:11:04
442,Dorothy,dcarpenterc9@arstechnica.com,241.47.200.14,2005-05-02 19:57:21
443,Carolyn,cfowlerca@go.com,213.109.55.202,1978-09-10 20:18:20
444,Anthony,alongcb@free.fr,169.221.158.204,1984-09-13 01:59:23
445,Annie,amoorecc@e-recht24.de,50.34.148.61,2009-03-26 03:41:07
446,Carlos,candrewscd@ihg.com,236.69.59.212,1972-03-29 22:42:48
447,Beverly,bramosce@google.ca,164.250.184.49,1982-11-10 04:34:01
448,Teresa,tlongcf@umich.edu,174.88.53.223,1987-05-17 12:48:00
449,Roy,rboydcg@uol.com.br,91.58.243.215,1974-06-16 17:59:54
450,Ashley,afieldsch@tamu.edu,130.138.11.126,1983-09-15 05:52:36
451,Judith,jhawkinsci@cmu.edu,200.187.103.245,2003-10-22 12:24:03
452,Rebecca,rwestcj@ocn.ne.jp,72.85.3.103,1980-11-13 11:01:26
453,Raymond,rporterck@infoseek.co.jp,146.33.216.151,1982-05-17 23:58:03
454,Janet,jmarshallcl@odnoklassniki.ru,52.46.193.166,1998-10-04 00:02:21
455,Shirley,speterscm@salon.com,248.126.31.15,1987-01-30 06:04:59
456,Annie,abowmancn@economist.com,222.213.248.59,2006-03-14 23:52:59
457,Jean,jlarsonco@blogspot.com,71.41.25.195,2007-09-08 23:49:45
458,Phillip,pmoralescp@stanford.edu,74.119.87.28,2011-03-14 20:25:40
459,Norma,nrobinsoncq@economist.com,28.225.21.54,1989-10-21 01:22:43
460,Kimberly,kclarkcr@dion.ne.jp,149.171.132.153,2008-06-27 02:27:30
461,Ruby,rmorriscs@ucla.edu,177.85.163.249,2016-01-28 16:43:44
462,Jonathan,jcastilloct@tripod.com,78.4.28.77,2000-05-24 17:33:06
463,Edward,ebryantcu@jigsy.com,140.31.98.193,1992-12-17 08:32:47
464,Chris,chamiltoncv@eepurl.com,195.171.234.206,1970-12-05 03:42:19
465,Michael,mweavercw@reference.com,7.233.133.213,1987-03-29 02:30:54
466,Howard,hlawrencecx@businessweek.com,113.225.124.224,1990-07-30 07:20:57
467,Philip,phowardcy@comsenz.com,159.170.247.249,2010-10-15 10:18:37
468,Mary,mmarshallcz@xing.com,125.132.189.70,2007-07-19 13:48:47
469,Scott,salvarezd0@theguardian.com,78.49.103.230,1987-10-31 06:10:44
470,Wayne,wcarrolld1@blog.com,238.1.120.204,1980-11-19 03:26:10
471,Jennifer,jwoodsd2@multiply.com,92.20.224.49,2010-05-06 22:17:04
472,Raymond,rwelchd3@toplist.cz,176.158.35.240,2007-12-12 19:02:51
473,Steven,sdixond4@wisc.edu,167.55.237.52,1984-05-05 11:44:37
474,Ralph,rjamesd5@ameblo.jp,241.190.50.133,2000-07-06 08:44:37
475,Jason,jrobinsond6@hexun.com,138.119.139.56,2006-02-03 05:27:45
476,Doris,dwoodd7@fema.gov,180.220.156.190,1978-05-11 20:14:20
477,Elizabeth,eberryd8@youtu.be,74.188.53.229,2006-11-18 08:29:06
478,Irene,igilbertd9@privacy.gov.au,194.152.218.1,1985-09-17 02:46:52
479,Jessica,jdeanda@ameblo.jp,178.103.93.118,1974-06-07 19:04:05
480,Rachel,ralvarezdb@phoca.cz,17.22.223.174,1999-03-08 02:43:25
481,Kenneth,kthompsondc@shinystat.com,229.119.91.234,2007-05-15 13:17:32
482,Harold,hmurraydd@parallels.com,133.26.188.80,1993-11-15 03:42:07
483,Paula,phowellde@samsung.com,34.215.28.216,1993-11-29 15:55:00
484,Ruth,rpiercedf@tripadvisor.com,111.30.130.123,1986-08-17 10:19:38
485,Phyllis,paustindg@vk.com,50.84.34.178,1994-04-13 03:05:24
486,Laura,lfosterdh@usnews.com,37.8.101.33,2001-06-30 08:58:59
487,Eric,etaylordi@com.com,103.183.253.45,2006-09-15 20:18:46
488,Doris,driveradj@prweb.com,247.16.2.199,1989-05-08 09:27:09
489,Ryan,rhughesdk@elegantthemes.com,103.234.153.232,1989-08-01 18:36:06
490,Steve,smoralesdl@jigsy.com,3.76.84.207,2011-03-13 17:01:05
491,Louis,lsullivandm@who.int,78.135.44.208,1975-11-26 16:01:23
492,Catherine,ctuckerdn@seattletimes.com,93.137.106.21,1990-03-13 16:14:56
493,Ann,adixondo@gmpg.org,191.136.222.111,2002-06-05 14:22:18
494,Johnny,jhartdp@amazon.com,103.252.198.39,1988-07-30 23:54:49
495,Susan,srichardsdq@skype.com,126.247.192.11,2005-01-09 12:08:14
496,Brenda,bparkerdr@skype.com,63.232.216.86,1974-05-18 05:58:29
497,Tammy,tmurphyds@constantcontact.com,56.56.37.112,2014-08-05 18:22:25
498,Larry,lhayesdt@wordpress.com,162.146.13.46,1997-02-26 14:01:53
499,,ethomasdu@hhs.gov,6.241.88.250,2007-09-14 13:03:34
500,Paula,pshawdv@networksolutions.com,123.27.47.249,2003-10-30 21:19:20


================================================
FILE: tests/functional/adapter/simple_seed/seeds.py
================================================
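# Expected-state fixtures: tests create seed_expected from the DDL/DML below
# and compare it against the table produced by `dbt seed` (assumed usage,
# following the dbt-core simple_seed test pattern).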
trino_seeds__expected_sql_create_table = """
create table {schema}.seed_expected (
seed_id INTEGER,
first_name VARCHAR,
email VARCHAR,
ip_address VARCHAR,
birthday TIMESTAMP WITHOUT TIME ZONE
)
"""

trino_seeds__expected_sql_insert_into = """
INSERT INTO {schema}.seed_expected
    ("seed_id","first_name","email","ip_address","birthday")
VALUES
    (1,'Larry','lking0@miitbeian.gov.cn','69.135.206.194',TIMESTAMP '2008-09-12 19:08:31'),
    (2,'Larry','lperkins1@toplist.cz','64.210.133.162',TIMESTAMP '1978-05-09 04:15:14'),
    (3,'Anna','amontgomery2@miitbeian.gov.cn','168.104.64.114',TIMESTAMP '2011-10-16 04:07:57'),
    (4,'Sandra','sgeorge3@livejournal.com','229.235.252.98',TIMESTAMP '1973-07-19 10:52:43'),
    (5,'Fred','fwoods4@google.cn','78.229.170.124',TIMESTAMP '2012-09-30 16:38:29'),
    (6,'Stephen','shanson5@livejournal.com','182.227.157.105',TIMESTAMP '1995-11-07 21:40:50'),
    (7,'William','wmartinez6@upenn.edu','135.139.249.50',TIMESTAMP '1982-09-05 03:11:59'),
    (8,'Jessica','jlong7@hao123.com','203.62.178.210',TIMESTAMP '1991-10-16 11:03:15'),
    (9,'Douglas','dwhite8@tamu.edu','178.187.247.1',TIMESTAMP '1979-10-01 09:49:48'),
    (10,'Lisa','lcoleman9@nydailynews.com','168.234.128.249',TIMESTAMP '2011-05-26 07:45:49'),
    (11,'Ralph','rfieldsa@home.pl','55.152.163.149',TIMESTAMP '1972-11-18 19:06:11'),
    (12,'Louise','lnicholsb@samsung.com','141.116.153.154',TIMESTAMP '2014-11-25 20:56:14'),
    (13,'Clarence','cduncanc@sfgate.com','81.171.31.133',TIMESTAMP '2011-11-17 07:02:36'),
    (14,'Daniel','dfranklind@omniture.com','8.204.211.37',TIMESTAMP '1980-09-13 00:09:04'),
    (15,'Katherine','klanee@auda.org.au','176.96.134.59',TIMESTAMP '1997-08-22 19:36:56'),
    (16,'Billy','bwardf@wikia.com','214.108.78.85',TIMESTAMP '2003-10-19 02:14:47'),
    (17,'Annie','agarzag@ocn.ne.jp','190.108.42.70',TIMESTAMP '1988-10-28 15:12:35'),
    (18,'Shirley','scolemanh@fastcompany.com','109.251.164.84',TIMESTAMP '1988-08-24 10:50:57'),
    (19,'Roger','rfrazieri@scribd.com','38.145.218.108',TIMESTAMP '1985-12-31 15:17:15'),
    (20,'Lillian','lstanleyj@goodreads.com','47.57.236.17',TIMESTAMP '1970-06-08 02:09:05'),
    (21,'Aaron','arodriguezk@nps.gov','205.245.118.221',TIMESTAMP '1985-10-11 23:07:49'),
    (22,'Patrick','pparkerl@techcrunch.com','19.8.100.182',TIMESTAMP '2006-03-29 12:53:56'),
    (23,'Phillip','pmorenom@intel.com','41.38.254.103',TIMESTAMP '2011-11-07 15:35:43'),
    (24,'Henry','hgarcian@newsvine.com','1.191.216.252',TIMESTAMP '2008-08-28 08:30:44'),
    (25,'Irene','iturnero@opera.com','50.17.60.190',TIMESTAMP '1994-04-01 07:15:02'),
    (26,'Andrew','adunnp@pen.io','123.52.253.176',TIMESTAMP '2000-11-01 06:03:25'),
    (27,'David','dgutierrezq@wp.com','238.23.203.42',TIMESTAMP '1988-01-25 07:29:18'),
    (28,'Henry','hsanchezr@cyberchimps.com','248.102.2.185',TIMESTAMP '1983-01-01 13:36:37'),
    (29,'Evelyn','epetersons@gizmodo.com','32.80.46.119',TIMESTAMP '1979-07-16 17:24:12'),
    (30,'Tammy','tmitchellt@purevolume.com','249.246.167.88',TIMESTAMP '2001-04-03 10:00:23'),
    (31,'Jacqueline','jlittleu@domainmarket.com','127.181.97.47',TIMESTAMP '1986-02-11 21:35:50'),
    (32,'Earl','eortizv@opera.com','166.47.248.240',TIMESTAMP '1996-07-06 08:16:27'),
    (33,'Juan','jgordonw@sciencedirect.com','71.77.2.200',TIMESTAMP '1987-01-31 03:46:44'),
    (34,'Diane','dhowellx@nyu.edu','140.94.133.12',TIMESTAMP '1994-06-11 02:30:05'),
    (35,'Randy','rkennedyy@microsoft.com','73.255.34.196',TIMESTAMP '2005-05-26 20:28:39'),
    (36,'Janice','jriveraz@time.com','22.214.227.32',TIMESTAMP '1990-02-09 04:16:52'),
    (37,'Laura','lperry10@diigo.com','159.148.145.73',TIMESTAMP '2015-03-17 05:59:25'),
    (38,'Gary','gray11@statcounter.com','40.193.124.56',TIMESTAMP '1970-01-27 10:04:51'),
    (39,'Jesse','jmcdonald12@typepad.com','31.7.86.103',TIMESTAMP '2009-03-14 08:14:29'),
    (40,'Sandra','sgonzalez13@goodreads.com','223.80.168.239',TIMESTAMP '1993-05-21 14:08:54'),
    (41,'Scott','smoore14@archive.org','38.238.46.83',TIMESTAMP '1980-08-30 11:16:56'),
    (42,'Phillip','pevans15@cisco.com','158.234.59.34',TIMESTAMP '2011-12-15 23:26:31'),
    (43,'Steven','sriley16@google.ca','90.247.57.68',TIMESTAMP '2011-10-29 19:03:28'),
    (44,'Deborah','dbrown17@hexun.com','179.125.143.240',TIMESTAMP '1995-04-10 14:36:07'),
    (45,'Lori','lross18@ow.ly','64.80.162.180',TIMESTAMP '1980-12-27 16:49:15'),
    (46,'Sean','sjackson19@tumblr.com','240.116.183.69',TIMESTAMP '1988-06-12 21:24:45'),
    (47,'Terry','tbarnes1a@163.com','118.38.213.137',TIMESTAMP '1997-09-22 16:43:19'),
    (48,'Dorothy','dross1b@ebay.com','116.81.76.49',TIMESTAMP '2005-02-28 13:33:24'),
    (49,'Samuel','swashington1c@house.gov','38.191.253.40',TIMESTAMP '1989-01-19 21:15:48'),
    (50,'Ralph','rcarter1d@tinyurl.com','104.84.60.174',TIMESTAMP '2007-08-11 10:21:49'),
    (51,'Wayne','whudson1e@princeton.edu','90.61.24.102',TIMESTAMP '1983-07-03 16:58:12'),
    (52,'Rose','rjames1f@plala.or.jp','240.83.81.10',TIMESTAMP '1995-06-08 11:46:23'),
    (53,'Louise','lcox1g@theglobeandmail.com','105.11.82.145',TIMESTAMP '2016-09-19 14:45:51'),
    (54,'Kenneth','kjohnson1h@independent.co.uk','139.5.45.94',TIMESTAMP '1976-08-17 11:26:19'),
    (55,'Donna','dbrown1i@amazon.co.uk','19.45.169.45',TIMESTAMP '2006-05-27 16:51:40'),
    (56,'Johnny','jvasquez1j@trellian.com','118.202.238.23',TIMESTAMP '1975-11-17 08:42:32'),
    (57,'Patrick','pramirez1k@tamu.edu','231.25.153.198',TIMESTAMP '1997-08-06 11:51:09'),
    (58,'Helen','hlarson1l@prweb.com','8.40.21.39',TIMESTAMP '1993-08-04 19:53:40'),
    (59,'Patricia','pspencer1m@gmpg.org','212.198.40.15',TIMESTAMP '1977-08-03 16:37:27'),
    (60,'Joseph','jspencer1n@marriott.com','13.15.63.238',TIMESTAMP '2005-07-23 20:22:06'),
    (61,'Phillip','pschmidt1o@blogtalkradio.com','177.98.201.190',TIMESTAMP '1976-05-19 21:47:44'),
    (62,'Joan','jwebb1p@google.ru','105.229.170.71',TIMESTAMP '1972-09-07 17:53:47'),
    (63,'Phyllis','pkennedy1q@imgur.com','35.145.8.244',TIMESTAMP '2000-01-01 22:33:37'),
    (64,'Katherine','khunter1r@smh.com.au','248.168.205.32',TIMESTAMP '1991-01-09 06:40:24'),
    (65,'Laura','lvasquez1s@wiley.com','128.129.115.152',TIMESTAMP '1997-10-23 12:04:56'),
    (66,'Juan','jdunn1t@state.gov','44.228.124.51',TIMESTAMP '2004-11-10 05:07:35'),
    (67,'Judith','jholmes1u@wiley.com','40.227.179.115',TIMESTAMP '1977-08-02 17:01:45'),
    (68,'Beverly','bbaker1v@wufoo.com','208.34.84.59',TIMESTAMP '2016-03-06 20:07:23'),
    (69,'Lawrence','lcarr1w@flickr.com','59.158.212.223',TIMESTAMP '1988-09-13 06:07:21'),
    (70,'Gloria','gwilliams1x@mtv.com','245.231.88.33',TIMESTAMP '1995-03-18 22:32:46'),
    (71,'Steven','ssims1y@cbslocal.com','104.50.58.255',TIMESTAMP '2001-08-05 21:26:20'),
    (72,'Betty','bmills1z@arstechnica.com','103.177.214.220',TIMESTAMP '1981-12-14 21:26:54'),
    (73,'Mildred','mfuller20@prnewswire.com','151.158.8.130',TIMESTAMP '2000-04-19 10:13:55'),
    (74,'Donald','dday21@icq.com','9.178.102.255',TIMESTAMP '1972-12-03 00:58:24'),
    (75,'Eric','ethomas22@addtoany.com','85.2.241.227',TIMESTAMP '1992-11-01 05:59:30'),
    (76,'Joyce','jarmstrong23@sitemeter.com','169.224.20.36',TIMESTAMP '1985-10-24 06:50:01'),
    (77,'Maria','mmartinez24@amazonaws.com','143.189.167.135',TIMESTAMP '2005-10-05 05:17:42'),
    (78,'Harry','hburton25@youtube.com','156.47.176.237',TIMESTAMP '1978-03-26 05:53:33'),
    (79,'Kevin','klawrence26@hao123.com','79.136.183.83',TIMESTAMP '1994-10-12 04:38:52'),
    (80,'David','dhall27@prweb.com','133.149.172.153',TIMESTAMP '1976-12-15 16:24:24'),
    (81,'Kathy','kperry28@twitter.com','229.242.72.228',TIMESTAMP '1979-03-04 02:58:56'),
    (82,'Adam','aprice29@elegantthemes.com','13.145.21.10',TIMESTAMP '1982-11-07 11:46:59'),
    (83,'Brandon','bgriffin2a@va.gov','73.249.128.212',TIMESTAMP '2013-10-30 05:30:36'),
    (84,'Henry','hnguyen2b@discovery.com','211.36.214.242',TIMESTAMP '1985-01-09 06:37:27'),
    (85,'Eric','esanchez2c@edublogs.org','191.166.188.251',TIMESTAMP '2004-05-01 23:21:42'),
    (86,'Jason','jlee2d@jimdo.com','193.92.16.182',TIMESTAMP '1973-01-08 09:05:39'),
    (87,'Diana','drichards2e@istockphoto.com','19.130.175.245',TIMESTAMP '1994-10-05 22:50:49'),
    (88,'Andrea','awelch2f@abc.net.au','94.155.233.96',TIMESTAMP '2002-04-26 08:41:44'),
    (89,'Louis','lwagner2g@miitbeian.gov.cn','26.217.34.111',TIMESTAMP '2003-08-25 07:56:39'),
    (90,'Jane','jsims2h@seesaa.net','43.4.220.135',TIMESTAMP '1987-03-20 20:39:04'),
    (91,'Larry','lgrant2i@si.edu','97.126.79.34',TIMESTAMP '2000-09-07 20:26:19'),
    (92,'Louis','ldean2j@prnewswire.com','37.148.40.127',TIMESTAMP '2011-09-16 20:12:14'),
    (93,'Jennifer','jcampbell2k@xing.com','38.106.254.142',TIMESTAMP '1988-07-15 05:06:49'),
    (94,'Wayne','wcunningham2l@google.com.hk','223.28.26.187',TIMESTAMP '2009-12-15 06:16:54'),
    (95,'Lori','lstevens2m@icq.com','181.250.181.58',TIMESTAMP '1984-10-28 03:29:19'),
    (96,'Judy','jsimpson2n@marriott.com','180.121.239.219',TIMESTAMP '1986-02-07 15:18:10'),
    (97,'Phillip','phoward2o@usa.gov','255.247.0.175',TIMESTAMP '2002-12-26 08:44:45'),
    (98,'Gloria','gwalker2p@usa.gov','156.140.7.128',TIMESTAMP '1997-10-04 07:58:58'),
    (99,'Paul','pjohnson2q@umn.edu','183.59.198.197',TIMESTAMP '1991-11-14 12:33:55'),
    (100,'Frank','fgreene2r@blogspot.com','150.143.68.121',TIMESTAMP '2010-06-12 23:55:39'),
    (101,'Deborah','dknight2s@reverbnation.com','222.131.211.191',TIMESTAMP '1970-07-08 08:54:23'),
    (102,'Sandra','sblack2t@tripadvisor.com','254.183.128.254',TIMESTAMP '2000-04-12 02:39:36'),
    (103,'Edward','eburns2u@dailymotion.com','253.89.118.18',TIMESTAMP '1993-10-10 10:54:01'),
    (104,'Anthony','ayoung2v@ustream.tv','118.4.193.176',TIMESTAMP '1978-08-26 17:07:29'),
    (105,'Donald','dlawrence2w@wp.com','139.200.159.227',TIMESTAMP '2007-07-21 20:56:20'),
    (106,'Matthew','mfreeman2x@google.fr','205.26.239.92',TIMESTAMP '2014-12-05 17:05:39'),
    (107,'Sean','ssanders2y@trellian.com','143.89.82.108',TIMESTAMP '1993-07-14 21:45:02'),
    (108,'Sharon','srobinson2z@soundcloud.com','66.234.247.54',TIMESTAMP '1977-04-06 19:07:03'),
    (109,'Jennifer','jwatson30@t-online.de','196.102.127.7',TIMESTAMP '1998-03-07 05:12:23'),
    (110,'Clarence','cbrooks31@si.edu','218.93.234.73',TIMESTAMP '2002-11-06 17:22:25'),
    (111,'Jose','jflores32@goo.gl','185.105.244.231',TIMESTAMP '1995-01-05 06:32:21'),
    (112,'George','glee33@adobe.com','173.82.249.196',TIMESTAMP '2015-01-04 02:47:46'),
    (113,'Larry','lhill34@linkedin.com','66.5.206.195',TIMESTAMP '2010-11-02 10:21:17'),
    (114,'Marie','mmeyer35@mysql.com','151.152.88.107',TIMESTAMP '1990-05-22 20:52:51'),
    (115,'Clarence','cwebb36@skype.com','130.198.55.217',TIMESTAMP '1972-10-27 07:38:54'),
    (116,'Sarah','scarter37@answers.com','80.89.18.153',TIMESTAMP '1971-08-24 19:29:30'),
    (117,'Henry','hhughes38@webeden.co.uk','152.60.114.174',TIMESTAMP '1973-01-27 09:00:42'),
    (118,'Teresa','thenry39@hao123.com','32.187.239.106',TIMESTAMP '2015-11-06 01:48:44'),
    (119,'Billy','bgutierrez3a@sun.com','52.37.70.134',TIMESTAMP '2002-03-19 03:20:19'),
    (120,'Anthony','agibson3b@github.io','154.251.232.213',TIMESTAMP '1991-04-19 01:08:15'),
    (121,'Sandra','sromero3c@wikia.com','44.124.171.2',TIMESTAMP '1998-09-06 20:30:34'),
    (122,'Paula','pandrews3d@blogs.com','153.142.118.226',TIMESTAMP '2003-06-24 16:31:24'),
    (123,'Terry','tbaker3e@csmonitor.com','99.120.45.219',TIMESTAMP '1970-12-09 23:57:21'),
    (124,'Lois','lwilson3f@reuters.com','147.44.171.83',TIMESTAMP '1971-01-09 22:28:51'),
    (125,'Sara','smorgan3g@nature.com','197.67.192.230',TIMESTAMP '1992-01-28 20:33:24'),
    (126,'Charles','ctorres3h@china.com.cn','156.115.216.2',TIMESTAMP '1993-10-02 19:36:34'),
    (127,'Richard','ralexander3i@marriott.com','248.235.180.59',TIMESTAMP '1999-02-03 18:40:55'),
    (128,'Christina','charper3j@cocolog-nifty.com','152.114.116.129',TIMESTAMP '1978-09-13 00:37:32'),
    (129,'Steve','sadams3k@economist.com','112.248.91.98',TIMESTAMP '2004-03-21 09:07:43'),
    (130,'Katherine','krobertson3l@ow.ly','37.220.107.28',TIMESTAMP '1977-03-18 19:28:50'),
    (131,'Donna','dgibson3m@state.gov','222.218.76.221',TIMESTAMP '1999-02-01 06:46:16'),
    (132,'Christina','cwest3n@mlb.com','152.114.6.160',TIMESTAMP '1979-12-24 15:30:35'),
    (133,'Sandra','swillis3o@meetup.com','180.71.49.34',TIMESTAMP '1984-09-27 08:05:54'),
    (134,'Clarence','cedwards3p@smugmug.com','10.64.180.186',TIMESTAMP '1979-04-16 16:52:10'),
    (135,'Ruby','rjames3q@wp.com','98.61.54.20',TIMESTAMP '2007-01-13 14:25:52'),
    (136,'Sarah','smontgomery3r@tripod.com','91.45.164.172',TIMESTAMP '2009-07-25 04:34:30'),
    (137,'Sarah','soliver3s@eventbrite.com','30.106.39.146',TIMESTAMP '2012-05-09 22:12:33'),
    (138,'Deborah','dwheeler3t@biblegateway.com','59.105.213.173',TIMESTAMP '1999-11-09 08:08:44'),
    (139,'Deborah','dray3u@i2i.jp','11.108.186.217',TIMESTAMP '2014-02-04 03:15:19'),
    (140,'Paul','parmstrong3v@alexa.com','6.250.59.43',TIMESTAMP '2009-12-21 10:08:53'),
    (141,'Aaron','abishop3w@opera.com','207.145.249.62',TIMESTAMP '1996-04-25 23:20:23'),
    (142,'Henry','hsanders3x@google.ru','140.215.203.171',TIMESTAMP '2012-01-29 11:52:32'),
    (143,'Anne','aanderson3y@1688.com','74.150.102.118',TIMESTAMP '1982-04-03 13:46:17'),
    (144,'Victor','vmurphy3z@hugedomains.com','222.155.99.152',TIMESTAMP '1987-11-03 19:58:41'),
    (145,'Evelyn','ereid40@pbs.org','249.122.33.117',TIMESTAMP '1977-12-14 17:09:57'),
    (146,'Brian','bgonzalez41@wikia.com','246.254.235.141',TIMESTAMP '1991-02-24 00:45:58'),
    (147,'Sandra','sgray42@squarespace.com','150.73.28.159',TIMESTAMP '1972-07-28 17:26:32'),
    (148,'Alice','ajones43@a8.net','78.253.12.177',TIMESTAMP '2002-12-05 16:57:46'),
    (149,'Jessica','jhanson44@mapquest.com','87.229.30.160',TIMESTAMP '1994-01-30 11:40:04'),
    (150,'Louise','lbailey45@reuters.com','191.219.31.101',TIMESTAMP '2011-09-07 21:11:45'),
    (151,'Christopher','cgonzalez46@printfriendly.com','83.137.213.239',TIMESTAMP '1984-10-24 14:58:04'),
    (152,'Gregory','gcollins47@yandex.ru','28.176.10.115',TIMESTAMP '1998-07-25 17:17:10'),
    (153,'Jane','jperkins48@usnews.com','46.53.164.159',TIMESTAMP '1979-08-19 15:25:00'),
    (154,'Phyllis','plong49@yahoo.co.jp','208.140.88.2',TIMESTAMP '1985-07-06 02:16:36'),
    (155,'Adam','acarter4a@scribd.com','78.48.148.204',TIMESTAMP '2005-07-20 03:31:09'),
    (156,'Frank','fweaver4b@angelfire.com','199.180.255.224',TIMESTAMP '2011-03-04 23:07:54'),
    (157,'Ronald','rmurphy4c@cloudflare.com','73.42.97.231',TIMESTAMP '1991-01-11 10:39:41'),
    (158,'Richard','rmorris4d@e-recht24.de','91.9.97.223',TIMESTAMP '2009-01-17 21:05:15'),
    (159,'Rose','rfoster4e@woothemes.com','203.169.53.16',TIMESTAMP '1991-04-21 02:09:38'),
    (160,'George','ggarrett4f@uiuc.edu','186.61.5.167',TIMESTAMP '1989-11-11 11:29:42'),
    (161,'Victor','vhamilton4g@biblegateway.com','121.229.138.38',TIMESTAMP '2012-06-22 18:01:23'),
    (162,'Mark','mbennett4h@businessinsider.com','209.184.29.203',TIMESTAMP '1980-04-16 15:26:34'),
    (163,'Martin','mwells4i@ifeng.com','97.223.55.105',TIMESTAMP '2010-05-26 14:08:18'),
    (164,'Diana','dstone4j@google.ru','90.155.52.47',TIMESTAMP '2013-02-11 00:14:54'),
    (165,'Walter','wferguson4k@blogger.com','30.63.212.44',TIMESTAMP '1986-02-20 17:46:46'),
    (166,'Denise','dcoleman4l@vistaprint.com','10.209.153.77',TIMESTAMP '1992-05-13 20:14:14'),
    (167,'Philip','pknight4m@xing.com','15.28.135.167',TIMESTAMP '2000-09-11 18:41:13'),
    (168,'Russell','rcarr4n@youtube.com','113.55.165.50',TIMESTAMP '2008-07-10 17:49:27'),
    (169,'Donna','dburke4o@dion.ne.jp','70.0.105.111',TIMESTAMP '1992-02-10 17:24:58'),
    (170,'Anne','along4p@squidoo.com','36.154.58.107',TIMESTAMP '2012-08-19 23:35:31'),
    (171,'Clarence','cbanks4q@webeden.co.uk','94.57.53.114',TIMESTAMP '1972-03-11 21:46:44'),
    (172,'Betty','bbowman4r@cyberchimps.com','178.115.209.69',TIMESTAMP '2013-01-13 21:34:51'),
    (173,'Andrew','ahudson4s@nytimes.com','84.32.252.144',TIMESTAMP '1998-09-15 14:20:04'),
    (174,'Keith','kgordon4t@cam.ac.uk','189.237.211.102',TIMESTAMP '2009-01-22 05:34:38'),
    (175,'Patrick','pwheeler4u@mysql.com','47.22.117.226',TIMESTAMP '1984-09-05 22:33:15'),
    (176,'Jesse','jfoster4v@mapquest.com','229.95.131.46',TIMESTAMP '1990-01-20 12:19:15'),
    (177,'Arthur','afisher4w@jugem.jp','107.255.244.98',TIMESTAMP '1983-10-13 11:08:46'),
    (178,'Nicole','nryan4x@wsj.com','243.211.33.221',TIMESTAMP '1974-05-30 23:19:14'),
    (179,'Bruce','bjohnson4y@sfgate.com','17.41.200.101',TIMESTAMP '1992-09-23 02:02:19'),
    (180,'Terry','tcox4z@reference.com','20.189.120.106',TIMESTAMP '1982-02-13 12:43:14'),
    (181,'Ashley','astanley50@kickstarter.com','86.3.56.98',TIMESTAMP '1976-05-09 01:27:16'),
    (182,'Michael','mrivera51@about.me','72.118.249.0',TIMESTAMP '1971-11-11 17:28:37'),
    (183,'Steven','sgonzalez52@mozilla.org','169.112.247.47',TIMESTAMP '2002-08-24 14:59:25'),
    (184,'Kathleen','kfuller53@bloglovin.com','80.93.59.30',TIMESTAMP '2002-03-11 13:41:29'),
    (185,'Nicole','nhenderson54@usda.gov','39.253.60.30',TIMESTAMP '1995-04-24 05:55:07'),
    (186,'Ralph','rharper55@purevolume.com','167.147.142.189',TIMESTAMP '1980-02-10 18:35:45'),
    (187,'Heather','hcunningham56@photobucket.com','96.222.196.229',TIMESTAMP '2007-06-15 05:37:50'),
    (188,'Nancy','nlittle57@cbc.ca','241.53.255.175',TIMESTAMP '2007-07-12 23:42:48'),
    (189,'Juan','jramirez58@pinterest.com','190.128.84.27',TIMESTAMP '1978-11-07 23:37:37'),
    (190,'Beverly','bfowler59@chronoengine.com','54.144.230.49',TIMESTAMP '1979-03-31 23:27:28'),
    (191,'Shirley','sstevens5a@prlog.org','200.97.231.248',TIMESTAMP '2011-12-06 07:08:50'),
    (192,'Annie','areyes5b@squidoo.com','223.32.182.101',TIMESTAMP '2011-05-28 02:42:09'),
    (193,'Jack','jkelley5c@tiny.cc','47.34.118.150',TIMESTAMP '1981-12-05 17:31:40'),
    (194,'Keith','krobinson5d@1und1.de','170.210.209.31',TIMESTAMP '1999-03-09 11:05:43'),
    (195,'Joseph','jmiller5e@google.com.au','136.74.212.139',TIMESTAMP '1984-10-08 13:18:20'),
    (196,'Annie','aday5f@blogspot.com','71.99.186.69',TIMESTAMP '1986-02-18 12:27:34'),
    (197,'Nancy','nperez5g@liveinternet.ru','28.160.6.107',TIMESTAMP '1983-10-20 17:51:20'),
    (198,'Tammy','tward5h@ucoz.ru','141.43.164.70',TIMESTAMP '1980-03-31 04:45:29'),
    (199,'Doris','dryan5i@ted.com','239.117.202.188',TIMESTAMP '1985-07-03 03:17:53'),
    (200,'Rose','rmendoza5j@photobucket.com','150.200.206.79',TIMESTAMP '1973-04-21 21:36:40'),
    (201,'Cynthia','cbutler5k@hubpages.com','80.153.174.161',TIMESTAMP '2001-01-20 01:42:26'),
    (202,'Samuel','soliver5l@people.com.cn','86.127.246.140',TIMESTAMP '1970-09-02 02:19:00'),
    (203,'Carl','csanchez5m@mysql.com','50.149.237.107',TIMESTAMP '1993-12-01 07:02:09'),
    (204,'Kathryn','kowens5n@geocities.jp','145.166.205.201',TIMESTAMP '2004-07-06 18:39:33'),
    (205,'Nicholas','nnichols5o@parallels.com','190.240.66.170',TIMESTAMP '2014-11-11 18:52:19'),
    (206,'Keith','kwillis5p@youtube.com','181.43.206.100',TIMESTAMP '1998-06-13 06:30:51'),
    (207,'Justin','jwebb5q@intel.com','211.54.245.74',TIMESTAMP '2000-11-04 16:58:26'),
    (208,'Gary','ghicks5r@wikipedia.org','196.154.213.104',TIMESTAMP '1992-12-01 19:48:28'),
    (209,'Martin','mpowell5s@flickr.com','153.67.12.241',TIMESTAMP '1983-06-30 06:24:32'),
    (210,'Brenda','bkelley5t@xinhuanet.com','113.100.5.172',TIMESTAMP '2005-01-08 20:50:22'),
    (211,'Edward','eray5u@a8.net','205.187.246.65',TIMESTAMP '2011-09-26 08:04:44'),
    (212,'Steven','slawson5v@senate.gov','238.150.250.36',TIMESTAMP '1978-11-22 02:48:09'),
    (213,'Robert','rthompson5w@furl.net','70.7.89.236',TIMESTAMP '2001-09-12 08:52:07'),
    (214,'Jack','jporter5x@diigo.com','220.172.29.99',TIMESTAMP '1976-07-26 14:29:21'),
    (215,'Lisa','ljenkins5y@oakley.com','150.151.170.180',TIMESTAMP '2010-03-20 19:21:16'),
    (216,'Theresa','tbell5z@mayoclinic.com','247.25.53.173',TIMESTAMP '2001-03-11 05:36:40'),
    (217,'Jimmy','jstephens60@weather.com','145.101.93.235',TIMESTAMP '1983-04-12 09:35:30'),
    (218,'Louis','lhunt61@amazon.co.jp','78.137.6.253',TIMESTAMP '1997-08-29 19:34:34'),
    (219,'Lawrence','lgilbert62@ted.com','243.132.8.78',TIMESTAMP '2015-04-08 22:06:56'),
    (220,'David','dgardner63@4shared.com','204.40.46.136',TIMESTAMP '1971-07-09 03:29:11'),
    (221,'Charles','ckennedy64@gmpg.org','211.83.233.2',TIMESTAMP '2011-02-26 11:55:04'),
    (222,'Lillian','lbanks65@msu.edu','124.233.12.80',TIMESTAMP '2010-05-16 20:29:02'),
    (223,'Ernest','enguyen66@baidu.com','82.45.128.148',TIMESTAMP '1996-07-04 10:07:04'),
    (224,'Ryan','rrussell67@cloudflare.com','202.53.240.223',TIMESTAMP '1983-08-05 12:36:29'),
    (225,'Donald','ddavis68@ustream.tv','47.39.218.137',TIMESTAMP '1989-05-27 02:30:56'),
    (226,'Joe','jscott69@blogspot.com','140.23.131.75',TIMESTAMP '1973-03-16 12:21:31'),
    (227,'Anne','amarshall6a@google.ca','113.162.200.197',TIMESTAMP '1988-12-09 03:38:29'),
    (228,'Willie','wturner6b@constantcontact.com','85.83.182.249',TIMESTAMP '1991-10-06 01:51:10'),
    (229,'Nicole','nwilson6c@sogou.com','30.223.51.135',TIMESTAMP '1977-05-29 19:54:56'),
    (230,'Janet','jwheeler6d@stumbleupon.com','153.194.27.144',TIMESTAMP '2011-03-13 12:48:47'),
    (231,'Lois','lcarr6e@statcounter.com','0.41.36.53',TIMESTAMP '1993-02-06 04:52:01'),
    (232,'Shirley','scruz6f@tmall.com','37.156.39.223',TIMESTAMP '2007-02-18 17:47:01'),
    (233,'Patrick','pford6g@reverbnation.com','36.198.200.89',TIMESTAMP '1977-03-06 15:47:24'),
    (234,'Lisa','lhudson6h@usatoday.com','134.213.58.137',TIMESTAMP '2014-10-28 01:56:56'),
    (235,'Pamela','pmartinez6i@opensource.org','5.151.127.202',TIMESTAMP '1987-11-30 16:44:47'),
    (236,'Larry','lperez6j@infoseek.co.jp','235.122.96.148',TIMESTAMP '1979-01-18 06:33:45'),
    (237,'Pamela','pramirez6k@census.gov','138.233.34.163',TIMESTAMP '2012-01-29 10:35:20'),
    (238,'Daniel','dcarr6l@php.net','146.21.152.242',TIMESTAMP '1984-11-17 08:22:59'),
    (239,'Patrick','psmith6m@indiegogo.com','136.222.199.36',TIMESTAMP '2001-05-30 22:16:44'),
    (240,'Raymond','rhenderson6n@hc360.com','116.31.112.38',TIMESTAMP '2000-01-05 20:35:41'),
    (241,'Teresa','treynolds6o@miitbeian.gov.cn','198.126.205.220',TIMESTAMP '1996-11-08 01:27:31'),
    (242,'Johnny','jmason6p@flickr.com','192.8.232.114',TIMESTAMP '2013-05-14 05:35:50'),
    (243,'Angela','akelly6q@guardian.co.uk','234.116.60.197',TIMESTAMP '1977-08-20 02:05:17'),
    (244,'Douglas','dcole6r@cmu.edu','128.135.212.69',TIMESTAMP '2016-10-26 17:40:36'),
    (245,'Frances','fcampbell6s@twitpic.com','94.22.243.235',TIMESTAMP '1987-04-26 07:07:13'),
    (246,'Donna','dgreen6t@chron.com','227.116.46.107',TIMESTAMP '2011-07-25 12:59:54'),
    (247,'Benjamin','bfranklin6u@redcross.org','89.141.142.89',TIMESTAMP '1974-05-03 20:28:18'),
    (248,'Randy','rpalmer6v@rambler.ru','70.173.63.178',TIMESTAMP '2011-12-20 17:40:18'),
    (249,'Melissa','mmurray6w@bbb.org','114.234.118.137',TIMESTAMP '1991-02-26 12:45:44'),
    (250,'Jean','jlittle6x@epa.gov','141.21.163.254',TIMESTAMP '1991-08-16 04:57:09'),
    (251,'Daniel','dolson6y@nature.com','125.75.104.97',TIMESTAMP '2010-04-23 06:25:54'),
    (252,'Kathryn','kwells6z@eventbrite.com','225.104.28.249',TIMESTAMP '2015-01-31 02:21:50'),
    (253,'Theresa','tgonzalez70@ox.ac.uk','91.93.156.26',TIMESTAMP '1971-12-11 10:31:31'),
    (254,'Beverly','broberts71@bluehost.com','244.40.158.89',TIMESTAMP '2013-09-21 13:02:31'),
    (255,'Pamela','pmurray72@netscape.com','218.54.95.216',TIMESTAMP '1985-04-16 00:34:00'),
    (256,'Timothy','trichardson73@amazonaws.com','235.49.24.229',TIMESTAMP '2000-11-11 09:48:28'),
    (257,'Mildred','mpalmer74@is.gd','234.125.95.132',TIMESTAMP '1992-05-25 02:25:02'),
    (258,'Jessica','jcampbell75@google.it','55.98.30.140',TIMESTAMP '2014-08-26 00:26:34'),
    (259,'Beverly','bthomas76@cpanel.net','48.78.228.176',TIMESTAMP '1970-08-18 10:40:05'),
    (260,'Eugene','eward77@cargocollective.com','139.226.204.2',TIMESTAMP '1996-12-04 23:17:00'),
    (261,'Andrea','aallen78@webnode.com','160.31.214.38',TIMESTAMP '2009-07-06 07:22:37'),
    (262,'Justin','jruiz79@merriam-webster.com','150.149.246.122',TIMESTAMP '2005-06-06 11:44:19'),
    (263,'Kenneth','kedwards7a@networksolutions.com','98.82.193.128',TIMESTAMP '2001-07-03 02:00:10'),
    (264,'Rachel','rday7b@miibeian.gov.cn','114.15.247.221',TIMESTAMP '1994-08-18 19:45:40'),
    (265,'Russell','rmiller7c@instagram.com','184.130.152.253',TIMESTAMP '1977-11-06 01:58:12'),
    (266,'Bonnie','bhudson7d@cornell.edu','235.180.186.206',TIMESTAMP '1990-12-03 22:45:24'),
    (267,'Raymond','rknight7e@yandex.ru','161.2.44.252',TIMESTAMP '1995-08-25 04:31:19'),
    (268,'Bonnie','brussell7f@elpais.com','199.237.57.207',TIMESTAMP '1991-03-29 08:32:06'),
    (269,'Marie','mhenderson7g@elpais.com','52.203.131.144',TIMESTAMP '2004-06-04 21:50:28'),
    (270,'Alan','acarr7h@trellian.com','147.51.205.72',TIMESTAMP '2005-03-03 10:51:31'),
    (271,'Barbara','bturner7i@hugedomains.com','103.160.110.226',TIMESTAMP '2004-08-04 13:42:40'),
    (272,'Christina','cdaniels7j@census.gov','0.238.61.251',TIMESTAMP '1972-10-18 12:47:33'),
    (273,'Jeremy','jgomez7k@reuters.com','111.26.65.56',TIMESTAMP '2013-01-13 10:41:35'),
    (274,'Laura','lwood7l@icio.us','149.153.38.205',TIMESTAMP '2011-06-25 09:33:59'),
    (275,'Matthew','mbowman7m@auda.org.au','182.138.206.172',TIMESTAMP '1999-03-05 03:25:36'),
    (276,'Denise','dparker7n@icq.com','0.213.88.138',TIMESTAMP '2011-11-04 09:43:06'),
    (277,'Phillip','pparker7o@discuz.net','219.242.165.240',TIMESTAMP '1973-10-19 04:22:29'),
    (278,'Joan','jpierce7p@salon.com','63.31.213.202',TIMESTAMP '1989-04-09 22:06:24'),
    (279,'Irene','ibaker7q@cbc.ca','102.33.235.114',TIMESTAMP '1992-09-04 13:00:57'),
    (280,'Betty','bbowman7r@ted.com','170.91.249.242',TIMESTAMP '2015-09-28 08:14:22'),
    (281,'Teresa','truiz7s@boston.com','82.108.158.207',TIMESTAMP '1999-07-18 05:17:09'),
    (282,'Helen','hbrooks7t@slideshare.net','102.87.162.187',TIMESTAMP '2003-01-06 15:45:29'),
    (283,'Karen','kgriffin7u@wunderground.com','43.82.44.184',TIMESTAMP '2010-05-28 01:56:37'),
    (284,'Lisa','lfernandez7v@mtv.com','200.238.218.220',TIMESTAMP '1993-04-03 20:33:51'),
    (285,'Jesse','jlawrence7w@timesonline.co.uk','95.122.105.78',TIMESTAMP '1990-01-05 17:28:43'),
    (286,'Terry','tross7x@macromedia.com','29.112.114.133',TIMESTAMP '2009-08-29 21:32:17'),
    (287,'Angela','abradley7y@icq.com','177.44.27.72',TIMESTAMP '1989-10-04 21:46:06'),
    (288,'Maria','mhart7z@dailymotion.com','55.27.55.202',TIMESTAMP '1975-01-21 01:22:57'),
    (289,'Raymond','randrews80@pinterest.com','88.90.78.67',TIMESTAMP '1992-03-16 21:37:40'),
    (290,'Kathy','krice81@bluehost.com','212.63.196.102',TIMESTAMP '2000-12-14 03:06:44'),
    (291,'Cynthia','cramos82@nymag.com','107.89.190.6',TIMESTAMP '2005-06-28 02:02:33'),
    (292,'Kimberly','kjones83@mysql.com','86.169.101.101',TIMESTAMP '2007-06-13 22:56:49'),
    (293,'Timothy','thansen84@microsoft.com','108.100.254.90',TIMESTAMP '2003-04-04 10:31:57'),
    (294,'Carol','cspencer85@berkeley.edu','75.118.144.187',TIMESTAMP '1999-03-30 14:53:21'),
    (295,'Louis','lmedina86@latimes.com','141.147.163.24',TIMESTAMP '1991-04-11 17:53:13'),
    (296,'Margaret','mcole87@google.fr','53.184.26.83',TIMESTAMP '1991-12-19 01:54:10'),
    (297,'Mary','mgomez88@yellowpages.com','208.56.57.99',TIMESTAMP '1976-05-21 18:05:08'),
    (298,'Amanda','aanderson89@geocities.com','147.73.15.252',TIMESTAMP '1987-08-22 15:05:28'),
    (299,'Kathryn','kgarrett8a@nature.com','27.29.177.220',TIMESTAMP '1976-07-15 04:25:04'),
    (300,'Dorothy','dmason8b@shareasale.com','106.210.99.193',TIMESTAMP '1990-09-03 21:39:31'),
    (301,'Lois','lkennedy8c@amazon.de','194.169.29.187',TIMESTAMP '2007-07-29 14:09:31'),
    (302,'Irene','iburton8d@washingtonpost.com','196.143.110.249',TIMESTAMP '2013-09-05 11:32:46'),
    (303,'Betty','belliott8e@wired.com','183.105.222.199',TIMESTAMP '1979-09-19 19:29:13'),
    (304,'Bobby','bmeyer8f@census.gov','36.13.161.145',TIMESTAMP '2014-05-24 14:34:39'),
    (305,'Ann','amorrison8g@sfgate.com','72.154.54.137',TIMESTAMP '1978-10-05 14:22:34'),
    (306,'Daniel','djackson8h@wunderground.com','144.95.32.34',TIMESTAMP '1990-07-27 13:23:05'),
    (307,'Joe','jboyd8i@alibaba.com','187.105.86.178',TIMESTAMP '2011-09-28 16:46:32'),
    (308,'Ralph','rdunn8j@fc2.com','3.19.87.255',TIMESTAMP '1984-10-18 08:00:40'),
    (309,'Craig','ccarter8k@gizmodo.com','235.152.76.215',TIMESTAMP '1998-07-04 12:15:21'),
    (310,'Paula','pdean8l@hhs.gov','161.100.173.197',TIMESTAMP '1973-02-13 09:38:55'),
    (311,'Andrew','agarrett8m@behance.net','199.253.123.218',TIMESTAMP '1991-02-14 13:36:32'),
    (312,'Janet','jhowell8n@alexa.com','39.189.139.79',TIMESTAMP '2012-11-24 20:17:33'),
    (313,'Keith','khansen8o@godaddy.com','116.186.223.196',TIMESTAMP '1987-08-23 21:22:05'),
    (314,'Nicholas','nedwards8p@state.gov','142.175.142.11',TIMESTAMP '1977-03-28 18:27:27'),
    (315,'Jacqueline','jallen8q@oaic.gov.au','189.66.135.192',TIMESTAMP '1994-10-26 11:44:26'),
    (316,'Frank','fgardner8r@mapy.cz','154.77.119.169',TIMESTAMP '1983-01-29 19:19:51'),
    (317,'Eric','eharrison8s@google.cn','245.139.65.123',TIMESTAMP '1984-02-04 09:54:36'),
    (318,'Gregory','gcooper8t@go.com','171.147.0.221',TIMESTAMP '2004-06-14 05:22:08'),
    (319,'Jean','jfreeman8u@rakuten.co.jp','67.243.121.5',TIMESTAMP '1977-01-07 18:23:43'),
    (320,'Juan','jlewis8v@shinystat.com','216.181.171.189',TIMESTAMP '2001-08-23 17:32:43'),
    (321,'Randy','rwilliams8w@shinystat.com','105.152.146.28',TIMESTAMP '1983-02-17 00:05:50'),
    (322,'Stephen','shart8x@sciencedirect.com','196.131.205.148',TIMESTAMP '2004-02-15 10:12:03'),
    (323,'Annie','ahunter8y@example.com','63.36.34.103',TIMESTAMP '2003-07-23 21:15:25'),
    (324,'Melissa','mflores8z@cbc.ca','151.230.217.90',TIMESTAMP '1983-11-02 14:53:56'),
    (325,'Jane','jweaver90@about.me','0.167.235.217',TIMESTAMP '1987-07-29 00:13:44'),
    (326,'Anthony','asmith91@oracle.com','97.87.48.41',TIMESTAMP '2001-05-31 18:44:11'),
    (327,'Terry','tdavis92@buzzfeed.com','46.20.12.51',TIMESTAMP '2015-09-12 23:13:55'),
    (328,'Brandon','bmontgomery93@gravatar.com','252.101.48.186',TIMESTAMP '2010-10-28 08:26:27'),
    (329,'Chris','cmurray94@bluehost.com','25.158.167.97',TIMESTAMP '2004-05-05 16:10:31'),
    (330,'Denise','dfuller95@hugedomains.com','216.210.149.28',TIMESTAMP '1979-04-20 08:57:24'),
    (331,'Arthur','amcdonald96@sakura.ne.jp','206.42.36.213',TIMESTAMP '2009-08-15 03:26:16'),
    (332,'Jesse','jhoward97@google.cn','46.181.118.30',TIMESTAMP '1974-04-18 14:08:41'),
    (333,'Frank','fsimpson98@domainmarket.com','163.220.211.87',TIMESTAMP '2006-06-30 14:46:52'),
    (334,'Janice','jwoods99@pen.io','229.245.237.182',TIMESTAMP '1988-04-06 11:52:58'),
    (335,'Rebecca','rroberts9a@huffingtonpost.com','148.96.15.80',TIMESTAMP '1976-10-05 08:44:16'),
    (336,'Joshua','jray9b@opensource.org','192.253.12.198',TIMESTAMP '1971-12-25 22:27:07'),
    (337,'Joyce','jcarpenter9c@statcounter.com','125.171.46.215',TIMESTAMP '2001-12-31 22:08:13'),
    (338,'Andrea','awest9d@privacy.gov.au','79.101.180.201',TIMESTAMP '1983-02-18 20:07:47'),
    (339,'Christine','chudson9e@yelp.com','64.198.43.56',TIMESTAMP '1997-09-08 08:03:43'),
    (340,'Joe','jparker9f@earthlink.net','251.215.148.153',TIMESTAMP '1973-11-04 05:08:18'),
    (341,'Thomas','tkim9g@answers.com','49.187.34.47',TIMESTAMP '1991-08-07 21:13:48'),
    (342,'Janice','jdean9h@scientificamerican.com','4.197.117.16',TIMESTAMP '2009-12-08 02:35:49'),
    (343,'James','jmitchell9i@umich.edu','43.121.18.147',TIMESTAMP '2011-04-28 17:04:09'),
    (344,'Charles','cgardner9j@purevolume.com','197.78.240.240',TIMESTAMP '1998-02-11 06:47:07'),
    (345,'Robert','rhenderson9k@friendfeed.com','215.84.180.88',TIMESTAMP '2002-05-10 15:33:14'),
    (346,'Chris','cgray9l@4shared.com','249.70.192.240',TIMESTAMP '1998-10-03 16:43:42'),
    (347,'Gloria','ghayes9m@hibu.com','81.103.138.26',TIMESTAMP '1999-12-26 11:23:13'),
    (348,'Edward','eramirez9n@shareasale.com','38.136.90.136',TIMESTAMP '2010-08-19 08:01:06'),
    (349,'Cheryl','cbutler9o@google.ca','172.180.78.172',TIMESTAMP '1995-05-27 20:03:52'),
    (350,'Margaret','mwatkins9p@sfgate.com','3.20.198.6',TIMESTAMP '2014-10-21 01:42:58'),
    (351,'Rebecca','rwelch9q@examiner.com','45.81.42.208',TIMESTAMP '2001-02-08 12:19:06'),
    (352,'Joe','jpalmer9r@phpbb.com','163.202.92.190',TIMESTAMP '1970-01-05 11:29:12'),
    (353,'Sandra','slewis9s@dyndns.org','77.215.201.236',TIMESTAMP '1974-01-05 07:04:04'),
    (354,'Todd','tfranklin9t@g.co','167.125.181.82',TIMESTAMP '2009-09-28 10:13:58'),
    (355,'Joseph','jlewis9u@webmd.com','244.204.6.11',TIMESTAMP '1990-10-21 15:49:57'),
    (356,'Alan','aknight9v@nydailynews.com','152.197.95.83',TIMESTAMP '1996-03-08 08:43:17'),
    (357,'Sharon','sdean9w@123-reg.co.uk','237.46.40.26',TIMESTAMP '1985-11-30 12:09:24'),
    (358,'Annie','awright9x@cafepress.com','190.45.231.111',TIMESTAMP '2000-08-24 11:56:06'),
    (359,'Diane','dhamilton9y@youtube.com','85.146.171.196',TIMESTAMP '2015-02-24 02:03:57'),
    (360,'Antonio','alane9z@auda.org.au','61.63.146.203',TIMESTAMP '2001-05-13 03:43:34'),
    (361,'Matthew','mallena0@hhs.gov','29.97.32.19',TIMESTAMP '1973-02-19 23:43:32'),
    (362,'Bonnie','bfowlera1@soup.io','251.216.99.53',TIMESTAMP '2013-08-01 15:35:41'),
    (363,'Margaret','mgraya2@examiner.com','69.255.151.79',TIMESTAMP '1998-01-23 22:24:59'),
    (364,'Joan','jwagnera3@printfriendly.com','192.166.120.61',TIMESTAMP '1973-07-13 00:30:22'),
    (365,'Catherine','cperkinsa4@nytimes.com','58.21.24.214',TIMESTAMP '2006-11-19 11:52:26'),
    (366,'Mark','mcartera5@cpanel.net','220.33.102.142',TIMESTAMP '2007-09-09 09:43:27'),
    (367,'Paula','ppricea6@msn.com','36.182.238.124',TIMESTAMP '2009-11-11 09:13:05'),
    (368,'Catherine','cgreena7@army.mil','228.203.58.19',TIMESTAMP '2005-08-09 16:52:15'),
    (369,'Helen','hhamiltona8@symantec.com','155.56.194.99',TIMESTAMP '2005-02-01 05:40:36'),
    (370,'Jane','jmeyera9@ezinearticles.com','133.244.113.213',TIMESTAMP '2013-11-06 22:10:23'),
    (371,'Wanda','wevansaa@bloglovin.com','233.125.192.48',TIMESTAMP '1994-12-26 23:43:42'),
    (372,'Mark','mmarshallab@tumblr.com','114.74.60.47',TIMESTAMP '2016-09-29 18:03:01'),
    (373,'Andrew','amartinezac@google.cn','182.54.37.130',TIMESTAMP '1976-06-06 17:04:17'),
    (374,'Helen','hmoralesad@e-recht24.de','42.45.4.123',TIMESTAMP '1977-03-28 19:06:59'),
    (375,'Bonnie','bstoneae@php.net','196.149.79.137',TIMESTAMP '1970-02-05 17:05:58'),
    (376,'Douglas','dfreemanaf@nasa.gov','215.65.124.218',TIMESTAMP '2008-11-20 21:51:55'),
    (377,'Willie','wwestag@army.mil','35.189.92.118',TIMESTAMP '1992-07-24 05:08:08'),
    (378,'Cheryl','cwagnerah@upenn.edu','228.239.222.141',TIMESTAMP '2010-01-25 06:29:01'),
    (379,'Sandra','swardai@baidu.com','63.11.113.240',TIMESTAMP '1985-05-23 08:07:37'),
    (380,'Julie','jrobinsonaj@jugem.jp','110.58.202.50',TIMESTAMP '2015-03-05 09:42:07'),
    (381,'Larry','lwagnerak@shop-pro.jp','98.234.25.24',TIMESTAMP '1975-07-22 22:22:02'),
    (382,'Juan','jcastilloal@yelp.com','24.174.74.202',TIMESTAMP '2007-01-17 09:32:43'),
    (383,'Donna','dfrazieram@artisteer.com','205.26.147.45',TIMESTAMP '1990-02-11 20:55:46'),
    (384,'Rachel','rfloresan@w3.org','109.60.216.162',TIMESTAMP '1983-05-22 22:42:18'),
    (385,'Robert','rreynoldsao@theguardian.com','122.65.209.130',TIMESTAMP '2009-05-01 18:02:51'),
    (386,'Donald','dbradleyap@etsy.com','42.54.35.126',TIMESTAMP '1997-01-16 16:31:52'),
    (387,'Rachel','rfisheraq@nih.gov','160.243.250.45',TIMESTAMP '2006-02-17 22:05:49'),
    (388,'Nicholas','nhamiltonar@princeton.edu','156.211.37.111',TIMESTAMP '1976-06-21 03:36:29'),
    (389,'Timothy','twhiteas@ca.gov','36.128.23.70',TIMESTAMP '1975-09-24 03:51:18'),
    (390,'Diana','dbradleyat@odnoklassniki.ru','44.102.120.184',TIMESTAMP '1983-04-27 09:02:50'),
    (391,'Billy','bfowlerau@jimdo.com','91.200.68.196',TIMESTAMP '1995-01-29 06:57:35'),
    (392,'Bruce','bandrewsav@ucoz.com','48.12.101.125',TIMESTAMP '1992-10-27 04:31:39'),
    (393,'Linda','lromeroaw@usa.gov','100.71.233.19',TIMESTAMP '1992-06-08 15:13:18'),
    (394,'Debra','dwatkinsax@ucoz.ru','52.160.233.193',TIMESTAMP '2001-11-11 06:51:01'),
    (395,'Katherine','kburkeay@wix.com','151.156.242.141',TIMESTAMP '2010-06-14 19:54:28'),
    (396,'Martha','mharrisonaz@youku.com','21.222.10.199',TIMESTAMP '1989-10-16 14:17:55'),
    (397,'Dennis','dwellsb0@youtu.be','103.16.29.3',TIMESTAMP '1985-12-21 06:05:51'),
    (398,'Gloria','grichardsb1@bloglines.com','90.147.120.234',TIMESTAMP '1982-08-27 01:04:43'),
    (399,'Brenda','bfullerb2@t.co','33.253.63.90',TIMESTAMP '2011-04-20 05:00:35'),
    (400,'Larry','lhendersonb3@disqus.com','88.95.132.128',TIMESTAMP '1982-08-31 02:15:12'),
    (401,'Richard','rlarsonb4@wisc.edu','13.48.231.150',TIMESTAMP '1979-04-15 14:08:09'),
    (402,'Terry','thuntb5@usa.gov','65.91.103.240',TIMESTAMP '1998-05-15 11:50:49'),
    (403,'Harry','hburnsb6@nasa.gov','33.38.21.244',TIMESTAMP '1981-04-12 14:02:20'),
    (404,'Diana','dellisb7@mlb.com','218.229.81.135',TIMESTAMP '1997-01-29 00:17:25'),
    (405,'Jack','jburkeb8@tripadvisor.com','210.227.182.216',TIMESTAMP '1984-03-09 17:24:03'),
    (406,'Julia','jlongb9@fotki.com','10.210.12.104',TIMESTAMP '2005-10-26 03:54:13'),
    (407,'Lois','lscottba@msu.edu','188.79.136.138',TIMESTAMP '1973-02-02 18:40:39'),
    (408,'Sandra','shendersonbb@shareasale.com','114.171.220.108',TIMESTAMP '2012-06-09 18:22:26'),
    (409,'Irene','isanchezbc@cdbaby.com','109.255.50.119',TIMESTAMP '1983-09-28 21:11:27'),
    (410,'Emily','ebrooksbd@bandcamp.com','227.81.93.79',TIMESTAMP '1970-08-31 21:08:01'),
    (411,'Michelle','mdiazbe@businessweek.com','236.249.6.226',TIMESTAMP '1993-05-22 08:07:07'),
    (412,'Tammy','tbennettbf@wisc.edu','145.253.239.152',TIMESTAMP '1978-12-31 20:24:51'),
    (413,'Christine','cgreenebg@flickr.com','97.25.140.118',TIMESTAMP '1978-07-17 12:55:30'),
    (414,'Patricia','pgarzabh@tuttocitta.it','139.246.192.211',TIMESTAMP '1984-02-27 13:40:08'),
    (415,'Kimberly','kromerobi@aol.com','73.56.88.247',TIMESTAMP '1976-09-16 14:22:04'),
    (416,'George','gjohnstonbj@fda.gov','240.36.245.185',TIMESTAMP '1979-07-24 14:36:02'),
    (417,'Eugene','efullerbk@sciencedaily.com','42.38.105.140',TIMESTAMP '2012-09-12 01:56:41'),
    (418,'Andrea','astevensbl@goo.gl','31.152.207.204',TIMESTAMP '1979-05-24 11:06:21'),
    (419,'Shirley','sreidbm@scientificamerican.com','103.60.31.241',TIMESTAMP '1984-02-23 04:07:41'),
    (420,'Terry','tmorenobn@blinklist.com','92.161.34.42',TIMESTAMP '1994-06-25 14:01:35'),
    (421,'Christopher','cmorenobo@go.com','158.86.176.82',TIMESTAMP '1973-09-05 09:18:47'),
    (422,'Dennis','dhansonbp@ning.com','40.160.81.75',TIMESTAMP '1982-01-20 10:19:41'),
    (423,'Beverly','brussellbq@de.vu','138.32.56.204',TIMESTAMP '1997-11-06 07:20:19'),
    (424,'Howard','hparkerbr@163.com','103.171.134.171',TIMESTAMP '2015-06-24 15:37:10'),
    (425,'Helen','hmccoybs@fema.gov','61.200.4.71',TIMESTAMP '1995-06-20 08:59:10'),
    (426,'Ann','ahudsonbt@cafepress.com','239.187.71.125',TIMESTAMP '1977-04-11 07:59:28'),
    (427,'Tina','twestbu@nhs.uk','80.213.117.74',TIMESTAMP '1992-08-19 05:54:44'),
    (428,'Terry','tnguyenbv@noaa.gov','21.93.118.95',TIMESTAMP '1991-09-19 23:22:55'),
    (429,'Ashley','aburtonbw@wix.com','233.176.205.109',TIMESTAMP '2009-11-10 05:01:20'),
    (430,'Eric','emyersbx@1und1.de','168.91.212.67',TIMESTAMP '1987-08-10 07:16:20'),
    (431,'Barbara','blittleby@lycos.com','242.14.189.239',TIMESTAMP '2008-08-02 12:13:04'),
    (432,'Sean','sevansbz@instagram.com','14.39.177.13',TIMESTAMP '2007-04-16 17:28:49'),
    (433,'Shirley','sburtonc0@newsvine.com','34.107.138.76',TIMESTAMP '1980-12-10 02:19:29'),
    (434,'Patricia','pfreemanc1@so-net.ne.jp','219.213.142.117',TIMESTAMP '1987-03-01 02:25:45'),
    (435,'Paula','pfosterc2@vkontakte.ru','227.14.138.141',TIMESTAMP '1972-09-22 12:59:34'),
    (436,'Nicole','nstewartc3@1688.com','8.164.23.115',TIMESTAMP '1998-10-27 00:10:17'),
    (437,'Earl','ekimc4@ovh.net','100.26.244.177',TIMESTAMP '2013-01-22 10:05:46'),
    (438,'Beverly','breedc5@reuters.com','174.12.226.27',TIMESTAMP '1974-09-22 07:29:36'),
    (439,'Lawrence','lbutlerc6@a8.net','105.164.42.164',TIMESTAMP '1992-06-05 00:43:40'),
    (440,'Charles','cmoorec7@ucoz.com','252.197.131.69',TIMESTAMP '1990-04-09 02:34:05'),
    (441,'Alice','alawsonc8@live.com','183.73.220.232',TIMESTAMP '1989-02-28 09:11:04'),
    (442,'Dorothy','dcarpenterc9@arstechnica.com','241.47.200.14',TIMESTAMP '2005-05-02 19:57:21'),
    (443,'Carolyn','cfowlerca@go.com','213.109.55.202',TIMESTAMP '1978-09-10 20:18:20'),
    (444,'Anthony','alongcb@free.fr','169.221.158.204',TIMESTAMP '1984-09-13 01:59:23'),
    (445,'Annie','amoorecc@e-recht24.de','50.34.148.61',TIMESTAMP '2009-03-26 03:41:07'),
    (446,'Carlos','candrewscd@ihg.com','236.69.59.212',TIMESTAMP '1972-03-29 22:42:48'),
    (447,'Beverly','bramosce@google.ca','164.250.184.49',TIMESTAMP '1982-11-10 04:34:01'),
    (448,'Teresa','tlongcf@umich.edu','174.88.53.223',TIMESTAMP '1987-05-17 12:48:00'),
    (449,'Roy','rboydcg@uol.com.br','91.58.243.215',TIMESTAMP '1974-06-16 17:59:54'),
    (450,'Ashley','afieldsch@tamu.edu','130.138.11.126',TIMESTAMP '1983-09-15 05:52:36'),
    (451,'Judith','jhawkinsci@cmu.edu','200.187.103.245',TIMESTAMP '2003-10-22 12:24:03'),
    (452,'Rebecca','rwestcj@ocn.ne.jp','72.85.3.103',TIMESTAMP '1980-11-13 11:01:26'),
    (453,'Raymond','rporterck@infoseek.co.jp','146.33.216.151',TIMESTAMP '1982-05-17 23:58:03'),
    (454,'Janet','jmarshallcl@odnoklassniki.ru','52.46.193.166',TIMESTAMP '1998-10-04 00:02:21'),
    (455,'Shirley','speterscm@salon.com','248.126.31.15',TIMESTAMP '1987-01-30 06:04:59'),
    (456,'Annie','abowmancn@economist.com','222.213.248.59',TIMESTAMP '2006-03-14 23:52:59'),
    (457,'Jean','jlarsonco@blogspot.com','71.41.25.195',TIMESTAMP '2007-09-08 23:49:45'),
    (458,'Phillip','pmoralescp@stanford.edu','74.119.87.28',TIMESTAMP '2011-03-14 20:25:40'),
    (459,'Norma','nrobinsoncq@economist.com','28.225.21.54',TIMESTAMP '1989-10-21 01:22:43'),
    (460,'Kimberly','kclarkcr@dion.ne.jp','149.171.132.153',TIMESTAMP '2008-06-27 02:27:30'),
    (461,'Ruby','rmorriscs@ucla.edu','177.85.163.249',TIMESTAMP '2016-01-28 16:43:44'),
    (462,'Jonathan','jcastilloct@tripod.com','78.4.28.77',TIMESTAMP '2000-05-24 17:33:06'),
    (463,'Edward','ebryantcu@jigsy.com','140.31.98.193',TIMESTAMP '1992-12-17 08:32:47'),
    (464,'Chris','chamiltoncv@eepurl.com','195.171.234.206',TIMESTAMP '1970-12-05 03:42:19'),
    (465,'Michael','mweavercw@reference.com','7.233.133.213',TIMESTAMP '1987-03-29 02:30:54'),
    (466,'Howard','hlawrencecx@businessweek.com','113.225.124.224',TIMESTAMP '1990-07-30 07:20:57'),
    (467,'Philip','phowardcy@comsenz.com','159.170.247.249',TIMESTAMP '2010-10-15 10:18:37'),
    (468,'Mary','mmarshallcz@xing.com','125.132.189.70',TIMESTAMP '2007-07-19 13:48:47'),
    (469,'Scott','salvarezd0@theguardian.com','78.49.103.230',TIMESTAMP '1987-10-31 06:10:44'),
    (470,'Wayne','wcarrolld1@blog.com','238.1.120.204',TIMESTAMP '1980-11-19 03:26:10'),
    (471,'Jennifer','jwoodsd2@multiply.com','92.20.224.49',TIMESTAMP '2010-05-06 22:17:04'),
    (472,'Raymond','rwelchd3@toplist.cz','176.158.35.240',TIMESTAMP '2007-12-12 19:02:51'),
    (473,'Steven','sdixond4@wisc.edu','167.55.237.52',TIMESTAMP '1984-05-05 11:44:37'),
    (474,'Ralph','rjamesd5@ameblo.jp','241.190.50.133',TIMESTAMP '2000-07-06 08:44:37'),
    (475,'Jason','jrobinsond6@hexun.com','138.119.139.56',TIMESTAMP '2006-02-03 05:27:45'),
    (476,'Doris','dwoodd7@fema.gov','180.220.156.190',TIMESTAMP '1978-05-11 20:14:20'),
    (477,'Elizabeth','eberryd8@youtu.be','74.188.53.229',TIMESTAMP '2006-11-18 08:29:06'),
    (478,'Irene','igilbertd9@privacy.gov.au','194.152.218.1',TIMESTAMP '1985-09-17 02:46:52'),
    (479,'Jessica','jdeanda@ameblo.jp','178.103.93.118',TIMESTAMP '1974-06-07 19:04:05'),
    (480,'Rachel','ralvarezdb@phoca.cz','17.22.223.174',TIMESTAMP '1999-03-08 02:43:25'),
    (481,'Kenneth','kthompsondc@shinystat.com','229.119.91.234',TIMESTAMP '2007-05-15 13:17:32'),
    (482,'Harold','hmurraydd@parallels.com','133.26.188.80',TIMESTAMP '1993-11-15 03:42:07'),
    (483,'Paula','phowellde@samsung.com','34.215.28.216',TIMESTAMP '1993-11-29 15:55:00'),
    (484,'Ruth','rpiercedf@tripadvisor.com','111.30.130.123',TIMESTAMP '1986-08-17 10:19:38'),
    (485,'Phyllis','paustindg@vk.com','50.84.34.178',TIMESTAMP '1994-04-13 03:05:24'),
    (486,'Laura','lfosterdh@usnews.com','37.8.101.33',TIMESTAMP '2001-06-30 08:58:59'),
    (487,'Eric','etaylordi@com.com','103.183.253.45',TIMESTAMP '2006-09-15 20:18:46'),
    (488,'Doris','driveradj@prweb.com','247.16.2.199',TIMESTAMP '1989-05-08 09:27:09'),
    (489,'Ryan','rhughesdk@elegantthemes.com','103.234.153.232',TIMESTAMP '1989-08-01 18:36:06'),
    (490,'Steve','smoralesdl@jigsy.com','3.76.84.207',TIMESTAMP '2011-03-13 17:01:05'),
    (491,'Louis','lsullivandm@who.int','78.135.44.208',TIMESTAMP '1975-11-26 16:01:23'),
    (492,'Catherine','ctuckerdn@seattletimes.com','93.137.106.21',TIMESTAMP '1990-03-13 16:14:56'),
    (493,'Ann','adixondo@gmpg.org','191.136.222.111',TIMESTAMP '2002-06-05 14:22:18'),
    (494,'Johnny','jhartdp@amazon.com','103.252.198.39',TIMESTAMP '1988-07-30 23:54:49'),
    (495,'Susan','srichardsdq@skype.com','126.247.192.11',TIMESTAMP '2005-01-09 12:08:14'),
    (496,'Brenda','bparkerdr@skype.com','63.232.216.86',TIMESTAMP '1974-05-18 05:58:29'),
    (497,'Tammy','tmurphyds@constantcontact.com','56.56.37.112',TIMESTAMP '2014-08-05 18:22:25'),
    (498,'Larry','lhayesdt@wordpress.com','162.146.13.46',TIMESTAMP '1997-02-26 14:01:53'),
    (499,NULL,'ethomasdu@hhs.gov','6.241.88.250',TIMESTAMP '2007-09-14 13:03:34'),
    (500,'Paula','pshawdv@networksolutions.com','123.27.47.249',TIMESTAMP '2003-10-30 21:19:20')
"""


================================================
FILE: tests/functional/adapter/simple_seed/test_seed.py
================================================
from pathlib import Path

import pytest
from dbt.tests.adapter.simple_seed.test_seed import (
    TestBasicSeedTests as CoreTestBasicSeedTests,
)
from dbt.tests.adapter.simple_seed.test_seed import (
    TestSeedConfigFullRefreshOff as CoreTestSeedConfigFullRefreshOff,
)
from dbt.tests.adapter.simple_seed.test_seed import (
    TestSeedConfigFullRefreshOn as CoreTestSeedConfigFullRefreshOn,
)
from dbt.tests.adapter.simple_seed.test_seed import (
    TestSeedCustomSchema as CoreTestSeedCustomSchema,
)
from dbt.tests.adapter.simple_seed.test_seed import (
    TestSeedParsing as CoreTestSeedParsing,
)
from dbt.tests.adapter.simple_seed.test_seed import (
    TestSeedSpecificFormats as CoreTestSeedSpecificFormats,
)
from dbt.tests.adapter.simple_seed.test_seed import (
    TestSeedWithEmptyDelimiter as CoreTestSeedWithEmptyDelimiter,
)
from dbt.tests.adapter.simple_seed.test_seed import (
    TestSeedWithUniqueDelimiter as CoreTestSeedWithUniqueDelimiter,
)
from dbt.tests.adapter.simple_seed.test_seed import (
    TestSeedWithWrongDelimiter as CoreTestSeedWithWrongDelimiter,
)
from dbt.tests.adapter.simple_seed.test_seed import (
    TestSimpleSeedEnabledViaConfig as CoreTestSimpleSeedEnabledViaConfig,
)
from dbt.tests.adapter.simple_seed.test_seed import (
    TestSimpleSeedWithBOM as CoreTestSimpleSeedWithBOM,
)
from dbt.tests.util import copy_file, run_dbt

from tests.functional.adapter.simple_seed.seeds import (
    trino_seeds__expected_sql_create_table,
    trino_seeds__expected_sql_insert_into,
)


class TrinoSetUpFixture:
    @pytest.fixture(scope="class", autouse=True)
    def setUp(self, project):
        """Create table for ensuring seeds and models used in tests build correctly"""
        project.run_sql(trino_seeds__expected_sql_create_table)
        project.run_sql(trino_seeds__expected_sql_insert_into)


class TestTrinoBasicSeedTests(TrinoSetUpFixture, CoreTestBasicSeedTests):
    # TODO Trino currently does not support DROP TABLE CASCADE.
    #  Dropping seed won't drop downstream models automatically.
    @pytest.mark.skip
    def test_simple_seed_full_refresh_flag(self, project):
        pass


# TODO Trino currently does not support DROP TABLE CASCADE.
#  Dropping seed won't drop downstream models automatically.
@pytest.mark.skip
class TestTrinoSeedConfigFullRefreshOn(TrinoSetUpFixture, CoreTestSeedConfigFullRefreshOn):
    pass


class TestTrinoSeedConfigFullRefreshOff(TrinoSetUpFixture, CoreTestSeedConfigFullRefreshOff):
    pass


class TestTrinoSeedCustomSchema(TrinoSetUpFixture, CoreTestSeedCustomSchema):
    pass


class TestTrinoSeedWithUniqueDelimiter(TrinoSetUpFixture, CoreTestSeedWithUniqueDelimiter):
    pass


class TestTrinoSeedWithWrongDelimiter(TrinoSetUpFixture, CoreTestSeedWithWrongDelimiter):
    def test_seed_with_wrong_delimiter(self, project):
        """Testing failure of running dbt seed with a wrongly configured delimiter"""
        seed_result = run_dbt(["seed"], expect_pass=False)
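        # Trino surfaces the malformed seed insert as a SYNTAX_ERROR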
        assert "syntax_error" in seed_result.results[0].message.lower()


class TestTrinoSeedWithEmptyDelimiter(TrinoSetUpFixture, CoreTestSeedWithEmptyDelimiter):
    pass


class TestTrinoSimpleSeedEnabledViaConfig(CoreTestSimpleSeedEnabledViaConfig):
    pass


class TestTrinoSeedParsing(TrinoSetUpFixture, CoreTestSeedParsing):
    pass


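# In addition to pre-creating the expected table, copy the UTF-8-BOM-encoded
# seed file into the project's seeds directory before the tests run.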
class TestTrinoSimpleSeedWithBOM(CoreTestSimpleSeedWithBOM):
    @pytest.fixture(scope="class", autouse=True)
    def setUp(self, project):
        """Create table for ensuring seeds and models used in tests build correctly"""
        project.run_sql(trino_seeds__expected_sql_create_table)
        project.run_sql(trino_seeds__expected_sql_insert_into)
        copy_file(
            project.test_dir,
            "seed_bom.csv",
            project.project_root / Path("seeds") / "seed_bom.csv",
            "",
        )


class TestTrinoSeedSpecificFormats(CoreTestSeedSpecificFormats):
    pass


================================================
FILE: tests/functional/adapter/store_failures/fixtures.py
================================================
seed_csv = """
id,value
1,1
2,2
3,3
4,4
""".lstrip()

table_model = """
select * from {{ ref('seed') }}
"""

table_profile_yml = """
version: 2
models:
  - name: table_model
    columns:
      - name: id
        tests:
          - unique
          - not_null
      - name: value
        quote: true
        tests:
          - not_null
          - accepted_values:
              values:
                - 1
                - 2
                - 3
                - 4
              quote: false

seeds:
  - name: seed
    columns:
      - name: id
      - name: value
        tests:
          - not_null
"""


================================================
FILE: tests/functional/adapter/store_failures/test_store_failures.py
================================================
import pytest
from dbt.tests.adapter.store_test_failures_tests import basic
from dbt.tests.adapter.store_test_failures_tests.test_store_test_failures import (
    TestStoreTestFailures,
)
from dbt.tests.util import run_dbt

from tests.functional.adapter.store_failures.fixtures import (
    seed_csv,
    table_model,
    table_profile_yml,
)


class TestStoreFailuresTable:
    @property
    def schema(self):
        return "default"

    # everything that goes in the "seeds" directory
    @pytest.fixture(scope="class")
    def seeds(self):
        return {
            "seed.csv": seed_csv,
        }

    @pytest.fixture(scope="class")
    def project_config_update(self):
        return {
            "name": "store_failures_tests",
            "quoting": {
                "database": False,
                "schema": False,
                "identifier": True,
            },
            "models": {
                "+materialized": "table",
            },
            "tests": {
                "+store_failures": True,
            },
        }

    # everything that goes in the "models" directory
    @pytest.fixture(scope="class")
    def models(self):
        return {
            "table_model.sql": table_model,
            "table_store_failures.yml": table_profile_yml,
        }

    @pytest.fixture(autouse=True)
    def teardown_method(self, project):
        yield
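        # store_failures writes failing test rows into the
        # "<test_schema>_dbt_test__audit" schema, which is not dropped
        # automatically, so clean it up after every test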
        with project.adapter.connection_named("__test"):
            relation = project.adapter.Relation.create(
                database=project.database, schema=f"{project.test_schema}_dbt_test__audit"
            )
            project.adapter.drop_schema(relation)

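    # The fixture yml defines five tests in total: unique and not_null on
    # table_model.id, not_null and accepted_values on table_model.value,
    # and not_null on seed.value, hence the expected result count of 5.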
    def test_run_seed_test(self, project):
        # seed seeds
        results = run_dbt(["seed"], expect_pass=True)
        assert len(results) == 1
        results = run_dbt(["run"], expect_pass=True)
        assert len(results) == 1
        # test tests
        results = run_dbt(["test"], expect_pass=True)
        assert len(results) == 5
        # run the tests a second time
        results = run_dbt(["test"], expect_pass=True)
        assert len(results) == 5


class TestTrinoTestStoreTestFailures(TestStoreTestFailures):
    pass


class TestStoreTestFailuresAsInteractions(basic.StoreTestFailuresAsInteractions):
    pass


class TestStoreTestFailuresAsProjectLevelOff(basic.StoreTestFailuresAsProjectLevelOff):
    pass


class TestStoreTestFailuresAsProjectLevelView(basic.StoreTestFailuresAsProjectLevelView):
    pass


class TestStoreTestFailuresAsGeneric(basic.StoreTestFailuresAsGeneric):
    pass


class TestStoreTestFailuresAsProjectLevelEphemeral(basic.StoreTestFailuresAsProjectLevelEphemeral):
    pass


class TestStoreTestFailuresAsExceptions(basic.StoreTestFailuresAsExceptions):
    pass


================================================
FILE: tests/functional/adapter/test_basic.py
================================================
import pytest
from dbt.tests.adapter.basic.expected_catalog import base_expected_catalog, no_stats
from dbt.tests.adapter.basic.files import generic_test_seed_yml
from dbt.tests.adapter.basic.test_adapter_methods import BaseAdapterMethod
from dbt.tests.adapter.basic.test_base import BaseSimpleMaterializations
from dbt.tests.adapter.basic.test_docs_generate import BaseDocsGenerate
from dbt.tests.adapter.basic.test_empty import BaseEmpty
from dbt.tests.adapter.basic.test_ephemeral import BaseEphemeral
from dbt.tests.adapter.basic.test_generic_tests import BaseGenericTests
from dbt.tests.adapter.basic.test_incremental import (
    BaseIncremental,
    BaseIncrementalNotSchemaChange,
)
from dbt.tests.adapter.basic.test_singular_tests import BaseSingularTests
from dbt.tests.adapter.basic.test_singular_tests_ephemeral import (
    BaseSingularTestsEphemeral,
)
from dbt.tests.adapter.basic.test_validate_connection import BaseValidateConnection
from dbt.tests.util import run_dbt

seeds_base_csv = """
id,name,some_date
1,Easton,1981-05-20 06:46:51
2,Lillian,1978-09-03 18:10:33
3,Jeremiah,1982-03-11 03:59:51
4,Nolan,1976-05-06 20:21:35
5,Hannah,1982-06-23 05:41:26
6,Eleanor,1991-08-10 23:12:21
7,Lily,1971-03-29 14:58:02
8,Jonathan,1988-02-26 02:55:24
9,Adrian,1994-02-09 13:14:23
10,Nora,1976-03-01 16:51:39
""".lstrip()


seeds_added_csv = (
    seeds_base_csv
    + """
11,Mateo,2014-09-07 17:04:27
12,Julian,2000-02-04 11:48:30
13,Gabriel,2001-07-10 07:32:52
14,Isaac,2002-11-24 03:22:28
15,Levi,2009-11-15 11:57:15
16,Elizabeth,2005-04-09 03:50:11
17,Grayson,2019-08-06 19:28:17
18,Dylan,2014-03-01 11:50:41
19,Jayden,2009-06-06 07:12:49
20,Luke,2003-12-05 21:42:18
""".lstrip()
)


seed__schema_yml = """
version: 2
seeds:
  - name: seed
    description: "The test seed"
    columns:
      - name: id
        description: The user ID number
      - name: first_name
        description: The user's first name
      - name: email
        description: The user's email
      - name: ip_address
        description: The user's IP address
      - name: updated_at
        description: The last time this user's email was updated
"""

seed__seed_csv = """id,first_name,email,ip_address,updated_at
1,Larry,lking0@miitbeian.gov.cn,69.135.206.194,2008-09-12 19:08:31
"""

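# Incremental model whose varchar "platform" literal differs in length between
# the first and incremental runs; with on_schema_change="sync_all_columns" the
# model should still build without a schema-change failure.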
incremental_not_schema_change_sql = """
{{ config(materialized="incremental", unique_key="user_id_current_time",on_schema_change="sync_all_columns") }}
select
    '1' || '-' || cast(current_timestamp as varchar) as user_id_current_time,
    {% if is_incremental() %}
        'thisis18characters' as platform
    {% else %}
        'okthisis20characters' as platform
    {% endif %}
"""


class TestAdapterMethods(BaseAdapterMethod):
    pass


# TODO Internal Galaxy issue: type=INTERNAL_ERROR, name=GENERIC_INTERNAL_ERROR,
# message="Unexpected response status (Internal Server Error) performing operation: entity created
@pytest.mark.skip_profile("starburst_galaxy")
class TestSimpleMaterializationsTrino(BaseSimpleMaterializations):
    @pytest.fixture(scope="class")
    def project_config_update(self):
        return {
            "name": "base",
            "seeds": {
                "+column_types": {"some_date": "timestamp(6)"},
            },
        }

    @pytest.fixture(scope="class")
    def seeds(self):
        return {
            "base.csv": seeds_base_csv,
        }


class TestSingularTestsTrino(BaseSingularTests):
    pass


class TestSingularTestsEphemeralTrino(BaseSingularTestsEphemeral):
    @pytest.fixture(scope="class")
    def project_config_update(self):
        return {
            "name": "singular_tests_ephemeral",
            "seeds": {
                "+column_types": {"some_date": "timestamp(6)"},
            },
        }

    @pytest.fixture(scope="class")
    def seeds(self):
        return {
            "base.csv": seeds_base_csv,
        }


class TestEmptyTrino(BaseEmpty):
    pass


class TestEphemeralTrino(BaseEphemeral):
    @pytest.fixture(scope="class")
    def project_config_update(self):
        return {
            "name": "ephemeral",
            "seeds": {
                "+column_types": {"some_date": "timestamp(6)"},
            },
        }

    @pytest.fixture(scope="class")
    def seeds(self):
        return {
            "base.csv": seeds_base_csv,
        }


class TestIncrementalTrino(BaseIncremental):
    @pytest.fixture(scope="class")
    def project_config_update(self):
        return {
            "name": "incremental",
            "seeds": {
                "+column_types": {"some_date": "timestamp(6)"},
            },
        }

    @pytest.fixture(scope="class")
    def seeds(self):
        return {"base.csv": seeds_base_csv, "added.csv": seeds_added_csv}


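# Extends the base incremental test with a --full-refresh run to ensure the
# incremental table can be rebuilt from scratch.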
class TestIncrementalFullRefreshTrino(TestIncrementalTrino):
    def test_incremental(self, project):
        super().test_incremental(project)
        results = run_dbt(["run", "--vars", "seed_name: base", "--full-refresh"])
        assert len(results) == 1


class TestIncrementalNotSchemaChangeTrino(BaseIncrementalNotSchemaChange):
    @pytest.fixture(scope="class")
    def models(self):
        return {"incremental_not_schema_change.sql": incremental_not_schema_change_sql}


class TestGenericTestsTrino(BaseGenericTests):
    @pytest.fixture(scope="class")
    def project_config_update(self):
        return {
            "name": "generic_tests",
            "seeds": {
                "+column_types": {"some_date": "timestamp(6)"},
            },
        }

    @pytest.fixture(scope="class")
    def seeds(self):
        return {"base.csv": seeds_base_csv, "schema.yml": generic_test_seed_yml}


class TestTrinoValidateConnection(BaseValidateConnection):
    pass


class TestDocsGenerateTrino(BaseDocsGenerate):
    @pytest.fixture(scope="class")
    def project_config_update(self, unique_schema):
        alternate_schema = unique_schema + "_test"
        return {
            "asset-paths": ["assets", "invalid-asset-paths"],
            "vars": {
                "test_schema": unique_schema,
                "alternate_schema": alternate_schema,
            },
            "seeds": {
                "quote_columns": True,
                "+column_types": {"updated_at": "timestamp(6)"},
            },
            "quoting": {"identifier": False},
        }

    @pytest.fixture(scope="class")
    def seeds(self):
        return {"schema.yml": seed__schema_yml, "seed.csv": seed__seed_csv}

    @pytest.fixture(scope="class")
    def expected_catalog(self, project, profile_user):
        return base_expected_catalog(
            project,
            role=None,
            id_type="integer",
            text_type="varchar",
            time_type="timestamp(6)",
            view_type="VIEW",
            table_type="BASE TABLE",
            model_stats=no_stats(),
        )


================================================
FILE: tests/functional/adapter/test_caching.py
================================================
from dbt.tests.adapter.caching.test_caching import (
    BaseCachingLowercaseModel,
    BaseCachingSelectedSchemaOnly,
    BaseCachingUppercaseModel,
)


class TestCachingLowerCaseModel(BaseCachingLowercaseModel):
    pass


class TestCachingUppercaseModel(BaseCachingUppercaseModel):
    pass


class TestCachingSelectedSchemaOnly(BaseCachingSelectedSchemaOnly):
    pass


================================================
FILE: tests/functional/adapter/test_changing_relation_type.py
================================================
from dbt.tests.adapter.relations.test_changing_relation_type import (
    BaseChangeRelationTypeValidator,
)


class TestTrinoChangeRelationTypes(BaseChangeRelationTypeValidator):
    pass


================================================
FILE: tests/functional/adapter/test_concurrency.py
================================================
from dbt.tests.adapter.concurrency.test_concurrency import (
    BaseConcurrency,
    seeds__update_csv,
)
from dbt.tests.util import check_relations_equal, rm_file, run_dbt, write_file


class TestConcurrencyTrino(BaseConcurrency):
    def test_concurrency(self, project):
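        # expect_pass=False: the upstream BaseConcurrency project includes a
        # deliberately failing model, while all 7 models still report results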
        run_dbt(["seed", "--select", "seed"])
        results = run_dbt(["run"], expect_pass=False)
        assert len(results) == 7
        check_relations_equal(project.adapter, ["SEED", "VIEW_MODEL"])
        check_relations_equal(project.adapter, ["SEED", "DEP"])
        check_relations_equal(project.adapter, ["SEED", "TABLE_A"])
        check_relations_equal(project.adapter, ["SEED", "TABLE_B"])

        rm_file(project.project_root, "seeds", "seed.csv")
        write_file(seeds__update_csv, project.project_root + "/seeds", "seed.csv")
        results = run_dbt(["run"], expect_pass=False)
        assert len(results) == 7
        check_relations_equal(project.adapter, ["SEED", "VIEW_MODEL"])
        check_relations_equal(project.adapter, ["SEED", "DEP"])
        check_relations_equal(project.adapter, ["SEED", "TABLE_A"])
        check_relations_equal(project.adapter, ["SEED", "TABLE_B"])


================================================
FILE: tests/functional/adapter/test_custom_schema.py
================================================
from abc import ABC, abstractmethod

import pytest
from dbt.tests.util import run_dbt, run_sql_with_adapter

seed_csv = """
id,name,date
1,Easton,1981-05-20 06:46:51
2,Lillian,1978-09-03 18:10:33
3,Jeremiah,1982-03-11 03:59:51
4,Nolan,1976-05-06 20:21:35
""".lstrip()


class CustomSchemaBase(ABC):
    """
    This test is meant to ensure that Trino table, view, incremental materialization
    works as expected for custom schemas
    """

    # set custom schema name
    custom_schema_name = "very_custom_schema_name"

    @abstractmethod
    def table_type(self):
        pass

    @abstractmethod
    def materialization(self):
        pass

    # Define the model; braces are doubled so the f-string emits literal Jinja delimiters.
    def custom_schema_model(self, materialization):
        return f"""
                    {{{{
                        config(
                        materialized="{materialization}",
                        schema="{self.custom_schema_name}"
                        )
                    }}}}
                    select * from {{{{ ref('seed') }}}}
                """

    @pytest.fixture(scope="class")
    def project_config_update(self):
        return {
            "seeds": {
                "+column_types": {"date": "timestamp(6)"},
            },
        }

    # everything that goes in the "seeds" directory
    @pytest.fixture(scope="class")
    def seeds(self):
        return {
            "seed.csv": seed_csv,
        }

    # everything that goes in the "models" directory
    @pytest.fixture(scope="class")
    def models(self):
        return {
            f"custom_schema_{self.materialization()}_model.sql": self.custom_schema_model(
                self.materialization()
            )
        }

    @pytest.fixture(scope="function", autouse=True)
    def teardown_method(self, project):
        yield
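        # dbt builds the custom schema as "<test_schema>_<custom_schema_name>"
        # (the default generate_schema_name behavior), so drop it explicitly here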
        relation = project.adapter.Relation.create(
            database=project.database, schema=f"{project.test_schema}_{self.custom_schema_name}"
        )
        project.adapter.drop_schema(relation)

    def test_custom_schema_trino(self, project):
        # Seed seeds, run models.
        results = run_dbt(["seed"], expect_pass=True)
        assert len(results) == 1
        results = run_dbt(["run"], expect_pass=True)
        assert len(results) == 1

        # Fetch info about the just-created table/view from information_schema.
        sql = f"""
            select * from {project.adapter.config.credentials.database}.information_schema.tables
            where table_catalog = '{project.adapter.config.credentials.database}'
            and table_schema = '{project.adapter.config.credentials.schema}_{self.custom_schema_name}'
        """.strip()
        results = run_sql_with_adapter(project.adapter, sql, fetch="all")

        # Check that the fetched info matches expectations.
        assert len(results) == 1
        assert results[0][0] == project.adapter.config.credentials.database
        assert (
            results[0][1]
            == f"{project.adapter.config.credentials.schema}_{self.custom_schema_name}"
        )
        assert results[0][2] == f"custom_schema_{self.materialization()}_model"
        assert results[0][3] == self.table_type()


class TestCustomSchemaTable(CustomSchemaBase):
    def materialization(self):
        return "table"

    def table_type(self):
        return "BASE TABLE"


class TestCustomSchemaView(CustomSchemaBase):
    def materialization(self):
        return "view"

    def table_type(self):
        return "VIEW"


class TestCustomSchemaIncremental(CustomSchemaBase):
    def materialization(self):
        return "incremental"

    def table_type(self):
        return "BASE TABLE"


================================================
FILE: tests/functional/adapter/test_ephemeral.py
================================================
from dbt.tests.adapter.ephemeral.test_ephemeral import (
    BaseEphemeralErrorHandling,
    BaseEphemeralMulti,
    BaseEphemeralNested,
)
from dbt.tests.util import check_relations_equal, run_dbt


class TestEphemeralMultiTrino(BaseEphemeralMulti):
    def test_ephemeral_multi(self, project):
        run_dbt(["seed"])
        results = run_dbt(["run"])
        assert len(results) == 3
        check_relations_equal(
            project.adapter, ["SEED", "DEPENDENT", "DOUBLE_DEPENDENT", "SUPER_DEPENDENT"]
        )


class TestEphemeralNestedTrino(BaseEphemeralNested):
    def test_ephemeral_nested(self, project):
        results = run_dbt(["run"])
        assert len(results) == 2


class TestEphemeralErrorHandlingTrino(BaseEphemeralErrorHandling):
    pass


================================================
FILE: tests/functional/adapter/test_get_incremental_tmp_relation_type_macro.py
================================================
from abc import ABC, abstractmethod

import pytest
from dbt.tests.util import run_dbt, run_sql_with_adapter


class GetIncrementalTmpRelationTypeBase(ABC):
    """
    Ensure that the get_incremental_tmp_relation_type macro returns the
    expected relation type for given strategy, unique_key, and language inputs.
    """

    @property
    @abstractmethod
    def expected_types(self):
        # Expected table/view type returned for each column of the created model.
        # Order matches the column order in the model definition.
        return ["table", "view", "view", "view", "table", "view", "table"]

    # define model
    def incremental_model(self):
        return """
                    select
                    '{{ get_incremental_tmp_relation_type('delete+insert', 'foo', 'sql') }}' AS delete_plus_insert_strategy,
                    '{{ get_incremental_tmp_relation_type('append', 'foo', 'sql') }}' AS append_strategy,
                    '{{ get_incremental_tmp_relation_type('default', 'foo', 'sql') }}' AS default_strategy,
                    '{{ get_incremental_tmp_relation_type('merge', 'foo', 'sql') }}' AS merge_strategy,
                    '{{ get_incremental_tmp_relation_type('foo', 'some_unique_key', 'sql') }}' AS unique_key,
                    '{{ get_incremental_tmp_relation_type('foo', None, 'sql') }}' AS no_unique_key,
                    '{{ get_incremental_tmp_relation_type('default', 'foo', 'python') }}' AS python_model
                """

    @pytest.fixture(scope="class")
    @abstractmethod
    def project_config_update(self):
        pass

    # everything that goes in the "models" directory
    @pytest.fixture(scope="class")
    def models(self):
        return {"test_get_incremental_tmp_relation_type.sql": self.incremental_model()}

    def test_get_incremental_tmp_relation_type(self, project):
        # Run models.
        results = run_dbt(["run"], expect_pass=True)
        assert len(results) == 1

        # Fetch info from get_incremental_tmp_relation_type macro output.
        sql = f"""
            select * from {project.adapter.config.credentials.database}.{project.adapter.config.credentials.schema}.test_get_incremental_tmp_relation_type
        """.strip()
        results = run_sql_with_adapter(project.adapter, sql, fetch="all")

        # Check that the fetched info matches expectations.
        assert len(results) == 1
        assert results[0] == self.expected_types


class TestViewsEnabled(GetIncrementalTmpRelationTypeBase):
    @property
    def expected_types(self):
        return super().expected_types

    @pytest.fixture(scope="class")
    def project_config_update(self):
        # Not specifying views_enabled config,
        # as it is 'True' by default
        pass


class TestViewsNotEnabled(GetIncrementalTmpRelationTypeBase):
    @property
    def expected_types(self):
        # Expected type is 'table' for every config,
        # as views_enabled is set to 'False'.
        return ["table" for _ in super().expected_types]

    @pytest.fixture(scope="class")
    def project_config_update(self):
        return {
            "models": {"+views_enabled": False},
        }
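

# For reference, a minimal pure-Python sketch of the decision logic the
# expectations above imply. This is an illustration inferred from
# `expected_types`, not the actual Jinja macro shipped with the adapter,
# and the helper name is made up for this sketch.
def _illustrative_tmp_relation_type(strategy, unique_key, language, views_enabled=True):
    # Python models and the delete+insert strategy always need a real table;
    # so does everything when views are disabled.
    if not views_enabled or language != "sql" or strategy == "delete+insert":
        return "table"
    # The known strategies get a cheaper temporary view.
    if strategy in ("append", "default", "merge"):
        return "view"
    # Unknown strategy: fall back to a table only when a unique_key is set.
    return "table" if unique_key else "view"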


================================================
FILE: tests/functional/adapter/test_grants.py
================================================
import pytest
from dbt.context.base import BaseContext  # diff_of_two_dicts only
from dbt.tests.adapter.grants.test_invalid_grants import BaseInvalidGrants
from dbt.tests.adapter.grants.test_model_grants import BaseModelGrants


@pytest.mark.hive
# TODO: set up Galaxy and Starburst tests
#   See https://github.com/starburstdata/dbt-trino/issues/147
#   and also https://github.com/starburstdata/dbt-trino/issues/146
@pytest.mark.skip_profile("starburst_galaxy")
# To run this test locally, add the following env vars:
# DBT_TEST_USER_1=user1
# DBT_TEST_USER_2=user2
# DBT_TEST_USER_3=user3
class TestModelGrantsTrino(BaseModelGrants):
    def assert_expected_grants_match_actual(self, project, relation_name, expected_grants):
        actual_grants = self.get_grants_on_relation(project, relation_name)
        # Remove the creation user ("admin") from the reported grants
        try:
            for privilege in ["delete", "update", "insert", "select"]:
                if privilege in actual_grants:
                    actual_grants[privilege].remove("admin")
                    if len(actual_grants[privilege]) == 0:
                        del actual_grants[privilege]
        except ValueError:
            pass

        # We need a case-insensitive comparison,
        # so a simple "assert expected == actual_grants" won't work.
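        # Illustration (assuming diff_of_two_dicts returns the entries of its
        # first argument that are missing from the second, comparing values
        # case-insensitively): actual {"select": ["User1"]} vs expected
        # {"select": ["user1"]} gives {} in both directions, i.e. equal grants.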
        diff_a = BaseContext.diff_of_two_dicts(actual_grants, expected_grants)
        diff_b = BaseContext.diff_of_two_dicts(expected_grants, actual_grants)
        assert diff_a == diff_b == {}


@pytest.mark.hive
# TODO: set up Galaxy and Starburst tests; they might need separate tests
#   See https://github.com/starburstdata/dbt-trino/issues/147
#   and also https://github.com/starburstdata/dbt-trino/issues/146
@pytest.mark.skip(reason="Hive doesn't raise errors on invalid roles")
class TestInvalidGrantsTrino(BaseInvalidGrants):
    pass


================================================
FILE: tests/functional/adapter/test_query_comments.py
================================================
from dbt.tests.adapter.query_comment.test_query_comment import (
    BaseEmptyQueryComments,
    BaseMacroArgsQueryComments,
    BaseMacroInvalidQueryComments,
    BaseMacroQueryComments,
    BaseNullQueryComments,
    BaseQueryComments,
)


class TestQueryCommentsTrino(BaseQueryComments):
    pass


class TestMacroQueryCommentsTrino(BaseMacroQueryComments):
    pass


class TestMacroArgsQueryCommentsTrino(BaseMacroArgsQueryComments):
    pass


class TestMacroInvalidQueryCommentsTrino(BaseMacroInvalidQueryComments):
    pass


class TestNullQueryCommentsTrino(BaseNullQueryComments):
    pass


class TestEmptyQueryCommentsTrino(BaseEmptyQueryComments):
    pass


================================================
FILE: tests/functional/adapter/test_quote_policy.py
================================================
import pytest

from tests.functional.adapter.test_basic import TestIncrementalTrino


@pytest.fixture(scope="class")
def unique_schema(request, prefix) -> str:
    return "sChEmAWiThMiXeDCaSe"


class TestTrinoQuotePolicy(TestIncrementalTrino):
    pass


================================================
FILE: tests/functional/adapter/test_sample_mode.py
================================================
from dbt.tests.adapter.sample_mode.test_sample_mode import BaseSampleModeTest


class TestTrinoSampleMode(BaseSampleModeTest):
    pass


================================================
FILE: tests/functional/adapter/test_seeds_column_types_overrides.py
================================================
import pytest
from dbt.tests.util import get_connection, relation_from_name, run_dbt

boolean_type = """
boolean_example
true
""".lstrip()

datetime_type = """
date_example,time_example,time_p_example,time_tz_example,timestamp_example,timestamp_p_example,timestamp_tz_example,timestamp_p_tz_example,interval_ym_example,interval_ds_example
2018-01-05,01:02:03.456,01:02:03.456789,01:02:03.456 -08:00,2020-06-10 15:55:23.383,2020-06-10 15:55:23.383345,2001-08-22 03:04:05.321-08:00,2001-08-22 03:04:05.321456-08:00,'3' MONTH,'2' DAY
,,,,,,,,,
""".lstrip()

number_type = """
integer_example,tinyint_example,smallint_example,bigint_example,real_example,double_example,decimal_example,decimal_p_example
1,2,3,4,10.3e0,10.3e0,1.1,1.23
,,,,,,,
""".lstrip()

string_type = """varchar_example,varchar_n_example,char_example,char_n_example,varbinary_example,json_example
test,abc,d,ghi,65683F,"{""k1"":1,""k2"":23,""k3"":456}"
,,,,,
""".lstrip()

seed_types = {
    "boolean_type": {
        "boolean_example": "boolean",
    },
    "datetime_type": {
        "date_example": "date",
        "time_example": "time",
        "time_p_example": "time(6)",
        "time_tz_example": "time with time zone",
        "timestamp_example": "timestamp",
        "timestamp_p_example": "timestamp(6)",
        "timestamp_tz_example": "timestamp with time zone",
        "timestamp_p_tz_example": "timestamp(6) with time zone",
        "interval_ym_example": "interval year to month",
        "interval_ds_example": "interval day to second",
    },
    "number_type": {
        "integer_example": "integer",
        "tinyint_example": "tinyint",
        "smallint_example": "smallint",
        "bigint_example": "bigint",
        "real_example": "real",
        "double_example": "double",
        "decimal_example": "decimal",
        "decimal_p_example": "decimal(3,2)",
    },
    "string_type": {
        "varchar_example": "varchar",
        "varchar_n_example": "varchar(10)",
        "char_example": "char",
        "char_n_example": "char(10)",
        "varbinary_example": "varbinary",
        "json_example": "json",
    },
}


# Copied from dbt.tests.util; the original function doesn't return numeric_precision and numeric_scale.
def get_relation_columns(adapter, name):
    relation = relation_from_name(adapter, name)
    with get_connection(adapter):
        columns = adapter.get_columns_in_relation(relation)
        return sorted(
            (
                (c.name, c.dtype, c.char_size, c.numeric_precision, c.numeric_scale)
                for c in columns
            ),
            key=lambda x: x[0],
        )


@pytest.mark.skip_profile("starburst_galaxy")
class TestSeedsColumnTypesOverrides:
    @pytest.fixture(scope="class")
    def project_config_update(self):
        return {
            "seeds": {
                "test": {
                    "boolean_type": {"+column_types": seed_types["boolean_type"]},
                    "datetime_type": {"+column_types": seed_types["datetime_type"]},
                    "number_type": {"+column_types": seed_types["number_type"]},
                    "string_type": {"+column_types": seed_types["string_type"]},
                }
            }
        }

    # everything that goes in the "seeds" directory
    @pytest.fixture(scope="class")
    def seeds(self):
        return {
            "boolean_type.csv": boolean_type,
            "datetime_type.csv": datetime_type,
            "number_type.csv": number_type,
            "string_type.csv": string_type,
        }

    def test_seeds_column_overrides(self, project):
        # Load the seeds
        results = run_dbt(["seed"], expect_pass=True)
        assert len(results) == 4

        actual_columns = {}
        for seed_name, seed_columns in seed_types.items():
            # Retrieve column information from Trino
            actual_columns[seed_name] = get_relation_columns(project.adapter, seed_name)

        assert actual_columns == {
            "boolean_type": [
                ("boolean_example", "boolean", None, None, None),
            ],
            "datetime_type": [
                ("date_example", "date", None, None, None),
                ("interval_ds_example", "interval day to second", None, None, None),
                ("interval_ym_example", "interval year to month", None, None, None),
                ("time_example", "time(3)", None, None, None),
                ("time_p_example", "time(6)", None, None, None),
                ("time_tz_example", "time(3) with time zone", None, None, None),
                ("timestamp_example", "timestamp(3)", None, None, None),
                ("timestamp_p_example", "timestamp(6)", None, None, None),
                ("timestamp_p_tz_example", "timestamp(6) with time zone", None, None, None),
                ("timestamp_tz_example", "timestamp(3) with time zone", None, None, None),
            ],
            "number_type": [
                ("bigint_example", "bigint", None, None, None),
                ("decimal_example", "decimal", None, 38, 0),
                ("decimal_p_example", "decimal", None, 3, 2),
                ("double_example", "double", None, None, None),
                ("integer_example", "integer", None, None, None),
                ("real_example", "real", None, None, None),
                ("smallint_example", "smallint", None, None, None),
                ("tinyint_example", "tinyint", None, None, None),
            ],
            "string_type": [
                ("char_example", "char", 1, None, None),
                ("char_n_example", "char", 10, None, None),
                ("json_example", "json", None, None, None),
                ("varbinary_example", "varbinary", None, None, None),
                ("varchar_example", "varchar", None, None, None),
                ("varchar_n_example", "varchar", 10, None, None),
            ],
        }
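
        # Note how Trino canonicalizes several of the declared types above:
        # bare time/timestamp gain the default precision (3), bare decimal is
        # reported as precision 38 / scale 0, and char/varchar lengths come
        # back through char_size rather than in the dtype string.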


================================================
FILE: tests/functional/adapter/test_session_property.py
================================================
import pytest
from dbt.tests.util import run_dbt

set_session_property = "set session query_max_run_time='20s'"


class TestSessionProperty:
    """
    This test is ensuring that setting session properties through pre_hook is working as expected.
    Test is asserting, that session property passed in 'pre_hook' config in model definition
    matches pre_hook value extracted from RunExecutionResult object.
    """

    @property
    def schema(self):
        return "default"

    def session_property_model(self, prehook):
        return f"""
                    {{{{
                        config(
                            pre_hook="{prehook}"
                        )
                    }}}}
                    select 'OK' as status
                """

    # everything that goes in the "models" directory
    @pytest.fixture(scope="class")
    def models(self):
        return {"session_property_model.sql": self.session_property_model(set_session_property)}

    def test_session_property(self, project):
        # Run models.
        results = run_dbt(["run"], expect_pass=True)
        assert len(results) == 1
        assert set_session_property == results.results[0].node.config.pre_hook[0].sql
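
        # For reference, session_property_model(set_session_property) above
        # renders (whitespace aside) to:
        #   {{ config(pre_hook="set session query_max_run_time='20s'") }}
        #   select 'OK' as status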


================================================
FILE: tests/functional/adapter/test_simple_copy.py
================================================
import pytest
from dbt.tests.adapter.simple_copy.test_simple_copy import (
    EmptyModelsArentRunBase,
    SimpleCopyBase,
)
from dbt.tests.util import run_dbt


@pytest.mark.iceberg
class TestSimpleCopyBase(SimpleCopyBase):
    def test_simple_copy_with_materialized_views(self, project):
        project.run_sql(f"create table {project.test_schema}.unrelated_table (id int)")
        sql = f"""
            create materialized view {project.test_schema}.unrelated_materialized_view as (
                select * from {project.test_schema}.unrelated_table
            )
        """
        project.run_sql(sql)
        sql = f"""
            create view {project.test_schema}.unrelated_view as (
                select * from {project.test_schema}.unrelated_materialized_view
            )
        """
        project.run_sql(sql)
        results = run_dbt(["seed"])
        assert len(results) == 1
        results = run_dbt()
        assert len(results) == 7

        # clean up
        # TODO: check if this clean-up is still needed
        #  after implementing CASCADE in iceberg, delta, hive connectors
        #  if not, entire method could be deleted
        project.run_sql("drop view unrelated_view")
        project.run_sql("drop materialized view unrelated_materialized_view")
        project.run_sql("drop table unrelated_table")


# Trino implementation of dbt.tests.fixtures.project.TestProjInfo.get_tables_in_schema,
# which uses `like` instead of `ilike` (Trino has no `ilike` operator);
# case-insensitivity comes from lower() on both sides.
def trino_get_tables_in_schema(prj):
    sql = """
            select table_name,
                    case when table_type = 'BASE TABLE' then 'table'
                         when table_type = 'VIEW' then 'view'
                         else table_type
                    end as materialization
            from information_schema.tables
            where {}
            order by table_name
            """
    sql = sql.format("lower({}) like lower('{}')".format("table_schema", prj.test_schema))
    result = prj.run_sql(sql, fetch="all")
    return {model_name: materialization for (model_name, materialization) in result}


class TestEmptyModelsArentRun(EmptyModelsArentRunBase):
    def test_dbt_doesnt_run_empty_models(self, project):
        results = run_dbt(["seed"])
        assert len(results) == 1
        results = run_dbt()
        assert len(results) == 7

        tables = trino_get_tables_in_schema(project)

        assert "empty" not in tables.keys()
        assert "disabled" not in tables.keys()


================================================
FILE: tests/functional/adapter/test_simple_snapshot.py
================================================
import pytest
from dbt.tests.adapter.simple_snapshot.test_snapshot import (
    BaseSimpleSnapshot,
    BaseSnapshotCheck,
)
from dbt.tests.util import run_dbt

iceberg_macro_override_sql = """
{% macro trino__current_timestamp() -%}
    current_timestamp(6)
{%- endmacro %}
"""


class TrinoSimpleSnapshot(BaseSimpleSnapshot):
    def test_updates_are_captured_by_snapshot(self, project):
        """
        Update the last 5 records. Show that all ids are current, but the last 5 reflect updates.
        """
        self.update_fact_records(
            {"updated_at": "updated_at + interval '1' day"}, "id between 16 and 20"
        )
        run_dbt(["snapshot"])
        self._assert_results(
            ids_with_current_snapshot_records=range(1, 21),
            ids_with_closed_out_snapshot_records=range(16, 21),
        )

    def test_new_column_captured_by_snapshot(self, project):
        """
        Add a column to `fact` and populate the last 10 records with a non-null value.
        Show that all ids are current, but the last 10 reflect updates and the first 10 don't;
        i.e. if the column is added but a record is not updated, that record is not marked as updated.
        """
        self.add_fact_column("full_name", "varchar(200)")
        self.update_fact_records(
            {
                "full_name": "first_name || ' ' || last_name",
                "updated_at": "updated_at + interval '1' day",
            },
            "id between 11 and 20",
        )
        run_dbt(["snapshot"])
        self._assert_results(
            ids_with_current_snapshot_records=range(1, 21),
            ids_with_closed_out_snapshot_records=range(11, 21),
        )


class TrinoSnapshotCheck(BaseSnapshotCheck):
    def test_column_selection_is_reflected_in_snapshot(self, project):
        """
        Update the first 10 records on a non-tracked column.
        Update the middle 10 records on a tracked column. (hence records 6-10 are updated on both)
        Show that all ids are current, and only the tracked column updates are reflected in `snapshot`.
        """
        self.update_fact_records(
            {"last_name": "substring(last_name, 1, 3)"}, "id between 1 and 10"
        )  # not tracked
        self.update_fact_records(
            {"email": "substring(email, 1, 3)"}, "id between 6 and 15"
        )  # tracked
        run_dbt(["snapshot"])
        self._assert_results(
            ids_with_current_snapshot_records=range(1, 21),
            ids_with_closed_out_snapshot_records=range(6, 16),
        )


@pytest.mark.iceberg
class TestIcebergSimpleSnapshot(TrinoSimpleSnapshot):
    @pytest.fixture(scope="class")
    def project_config_update(self):
        return {
            "seeds": {
                "+column_types": {"updated_at": "timestamp(6)"},
            },
        }


@pytest.mark.delta
class TestDeltaSimpleSnapshot(TrinoSimpleSnapshot):
    pass


@pytest.mark.iceberg
class TestIcebergSnapshotCheck(TrinoSnapshotCheck):
    @pytest.fixture(scope="class")
    def macros(self):
        return {"iceberg.sql": iceberg_macro_override_sql}


@pytest.mark.delta
class TestDeltaSnapshotCheck(TrinoSnapshotCheck):
    pass


================================================
FILE: tests/functional/adapter/test_sql_status_output.py
================================================
import pytest
from dbt.tests.util import run_dbt, run_dbt_and_capture

seed_csv = """
id,name,some_date
1,Easton,1981-05-20 06:46:51
2,Lillian,1978-09-03 18:10:33
3,Jeremiah,1982-03-11 03:59:51
4,Nolan,1976-05-06 20:21:35
""".lstrip()

model_sql = """
select * from {{ ref('seed') }}
"""


class TestSqlStatusOutput:
    """
    Testing if SQL status output contains update_type and rowcount
    """

    @pytest.fixture(scope="class")
    def seeds(self):
        return {
            "seed.csv": seed_csv,
        }

    @pytest.fixture(scope="class")
    def models(self):
        return {
            "materialization_table.sql": model_sql,
            "materialization_view.sql": model_sql,
        }

    @pytest.fixture(scope="class")
    def project_config_update(self):
        return {
            "name": "sql_status_output",
            "models": {
                "sql_status_output": {
                    "materialization_table": {"+materialized": "table"},
                    "materialization_view": {"+materialized": "view"},
                }
            },
        }

    def test_run_seed_test(self, project):
        results = run_dbt(["seed"], expect_pass=True)
        assert len(results) == 1

        results, logs = run_dbt_and_capture(["--no-use-colors", "run"], expect_pass=True)
        assert len(results) == 2
        assert (
            f" of 2 OK created sql table model {project.test_schema}.materialization_table  [CREATE TABLE (4 rows) in "
            in logs
        )
        assert (
            f" of 2 OK created sql view model {project.test_schema}.materialization_view  [CREATE VIEW in "
            in logs
        )
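
        # A captured status line looks roughly like (timing elided):
        #   1 of 2 OK created sql table model <schema>.materialization_table  [CREATE TABLE (4 rows) in ...]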


================================================
FILE: tests/functional/adapter/test_table_properties.py
================================================
import pytest
from dbt.tests.util import run_dbt, run_dbt_and_capture

from tests.functional.adapter.materialization.fixtures import model_sql, seed_csv


class BaseTableProperties:
    # Everything that goes in the "seeds" directory
    @pytest.fixture(scope="class")
    def seeds(self):
        return {
            "seed.csv": seed_csv,
        }

    # Everything that goes in the "models" directory
    @pytest.fixture(scope="class")
    def models(self):
        return {
            "model.sql": model_sql,
        }


@pytest.mark.iceberg
class TestTableProperties(BaseTableProperties):
    # Configuration in dbt_project.yml
    @pytest.fixture(scope="class")
    def project_config_update(self):
        return {
            "name": "properties_test",
            "models": {
                "+materialized": "table",
                "+properties": {
                    "format": "'PARQUET'",
                    "format_version": "2",
                },
            },
        }

    def test_table_properties(self, project):
        # Load the seed
        results = run_dbt(["seed"], expect_pass=True)
        assert len(results) == 1

        # Create model with properties
        results, logs = run_dbt_and_capture(["--debug", "run"], expect_pass=True)
        assert len(results) == 1
        assert "WITH (" in logs
        assert "format = 'PARQUET'" in logs
        assert "format_version = 2" in logs


@pytest.mark.iceberg
class TestFileFormatConfig(BaseTableProperties):
    # Configuration in dbt_project.yml
    @pytest.fixture(scope="class")
    def project_config_update(self):
        return {
            "name": "properties_test",
            "models": {
                "+materialized": "table",
                "file_format": "parquet",
            },
        }

    def test_table_properties(self, project):
        # Load the seed
        results = run_dbt(["seed"], expect_pass=True)
        assert len(results) == 1

        # Create model with properties
        results, logs = run_dbt_and_capture(["--debug", "run"], expect_pass=True)
        assert len(results) == 1
        assert "WITH (" in logs
        assert "format = 'parquet'" in logs


@pytest.mark.iceberg
class TestFileFormatConfigAndFormatTablePropertyFail(BaseTableProperties):
    # Configuration in dbt_project.yml
    @pytest.fixture(scope="class")
    def project_config_update(self):
        return {
            "name": "properties_test",
            "models": {
                "+materialized": "table",
                "+properties": {
                    "format": "'PARQUET'",
                },
                "file_format": "orc",
            },
        }

    def test_table_properties(self, project):
        # Load the seed
        results = run_dbt(["seed"], expect_pass=True)
        assert len(results) == 1

        # Create model with properties
        results, logs = run_dbt_and_capture(["--debug", "run"], expect_pass=False)
        assert len(results) == 1
        assert (
            "You can specify either 'file_format' or 'properties.format' configurations, but not both."
            in logs
        )


@pytest.mark.hive
# Setting `type` property is available only in Starburst Galaxy
# https://docs.starburst.io/starburst-galaxy/data-engineering/working-with-data-lakes/table-formats/gl-iceberg.html
@pytest.mark.skip_profile("trino_starburst")
class TestTableFormatConfig(BaseTableProperties):
    # Configuration in dbt_project.yml
    @pytest.fixture(scope="class")
    def project_config_update(self):
        return {
            "name": "properties_test",
            "models": {
                "+materialized": "table",
                "table_format": "iceberg",
            },
        }

    def test_table_properties(self, project):
        # Load the seed
        results = run_dbt(["seed"], expect_pass=True)
        assert len(results) == 1

        # Create model with properties
        results, logs = run_dbt_and_capture(["--debug", "run"], expect_pass=True)
        assert len(results) == 1
        assert "WITH (" in logs
        assert "type = 'iceberg'" in logs


@pytest.mark.hive
# Setting `type` property is available only in Starburst Galaxy
# https://docs.starburst.io/starburst-galaxy/data-engineering/working-with-data-lakes/table-formats/gl-iceberg.html
@pytest.mark.skip_profile("trino_starburst")
class TestTableFormatConfigAndTypeTablePropertyFail(BaseTableProperties):
    # Configuration in dbt_project.yml
    @pytest.fixture(scope="class")
    def project_config_update(self):
        return {
            "name": "properties_test",
            "models": {
                "+materialized": "table",
                "+properties": {
                    "type": "'iceberg'",
                },
                "table_format": "iceberg",
            },
        }

    def test_table_properties(self, project):
        # Load the seed
        results = run_dbt(["seed"], expect_pass=True)
        assert len(results) == 1

        # Create model with properties
        results, logs = run_dbt_and_capture(["--debug", "run"], expect_pass=False)
        assert len(results) == 1
        assert (
            "You can specify either 'table_format' or 'properties.type' configurations, but not both."
            in logs
        )


================================================
FILE: tests/functional/adapter/unit_testing/test_unit_testing.py
================================================
import pytest
from dbt.tests.adapter.unit_testing.test_case_insensitivity import (
    BaseUnitTestCaseInsensivity,
)
from dbt.tests.adapter.unit_testing.test_invalid_input import BaseUnitTestInvalidInput
from dbt.tests.adapter.unit_testing.test_types import BaseUnitTestingTypes


@pytest.mark.skip_profile("starburst_galaxy")
class TestTrinoUnitTestingTypesTrinoStarburst(BaseUnitTestingTypes):
    @pytest.fixture
    def data_types(self):
        # sql_value, yaml_value
        return [
            ["1", "1"],
            ["'1'", "1"],
            ["true", "true"],
            ["DATE '2020-01-02'", "2020-01-02"],
            ["TIMESTAMP '2013-11-03 00:00:00'", "2013-11-03 00:00:00"],
            ["TIMESTAMP '2013-11-03 00:00:00-0'", "2013-11-03 00:00:00-0"],
            ["DECIMAL '1'", "1"],
            [
                """JSON '{"bar": "baz", "balance": 7.77, "active": false}'""",
                """'{"bar": "baz", "balance": 7.77, "active": false}'""",
            ],
        ]


# The JSON type is not supported on object storage connectors, so it is omitted for Galaxy
@pytest.mark.skip_profile("trino_starburst")
class TestTrinoUnitTestingTypesGalaxy(BaseUnitTestingTypes):
    @pytest.fixture
    def data_types(self):
        # sql_value, yaml_value
        return [
            ["1", "1"],
            ["'1'", "1"],
            ["true", "true"],
            ["DATE '2020-01-02'", "2020-01-02"],
            ["TIMESTAMP '2013-11-03 00:00:00'", "2013-11-03 00:00:00"],
            ["TIMESTAMP '2013-11-03 00:00:00-0'", "2013-11-03 00:00:00-0"],
            ["DECIMAL '1'", "1"],
        ]


class TestTrinoUnitTestCaseInsensitivity(BaseUnitTestCaseInsensivity):
    pass


class TestTrinoUnitTestInvalidInput(BaseUnitTestInvalidInput):
    pass


================================================
FILE: tests/functional/adapter/utils/fixture_date_spine.py
================================================
# If date_spine works properly, there should be no `null` values in the resulting model

models__trino_test_date_spine_sql = """
with generated_dates as (
    {{ date_spine("day", "'2023-09-01'", "'2023-09-10'") }}
), expected_dates as (
    select cast('2023-09-01' as date) as expected
    union all
    select cast('2023-09-02' as date) as expected
    union all
    select cast('2023-09-03' as date) as expected
    union all
    select cast('2023-09-04' as date) as expected
    union all
    select cast('2023-09-05' as date) as expected
    union all
    select cast('2023-09-06' as date) as expected
    union all
    select cast('2023-09-07' as date) as expected
    union all
    select cast('2023-09-08' as date) as expected
    union all
    select cast('2023-09-09' as date) as expected
), joined as (
    select
        generated_dates.date_day,
        expected_dates.expected
    from generated_dates
    left join expected_dates on generated_dates.date_day = expected_dates.expected
)

SELECT * from joined
"""


================================================
FILE: tests/functional/adapter/utils/fixture_get_intervals_between.py
================================================
models__trino_test_get_intervals_between_sql = """
SELECT
  {{ get_intervals_between("'2023-09-01'", "'2023-09-12'", "day") }} as intervals,
  11 as expected

"""


================================================
FILE: tests/functional/adapter/utils/test_data_types.py
================================================
import pytest
from dbt.tests.adapter.utils.data_types.test_type_bigint import BaseTypeBigInt
from dbt.tests.adapter.utils.data_types.test_type_boolean import BaseTypeBoolean
from dbt.tests.adapter.utils.data_types.test_type_float import BaseTypeFloat
from dbt.tests.adapter.utils.data_types.test_type_int import BaseTypeInt
from dbt.tests.adapter.utils.data_types.test_type_numeric import BaseTypeNumeric
from dbt.tests.adapter.utils.data_types.test_type_string import BaseTypeString
from dbt.tests.adapter.utils.data_types.test_type_timestamp import BaseTypeTimestamp


class TestTypeBigInt(BaseTypeBigInt):
    pass


class TestTypeFloat(BaseTypeFloat):
    pass


class TestTypeInt(BaseTypeInt):
    pass


class TestTypeNumeric(BaseTypeNumeric):
    def numeric_fixture_type(self):
        return "decimal(28,6)"


class TestTypeString(BaseTypeString):
    pass


# TODO: Re-enable when https://github.com/trinodb/trino/pull/13981 is merged
@pytest.mark.skip_profile("starburst_galaxy")
class TestTypeTimestamp(BaseTypeTimestamp):
    pass


class TestTypeBoolean(BaseTypeBoolean):
    pass


================================================
FILE: tests/functional/adapter/utils/test_date_spine.py
================================================
import pytest
from dbt.tests.adapter.utils.base_utils import BaseUtils
from dbt.tests.adapter.utils.fixture_date_spine import models__test_date_spine_yml

from tests.functional.adapter.utils.fixture_date_spine import (
    models__trino_test_date_spine_sql,
)


class BaseDateSpine(BaseUtils):
    @pytest.fixture(scope="class")
    def models(self):
        return {
            "test_date_spine.yml": models__test_date_spine_yml,
            "test_date_spine.sql": self.interpolate_macro_namespace(
                models__trino_test_date_spine_sql, "date_spine"
            ),
        }


class TestDateSpine(BaseDateSpine):
    pass


================================================
FILE: tests/functional/adapter/utils/test_get_intervals_between.py
================================================
import pytest
from dbt.tests.adapter.utils.base_utils import BaseUtils
from dbt.tests.adapter.utils.fixture_get_intervals_between import (
    models__test_get_intervals_between_yml,
)

from tests.functional.adapter.utils.fixture_get_intervals_between import (
    models__trino_test_get_intervals_between_sql,
)


class BaseGetIntervalsBetween(BaseUtils):
    @pytest.fixture(scope="class")
    def models(self):
        return {
            "test_get_intervals_between.yml": models__test_get_intervals_between_yml,
            "test_get_intervals_between.sql": self.interpolate_macro_namespace(
                models__trino_test_get_intervals_between_sql, "get_intervals_between"
            ),
        }


class TestGetIntervalsBetween(BaseGetIntervalsBetween):
    pass


================================================
FILE: tests/functional/adapter/utils/test_timestamps.py
================================================
import pytest
from dbt.tests.adapter.utils.test_timestamps import BaseCurrentTimestamps


class TestCurrentTimestampTrino(BaseCurrentTimestamps):
    @pytest.fixture(scope="class")
    def models(self):
        return {
            "get_current_timestamp.sql": 'select {{ current_timestamp() }} as "current_timestamp"'
        }

    @pytest.fixture(scope="class")
    def expected_schema(self):
        return {"current_timestamp": "timestamp(3) with time zone"}

    @pytest.fixture(scope="class")
    def expected_sql(self):
        return 'select current_timestamp as "current_timestamp"'
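
    # i.e. Trino's bare current_timestamp comes back as timestamp(3) with time
    # zone, matching expected_schema above.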


================================================
FILE: tests/functional/adapter/utils/test_utils.py
================================================
import pytest
from dbt.tests.adapter.utils.fixture_datediff import models__test_datediff_yml
from dbt.tests.adapter.utils.test_any_value import BaseAnyValue
from dbt.tests.adapter.utils.test_array_append import BaseArrayAppend
from dbt.tests.adapter.utils.test_array_concat import BaseArrayConcat
from dbt.tests.adapter.utils.test_array_construct import BaseArrayConstruct
from dbt.tests.adapter.utils.test_bool_or import BaseBoolOr
from dbt.tests.adapter.utils.test_cast_bool_to_text import BaseCastBoolToText
from dbt.tests.adapter.utils.test_concat import BaseConcat
from dbt.tests.adapter.utils.test_current_timestamp import BaseCurrentTimestampAware
from dbt.tests.adapter.utils.test_date_trunc import BaseDateTrunc
from dbt.tests.adapter.utils.test_dateadd import BaseDateAdd
from dbt.tests.adapter.utils.test_datediff import BaseDateDiff
from dbt.tests.adapter.utils.test_equals import BaseEquals
from dbt.tests.adapter.utils.test_escape_single_quotes import (
    BaseEscapeSingleQuotesQuote,
)
from dbt.tests.adapter.utils.test_except import BaseExcept
from dbt.tests.adapter.utils.test_generate_series import BaseGenerateSeries
from dbt.tests.adapter.utils.test_get_powers_of_two import BaseGetPowersOfTwo
from dbt.tests.adapter.utils.test_hash import BaseHash
from dbt.tests.adapter.utils.test_intersect import BaseIntersect
from dbt.tests.adapter.utils.test_last_day import BaseLastDay
from dbt.tests.adapter.utils.test_length import BaseLength
from dbt.tests.adapter.utils.test_listagg import BaseListagg
from dbt.tests.adapter.utils.test_position import BasePosition
from dbt.tests.adapter.utils.test_replace import BaseReplace
from dbt.tests.adapter.utils.test_right import BaseRight
from dbt.tests.adapter.utils.test_safe_cast import BaseSafeCast
from dbt.tests.adapter.utils.test_split_part import BaseSplitPart
from dbt.tests.adapter.utils.test_string_literal import BaseStringLiteral
from dbt.tests.adapter.utils.test_validate_sql import BaseValidateSqlMethod

from tests.functional.adapter.fixture_datediff import (
    models__test_datediff_sql,
    seeds__data_datediff_csv,
)

models__array_append_expected_sql = """
select 1 as id, {{ array_construct([1,2,3,4]) }} as array_col
"""


models__array_append_actual_sql = """
select 1 as id, {{ array_append(array_construct([1,2,3]), 4) }} as array_col
"""

models__array_concat_expected_sql = """
select 1 as id, {{ array_construct([1,2,3,4,5,6]) }} as array_col
"""


models__array_concat_actual_sql = """
select 1 as id, {{ array_concat(array_construct([1,2,3]), array_construct([4,5,6])) }} as array_col
"""


class TestAnyValue(BaseAnyValue):
    pass


# Overridden to cover only part of the base test because of https://github.com/trinodb/trino/issues/13:
# there is no way to concat an array with null or an empty array.
class TestArrayAppend(BaseArrayAppend):
    @pytest.fixture(scope="class")
    def models(self):
        return {
            "actual.sql": models__array_append_actual_sql,
            "expected.sql": models__array_append_expected_sql,
        }


# Overridden to cover only part of the base test because of https://github.com/trinodb/trino/issues/13:
# there is no way to concat an array with null or an empty array.
class TestArrayConcat(BaseArrayConcat):
    @pytest.fixture(scope="class")
    def models(self):
        return {
            "actual.sql": models__array_concat_actual_sql,
            "expected.sql": models__array_concat_expected_sql,
        }


class TestArrayConstruct(BaseArrayConstruct):
    pass


class TestBoolOr(BaseBoolOr):
    pass


class TestCastBoolToText(BaseCastBoolToText):
    pass


class TestConcat(BaseConcat):
    pass


class TestCurrentTimestamp(BaseCurrentTimestampAware):
    pass


class TestDateAdd(BaseDateAdd):
    @pytest.fixture(scope="class")
    def project_config_update(self):
        return {
            "name": "test_date_add",
            "seeds": {
                "+column_types": {
                    "from_time": "timestamp(6)",
                    "result": "timestamp(6)",
                },
            },
        }


class TestDateDiff(BaseDateDiff):
    @pytest.fixture(scope="class")
    def project_config_update(self):
        return {
            "name": "test_date_diff",
            "seeds": {
                "+column_types": {"first_date": "timestamp(6)", "second_date": "timestamp(6)"},
            },
        }

    @pytest.fixture(scope="class")
    def seeds(self):
        return {"data_datediff.csv": seeds__data_datediff_csv}

    @pytest.fixture(scope="class")
    def models(self):
        return {
            "test_datediff.yml": models__test_datediff_yml,
            "test_datediff.sql": self.interpolate_macro_namespace(
                models__test_datediff_sql, "datediff"
            ),
        }


class TestDateTrunc(BaseDateTrunc):
    @pytest.fixture(scope="class")
    def project_config_update(self):
        return {
            "name": "test_date_diff",
            "seeds": {
                "+column_types": {"updated_at": "timestamp(6)"},
            },
        }


class TestEquals(BaseEquals):
    pass


class TestEscapeSingleQuotes(BaseEscapeSingleQuotesQuote):
    pass


class TestExcept(BaseExcept):
    pass


class TestGenerateSeries(BaseGenerateSeries):
    pass


class TestGetPowersOfTwo(BaseGetPowersOfTwo):
    pass


class TestHash(BaseHash):
    pass


class TestIntersect(BaseIntersect):
    pass


class TestLastDay(BaseLastDay):
    pass


class TestLength(BaseLength):
    pass


class TestListagg(BaseListagg):
    pass


class TestPosition(BasePosition):
    pass


class TestReplace(BaseReplace):
    pass


class TestRight(BaseRight):
    pass


class TestSafeCast(BaseSafeCast):
    pass


class TestSplitPart(BaseSplitPart):
    pass


class TestStringLiteral(BaseStringLiteral):
    pass


class TestValidateSqlMethod(BaseValidateSqlMethod):
    pass


================================================
FILE: tests/unit/__init__.py
================================================


================================================
FILE: tests/unit/test_adapter.py
================================================
import string
import unittest
from multiprocessing import get_context
from unittest import TestCase
from unittest.mock import MagicMock, Mock, patch

import agate
import dbt.flags as flags
import trino
from dbt.adapters.exceptions.connection import FailedToConnectError
from dbt_common.clients import agate_helper
from dbt_common.exceptions import DbtDatabaseError, DbtRuntimeError

from dbt.adapters.trino import TrinoAdapter
from dbt.adapters.trino.column import TRINO_VARCHAR_MAX_LENGTH, TrinoColumn
from dbt.adapters.trino.connections import (
    HttpScheme,
    TrinoCertificateCredentials,
    TrinoJwtCredentials,
    TrinoKerberosCredentials,
    TrinoLdapCredentials,
    TrinoNoneCredentials,
    TrinoOauthConsoleCredentials,
    TrinoOauthCredentials,
)

from .utils import config_from_parts_or_dicts, mock_connection


class TestTrinoAdapter(unittest.TestCase):
    def setUp(self):
        flags.STRICT_MODE = True

        profile_cfg = {
            "outputs": {
                "test": {
                    "type": "trino",
                    "catalog": "trinodb",
                    "host": "database",
                    "port": 5439,
                    "schema": "dbt_test_schema",
                    "method": "none",
                    "user": "trino_user",
                    "cert": "/path/to/cert",
                    "http_headers": {"X-Trino-Client-Info": "dbt-trino"},
                    "http_scheme": "http",
                    "session_properties": {
                        "query_max_run_time": "4h",
                        "exchange_compression": True,
                    },
                }
            },
            "target": "test",
        }

        project_cfg = {
            "name": "X",
            "version": "0.1",
            "profile": "test",
            "project-root": "/tmp/dbt/does-not-exist",
            "quoting": {
                "identifier": False,
                "schema": True,
            },
            "query-comment": "dbt",
            "config-version": 2,
        }

        self.config = config_from_parts_or_dicts(project_cfg, profile_cfg)
        self.assertEqual(self.config.query_comment.comment, "dbt")
        self.assertEqual(self.config.query_comment.append, None)

    @property
    def adapter(self):
        self._adapter = TrinoAdapter(self.config, get_context("spawn"))
        return self._adapter

    def test_acquire_connection(self):
        connection = self.adapter.acquire_connection("dummy")
        connection.handle  # accessing the handle lazily opens the connection

        self.assertEqual(connection.state, "open")
        self.assertIsNotNone(connection.handle)

    def test_cancel_open_connections_empty(self):
        self.assertEqual(len(list(self.adapter.cancel_open_connections())), 0)

    def test_cancel_open_connections_master(self):
        key = self.adapter.connections.get_thread_identifier()
        self.adapter.connections.thread_connections[key] = mock_connection("master")
        self.assertEqual(len(list(self.adapter.cancel_open_connections())), 0)
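
    # The three tests below exercise the adapter's exception translation:
    # a trino ProgrammingError surfaces as DbtDatabaseError, an
    # OperationalError on connect as FailedToConnectError, and any other
    # exception as DbtRuntimeError.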

    @patch("dbt.adapters.trino.TrinoAdapter.ConnectionManager.get_thread_connection")
    def test_database_exception(self, get_thread_connection):
        self._setup_mock_exception(
            get_thread_connection, trino.exceptions.ProgrammingError("Syntax error")
        )
        with self.assertRaises(DbtDatabaseError):
            self.adapter.execute("select 1")

    @patch("dbt.adapters.trino.TrinoAdapter.ConnectionManager.get_thread_connection")
    def test_failed_to_connect_exception(self, get_thread_connection):
        self._setup_mock_exception(
            get_thread_connection,
            trino.exceptions.OperationalError("Failed to establish a new connection"),
        )
        with self.assertRaises(FailedToConnectError):
            self.adapter.execute("select 1")

    @patch("dbt.adapters.trino.TrinoAdapter.ConnectionManager.get_thread_connection")
    def test_dbt_exception(self, get_thread_connection):
        self._setup_mock_exception(get_thread_connection, Exception("Unexpected error"))
        with self.assertRaises(DbtRuntimeError):
            self.adapter.execute("select 1")

    def _setup_mock_exception(self, get_thread_connection, exception):
        connection = mock_connection("master")
        connection.handle = MagicMock()
        cursor = MagicMock()
        cursor.execute = Mock(side_effect=exception)
        connection.handle.cursor = MagicMock(return_value=cursor)
        get_thread_connection.return_value = connection


class TestTrinoAdapterAuthenticationMethods(unittest.TestCase):
    def setUp(self):
        flags.STRICT_MODE = True

    def acquire_connection_with_profile(self, profile):
        profile_cfg = {
            "outputs": {"test": profile},
            "target": "test",
        }

        project_cfg = {
            "name": "X",
            "version": "0.1",
            "profile": "test",
            "project-root": "/tmp/dbt/does-not-exist",
            "quoting": {
                "identifier": False,
                "schema": True,
            },
            "config-version": 2,
        }

        config = config_from_parts_or_dicts(project_cfg, profile_cfg)

        return TrinoAdapter(config, get_context("spawn")).acquire_connection("dummy")

    def assert_default_connection_credentials(self, credentials):
        self.assertEqual(credentials.type, "trino")
        self.assertEqual(credentials.database, "trinodb")
        self.assertEqual(credentials.host, "database")
        self.assertEqual(credentials.port, 5439)
        self.assertEqual(credentials.schema, "dbt_test_schema")
        self.assertEqual(credentials.http_headers, {"X-Trino-Client-Info": "dbt-trino"})
        self.assertEqual(
            credentials.session_properties,
            {"query_max_run_time": "4h", "exchange_compression": True},
        )
        self.assertEqual(credentials.prepared_statements_enabled, True)
        self.assertEqual(credentials.retries, trino.constants.DEFAULT_MAX_ATTEMPTS)

    def test_none_authentication(self):
        connection = self.acquire_connection_with_profile(
            {
                "type": "trino",
                "catalog": "trinodb",
                "host": "database",
                "port": 5439,
                "schema": "dbt_test_schema",
                "user": "trino_user",
                "cert": "/path/to/cert",
                "client_tags": ["dev", "none"],
                "http_headers": {"X-Trino-Client-Info": "dbt-trino"},
                "http_scheme": "https",
                "session_properties": {
                    "query_max_run_time": "4h",
                    "exchange_compression": True,
                },
                "timezone": "UTC",
                "suppress_cert_warning": False,
            }
        )
        credentials = connection.credentials
        self.assert_default_connection_credentials(credentials)
        self.assertIsInstance(credentials, TrinoNoneCredentials)
        self.assertEqual(credentials.http_scheme, HttpScheme.HTTPS)
        self.assertEqual(credentials.cert, "/path/to/cert")
        self.assertEqual(credentials.client_tags, ["dev", "none"])
        self.assertEqual(credentials.timezone, "UTC")
        self.assertEqual(credentials.suppress_cert_warning, False)

    def test_none_authentication_with_method(self):
        connection = self.acquire_connection_with_profile(
            {
                "type": "trino",
                "catalog": "trinodb",
                "host": "database",
                "port": 5439,
                "method": "none",
                "schema": "dbt_test_schema",
                "user": "trino_user",
                "cert": "/path/to/cert",
                "client_tags": ["dev", "none_with_method"],
                "http_headers": {"X-Trino-Client-Info": "dbt-trino"},
                "http_scheme": "https",
                "session_properties": {
                    "query_max_run_time": "4h",
                    "exchange_compression": True,
                },
                "timezone": "UTC",
                "suppress_cert_warning": False,
            }
        )
        credentials = connection.credentials
        self.assert_default_connection_credentials(credentials)
        self.assertIsInstance(credentials, TrinoNoneCredentials)
        self.assertEqual(credentials.http_scheme, HttpScheme.HTTPS)
        self.assertEqual(credentials.cert, "/path/to/cert")
        self.assertEqual(credentials.client_tags, ["dev", "none_with_method"])
        self.assertEqual(credentials.timezone, "UTC")
        self.assertEqual(credentials.suppress_cert_warning, False)

    def test_none_authentication_without_http_scheme(self):
        connection = self.acquire_connection_with_profile(
            {
                "type": "trino",
                "catalog": "trinodb",
                "host": "database",
                "port": 5439,
                "method": "none",
                "schema": "dbt_test_schema",
                "user": "trino_user",
                "cert": True,
                "client_tags": ["dev", "without_http_scheme"],
                "http_headers": {"X-Trino-Client-Info": "dbt-trino"},
                "session_properties": {
                    "query_max_run_time": "4h",
                    "exchange_compression": True,
                },
                "timezone": "UTC",
                "suppress_cert_warning": False,
            }
        )
        credentials = connection.credentials
        self.assert_default_connection_credentials(credentials)
        self.assertIsInstance(credentials, TrinoNoneCredentials)
        self.assertEqual(credentials.http_scheme, HttpScheme.HTTP)
        self.assertEqual(credentials.cert, True)
        self.assertEqual(credentials.client_tags, ["dev", "without_http_scheme"])
        self.assertEqual(credentials.timezone, "UTC")
        self.assertEqual(credentials.suppress_cert_warning, False)
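
    # Note the contrast with the tests below: with no http_scheme given, "none"
    # authentication defaults to HTTP, while the authenticated methods (ldap,
    # kerberos, certificate, jwt, oauth) are all asserted to use HTTPS.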

    def test_ldap_authentication(self):
        test_cases = [(False, "trino_user"), (True, "impersonated_user")]
        for is_impersonation, expected_user in test_cases:
            connection = self.acquire_connection_with_profile(
                {
                    "type": "trino",
                    "catalog": "trinodb",
                    "host": "database",
                    "port": 5439,
                    "method": "ldap",
                    "schema": "dbt_test_schema",
                    "user": "trino_user",
                    "impersonation_user": "impersonated_user" if is_impersonation else None,
                    "password": "trino_password",
                    "cert": False,
                    "client_tags": ["dev", "ldap"],
                    "http_headers": {"X-Trino-Client-Info": "dbt-trino"},
                    "session_properties": {
                        "query_max_run_time": "4h",
                        "exchange_compression": True,
                    },
                    "timezone": "UTC",
                    "suppress_cert_warning": True,
                }
            )
            credentials = connection.credentials
            connection.handle  # accessing the handle lazily opens the connection
            self.assertIsInstance(credentials, TrinoLdapCredentials)
            self.assert_default_connection_credentials(credentials)
            self.assertEqual(credentials.http_scheme, HttpScheme.HTTPS)
            self.assertEqual(credentials.cert, False)
            self.assertEqual(connection.handle.handle.user, expected_user)
            self.assertEqual(credentials.client_tags, ["dev", "ldap"])
            self.assertEqual(credentials.timezone, "UTC")
            self.assertEqual(credentials.suppress_cert_warning, True)

    def test_kerberos_authentication(self):
        connection = self.acquire_connection_with_profile(
            {
                "type": "trino",
                "catalog": "trinodb",
                "host": "database",
                "port": 5439,
                "method": "kerberos",
                "schema": "dbt_test_schema",
                "user": "trino_user",
                "password": "trino_password",
                "cert": "/path/to/cert",
                "client_tags": ["dev", "kerberos"],
                "http_headers": {"X-Trino-Client-Info": "dbt-trino"},
                "session_properties": {
                    "query_max_run_time": "4h",
                    "exchange_compression": True,
                },
                "timezone": "UTC",
                "suppress_cert_warning": False,
            }
        )
        credentials = connection.credentials
        self.assertIsInstance(credentials, TrinoKerberosCredentials)
        self.assert_default_connection_credentials(credentials)
        self.assertEqual(credentials.http_scheme, HttpScheme.HTTPS)
        self.assertEqual(credentials.cert, "/path/to/cert")
        self.assertEqual(credentials.client_tags, ["dev", "kerberos"])
        self.assertEqual(credentials.timezone, "UTC")
        self.assertEqual(credentials.suppress_cert_warning, False)

    def test_certificate_authentication(self):
        connection = self.acquire_connection_with_profile(
            {
                "type": "trino",
                "catalog": "trinodb",
                "host": "database",
                "port": 5439,
                "method": "certificate",
                "schema": "dbt_test_schema",
                "cert": "/path/to/cert",
                "client_tags": ["dev", "certificate"],
                "http_headers": {"X-Trino-Client-Info": "dbt-trino"},
                "client_certificate": "/path/to/client_cert",
                "client_private_key": "password",
                "session_properties": {
                    "query_max_run_time": "4h",
                    "exchange_compression": True,
                },
                "timezone": "UTC",
                "suppress_cert_warning": False,
            }
        )
        credentials = connection.credentials
        self.assertIsInstance(credentials, TrinoCertificateCredentials)
        self.assertIsInstance(credentials.trino_auth(), trino.auth.CertificateAuthentication)
        self.assertEqual(
            credentials.trino_auth(),
            trino.auth.CertificateAuthentication("/path/to/client_cert", "password"),
        )
        self.assert_default_connection_credentials(credentials)
        self.assertEqual(credentials.http_scheme, HttpScheme.HTTPS)
        self.assertEqual(credentials.cert, "/path/to/cert")
        self.assertEqual(credentials.client_tags, ["dev", "certificate"])
        self.assertEqual(credentials.timezone, "UTC")
        self.assertEqual(credentials.suppress_cert_warning, False)

    def test_jwt_authentication(self):
        connection = self.acquire_connection_with_profile(
            {
                "type": "trino",
                "catalog": "trinodb",
                "host": "database",
                "port": 5439,
                "method": "jwt",
                "schema": "dbt_test_schema",
                "cert": "/path/to/cert",
                "jwt_token": "aabbccddeeff",
                "client_tags": ["dev", "jwt"],
                "http_headers": {"X-Trino-Client-Info": "dbt-trino"},
                "session_properties": {
                    "query_max_run_time": "4h",
                    "exchange_compression": True,
                },
                "timezone": "UTC",
                "suppress_cert_warning": False,
            }
        )
        credentials = connection.credentials
        self.assertIsInstance(credentials, TrinoJwtCredentials)
        self.assert_default_connection_credentials(credentials)
        self.assertEqual(credentials.http_scheme, HttpScheme.HTTPS)
        self.assertEqual(credentials.cert, "/path/to/cert")
        self.assertEqual(credentials.client_tags, ["dev", "jwt"])
        self.assertEqual(credentials.timezone, "UTC")
        self.assertEqual(credentials.suppress_cert_warning, False)

    def test_oauth_authentication(self):
        connection = self.acquire_connection_with_profile(
            {
                "type": "trino",
                "catalog": "trinodb",
                "host": "database",
                "port": 5439,
                "method": "oauth",
                "schema": "dbt_test_schema",
                "cert": "/path/to/cert",
                "client_tags": ["dev", "oauth"],
                "http_headers": {"X-Trino-Client-Info": "dbt-trino"},
                "session_properties": {
                    "query_max_run_time": "4h",
                    "exchange_compression": True,
                },
                "timezone": "UTC",
                "suppress_cert_warning": False,
            }
        )
        credentials = connection.credentials
        self.assertIsInstance(credentials, TrinoOauthCredentials)
        self.assert_default_connection_credentials(credentials)
        self.assertEqual(credentials.http_scheme, HttpScheme.HTTPS)
        self.assertEqual(credentials.cert, "/path/to/cert")
        self.assertEqual(connection.credentials.prepared_statements_enabled, True)
        self.assertEqual(credentials.client_tags, ["dev", "oauth"])
        self.assertEqual(credentials.timezone, "UTC")
        self.assertEqual(credentials.suppress_cert_warning, False)

    def test_oauth_console_authentication(self):
        connection = self.acquire_connection_with_profile(
            {
                "type": "trino",
                "catalog": "trinodb",
                "host": "database",
                "port": 5439,
                "method": "oauth_console",
                "schema": "dbt_test_schema",
                "cert": "/path/to/cert",
                "client_tags": ["dev", "oauth_console"],
                "http_headers": {"X-Trino-Client-Info": "dbt-trino"},
                "session_properties": {
                    "query_max_run_time": "4h",
                    "exchange_compression": True,
                },
                "timezone": "UTC",
                "suppress_cert_warning": False,
            }
        )
        credentials = connection.credentials
        self.assertIsInstance(credentials, TrinoOauthConsoleCredentials)
        self.assert_default_connection_credentials(credentials)
        self.assertEqual(credentials.http_scheme, HttpScheme.HTTPS)
        self.assertEqual(credentials.cert, "/path/to/cert")
        self.assertEqual(credentials.prepared_statements_enabled, True)
        self.assertEqual(credentials.client_tags, ["dev", "oauth_console"])
        self.assertEqual(credentials.timezone, "UTC")
        self.assertEqual(credentials.suppress_cert_warning, False)
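
    # Illustrative sketch (an addition, not in the original suite): the JWT
    # and OAuth tests above could mirror the certificate test by also
    # comparing the constructed auth object, e.g. in test_jwt_authentication:
    #
    #   self.assertEqual(
    #       credentials.trino_auth(),
    #       trino.auth.JWTAuthentication("aabbccddeeff"),
    #   )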


class TestPreparedStatementsEnabled(TestCase):
    def setup_profile(self, credentials):
        profile_cfg = {
            "outputs": {"test": credentials},
            "target": "test",
        }

        project_cfg = {
            "name": "X",
            "version": "0.1",
            "profile": "test",
            "project-root": "/tmp/dbt/does-not-exist",
            "quoting": {
                "identifier": False,
                "schema": True,
            },
            "config-version": 2,
        }

        config = config_from_parts_or_dicts(project_cfg, profile_cfg)
        adapter = TrinoAdapter(config, get_context("spawn"))
        connection = adapter.acquire_connection("dummy")
        return connection

    def test_default(self):
        connection = self.setup_profile(
            {
                "type": "trino",
                "catalog": "trinodb",
                "host": "database",
                "port": 5439,
                "schema": "dbt_test_schema",
                "method": "none",
                "user": "trino_user",
                "http_scheme": "http",
            }
        )
        self.assertEqual(connection.credentials.prepared_statements_enabled, True)

    def test_false(self):
        connection = self.setup_profile(
            {
                "type": "trino",
                "catalog": "trinodb",
                "host": "database",
                "port": 5439,
                "schema": "dbt_test_schema",
                "method": "none",
                "user": "trino_user",
                "http_scheme": "http",
                "prepared_statements_enabled": False,
            }
        )
        self.assertEqual(connection.credentials.prepared_statements_enabled, False)

    def test_true(self):
        connection = self.setup_profile(
            {
                "type": "trino",
                "catalog": "trinodb",
                "host": "database",
                "port": 5439,
                "schema": "dbt_test_schema",
                "method": "none",
                "user": "trino_user",
                "http_scheme": "http",
                "prepared_statements_enabled": True,
            }
        )
        self.assertEqual(connection.credentials.prepared_statements_enabled, True)


class TestAdapterConversions(TestCase):
    def _get_tester_for(self, column_type):
        if column_type is agate.TimeDelta:  # dbt never makes this!
            return agate.TimeDelta()

        for instance in agate_helper.DEFAULT_TYPE_TESTER._possible_types:
            if isinstance(instance, column_type):
                return instance

        raise ValueError(f"no tester for {column_type}")

    def _make_table_of(self, rows, column_types):
        column_names = list(string.ascii_letters[: len(rows[0])])
        if isinstance(column_types, type):
            column_types = [self._get_tester_for(column_types) for _ in column_names]
        else:
            column_types = [self._get_tester_for(typ) for typ in column_types]
        table = agate.Table(rows, column_names=column_names, column_types=column_types)
        return table
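
    # Worked example (derived from the helpers above, not new behavior): for
    # two-column rows, _make_table_of names the columns "a" and "b" and maps
    # each requested agate type to the matching tester registered in
    # agate_helper.DEFAULT_TYPE_TESTER:
    #
    #   table = self._make_table_of([["1", "x"]], [agate.Number, agate.Text])
    #   assert table.column_names == ("a", "b")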


class TestTrinoAdapterConversions(TestAdapterConversions):
    def test_convert_text_type(self):
        rows = [
            ["", "a1", "stringval1"],
            ["", "a2", "stringvalasdfasdfasdfa"],
            ["", "a3", "stringval3"],
        ]
        agate_table = self._make_table_of(rows, agate.Text)
        expected = ["VARCHAR", "VARCHAR", "VARCHAR"]
        for col_idx, expect in enumerate(expected):
            assert TrinoAdapter.convert_text_type(agate_table, col_idx) == expect

    def test_convert_number_type(self):
        rows = [
            ["", "23.98", "-1"],
            ["", "12.78", "-2"],
            ["", "79.41", "-3"],
        ]
        agate_table = self._make_table_of(rows, agate.Number)
        expected = ["INTEGER", "DOUBLE", "INTEGER"]
        for col_idx, expect in enumerate(expected):
            assert TrinoAdapter.convert_number_type(agate_table, col_idx) == expect

    def test_convert_boolean_type(self):
        rows = [
            ["", "false", "true"],
            ["", "false", "false"],
            ["", "false", "true"],
        ]
        agate_table = self._make_table_of(rows, agate.Boolean)
        expected = ["boolean", "boolean", "boolean"]
        for col_idx, expect in enumerate(expected):
            assert TrinoAdapter.convert_boolean_type(agate_table, col_idx) == expect

    def test_convert_datetime_type(self):
        rows = [
            ["", "20190101T01:01:01Z", "2019-01-01 01:01:01"],
            ["", "20190102T01:01:01Z", "2019-01-01 01:01:01"],
            ["", "20190103T01:01:01Z", "2019-01-01 01:01:01"],
        ]
        agate_table = self._make_table_of(
            rows, [agate.DateTime, agate_helper.ISODateTime, agate.DateTime]
        )
        expected = ["TIMESTAMP", "TIMESTAMP", "TIMESTAMP"]
        for col_idx, expect in enumerate(expected):
            assert TrinoAdapter.convert_datetime_type(agate_table, col_idx) == expect

    def test_convert_date_type(self):
        rows = [
            ["", "2019-01-01", "2019-01-04"],
            ["", "2019-01-02", "2019-01-04"],
            ["", "2019-01-03", "2019-01-04"],
        ]
        agate_table = self._make_table_of(rows, agate.Date)
        expected = ["DATE", "DATE", "DATE"]
        for col_idx, expect in enumerate(expected):
            assert TrinoAdapter.convert_date_type(agate_table, col_idx) == expect


class TestTrinoColumn(unittest.TestCase):
    def test_bound_varchar(self):
        col = TrinoColumn.from_description("my_col", "VARCHAR(100)")
        assert col.column == "my_col"
        assert col.dtype == "VARCHAR"
        assert col.char_size == 100
        # bounded varchars get formatted to lowercase
        assert col.data_type == "varchar(100)"
        assert col.string_size() == 100
        assert col.is_string() is True
        assert col.is_number() is False
        assert col.is_numeric() is False

    def test_unbound_varchar(self):
        col = TrinoColumn.from_description("my_col", "VARCHAR")
        assert col.column == "my_col"
        assert col.dtype == "VARCHAR"
        assert col.char_size is None
        assert col.data_type == "VARCHAR"
        assert col.string_size() == TRINO_VARCHAR_MAX_LENGTH
        assert col.is_string() is True
        assert col.is_number() is False
        assert col.is_numeric() is False
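

# Convenience entry point (an addition, not in the original file): lets this
# module be run directly; the repository's own workflow runs it through
# `tox -e unit` / pytest, as configured in tox.ini.
if __name__ == "__main__":
    unittest.main()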


================================================
FILE: tests/unit/utils.py
================================================
"""Unit test utility functions.

Note that all imports should be inside the functions to avoid import/mocking
issues.
"""
import os
from unittest import TestCase, mock

from dbt.config.project import PartialProject
from dbt_common.dataclass_schema import ValidationError


def normalize(path):
    """On windows, neither is enough on its own:

    >>> normcase('C:\\documents/ALL CAPS/subdir\\..')
    'c:\\documents\\all caps\\subdir\\..'
    >>> normpath('C:\\documents/ALL CAPS/subdir\\..')
    'C:\\documents\\ALL CAPS'
    >>> normpath(normcase('C:\\documents/ALL CAPS/subdir\\..'))
    'c:\\documents\\all caps'
    """
    return os.path.normcase(os.path.normpath(path))


class Obj:
    """Minimal stand-in for dbt's parsed CLI args, as consumed by
    RuntimeConfig.from_parts()."""

    which = "blah"
    single_threaded = False


def mock_connection(name):
    """Return a MagicMock with its ``name`` attribute set.

    The attribute is assigned after construction because ``MagicMock(name=...)``
    sets the mock's repr name rather than a ``name`` attribute.
    """
    conn = mock.MagicMock()
    conn.name = name
    return conn


def profile_from_dict(profile, profile_name, cli_vars="{}"):
    from dbt.config import Profile
    from dbt.config.renderer import ProfileRenderer
    from dbt.config.utils import parse_cli_vars

    if not isinstance(cli_vars, dict):
        cli_vars = parse_cli_vars(cli_vars)

    renderer = ProfileRenderer(cli_vars)

    # In order to call dbt's internal profile rendering, we need to set the
    # global flags. This is a bit of a hack, but it's the best way to do it.
    from argparse import Namespace

    from dbt.flags import set_from_args

    set_from_args(Namespace(), None)
    return Profile.from_raw_profile_info(
        profile,
        profile_name,
        renderer,
    )
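

# Illustrative usage (a sketch; the profile dict shape mirrors the fixtures in
# tests/unit/test_adapter.py and is not a new API):
#
#   profile = profile_from_dict(
#       {"outputs": {"test": {"type": "trino", ...}}, "target": "test"},
#       profile_name="test",
#   )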


def project_from_dict(project, profile, packages=None, selectors=None, cli_vars="{}"):
    from dbt.config.renderer import DbtProjectYamlRenderer
    from dbt.config.utils import parse_cli_vars

    if not isinstance(cli_vars, dict):
        cli_vars = parse_cli_vars(cli_vars)

    renderer = DbtProjectYamlRenderer(profile, cli_vars)

    project_root = project.pop("project-root", os.getcwd())

    partial = PartialProject.from_dicts(
        project_root=project_root,
        project_dict=project,
        packages_dict=packages,
        selectors_dict=selectors,
    )
    return partial.render(renderer)


def config_from_parts_or_dicts(project, profile, packages=None, selectors=None, cli_vars="{}"):
    from copy import deepcopy

    from dbt.config import Profile, Project, RuntimeConfig
    from dbt.config.utils import parse_cli_vars

    if not isinstance(cli_vars, dict):
        cli_vars = parse_cli_vars(cli_vars)

    if isinstance(project, Project):
        profile_name = project.profile_name
    else:
        profile_name = project.get("profile")

    if not isinstance(profile, Profile):
        profile = profile_from_dict(
            deepcopy(profile),
            profile_name,
            cli_vars,
        )

    if not isinstance(project, Project):
        project = project_from_dict(
            deepcopy(project),
            profile,
            packages,
            selectors,
            cli_vars,
        )

    args = Obj()
    args.vars = cli_vars
    args.profile_dir = "/dev/null"
    return RuntimeConfig.from_parts(project=project, profile=profile, args=args)
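

# Illustrative usage (mirrors TestPreparedStatementsEnabled.setup_profile in
# tests/unit/test_adapter.py):
#
#   config = config_from_parts_or_dicts(project_cfg, profile_cfg)
#   adapter = TrinoAdapter(config, get_context("spawn"))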


def inject_plugin(plugin):
    """Register the plugin with dbt's adapter FACTORY under its adapter type."""
    from dbt.adapters.factory import FACTORY

    key = plugin.adapter.type()
    FACTORY.plugins[key] = plugin


def inject_adapter(value, plugin):
    """Inject the given adapter into the adapter factory, so your hand-crafted
    artisanal adapter will be available from get_adapter() as if dbt loaded it.
    """
    inject_plugin(plugin)
    from dbt.adapters.factory import FACTORY

    key = value.type()
    FACTORY.adapters[key] = value
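

# Illustrative usage (a sketch; `my_adapter` and `my_plugin` are hypothetical
# stand-ins, not objects constructed in this file):
#
#   inject_adapter(my_adapter, my_plugin)
#   # ...after which dbt.adapters.factory.get_adapter() resolves to my_adapter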


class ContractTestCase(TestCase):
    """Base class for symmetric to_dict/from_dict contract tests."""

    ContractType = None

    def setUp(self):
        self.maxDiff = None
        super().setUp()

    def assert_to_dict(self, obj, dct):
        self.assertEqual(obj.to_dict(), dct)

    def assert_from_dict(self, obj, dct, cls=None):
        if cls is None:
            cls = self.ContractType
        self.assertEqual(cls.from_dict(dct), obj)

    def assert_symmetric(self, obj, dct, cls=None):
        self.assert_to_dict(obj, dct)
        self.assert_from_dict(obj, dct, cls)

    def assert_fails_validation(self, dct, cls=None):
        if cls is None:
            cls = self.ContractType

        with self.assertRaises(ValidationError):
            # either call may raise; if validate() raises, from_dict() is
            # never reached
            cls.validate(dct)
            cls.from_dict(dct)


def generate_name_macros(package):
    """Yield ParsedMacro objects for the generate_database_name,
    generate_schema_name, and generate_alias_name macros."""
    from dbt.contracts.graph.parsed import ParsedMacro
    from dbt.node_types import NodeType

    name_sql = {}
    for component in ("database", "schema", "alias"):
        if component == "alias":
            source = "node.name"
        else:
            source = f"target.{component}"
        name = f"generate_{component}_name"
        sql = f"{{% macro {name}(value, node) %}} {{% if value %}} {{{{ value }}}} {{% else %}} {{{{ {source} }}}} {{% endif %}} {{% endmacro %}}"
        name_sql[name] = sql

    all_sql = "\n".join(name_sql.values())
    for name, sql in name_sql.items():
        pm = ParsedMacro(
            name=name,
            resource_type=NodeType.Macro,
            unique_id=f"macro.{package}.{name}",
            package_name=package,
            original_file_path=normalize("macros/macro.sql"),
            root_path="./dbt_modules/root",
            path=normalize("macros/macro.sql"),
            raw_sql=all_sql,
            macro_sql=sql,
        )
        yield pm
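

# For reference (spelled out from the f-string above, not new behavior): the
# generated `generate_schema_name` macro renders as
#
#   {% macro generate_schema_name(value, node) %} {% if value %} {{ value }}
#   {% else %} {{ target.schema }} {% endif %} {% endmacro %}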


================================================
FILE: tox.ini
================================================
[tox]
skipsdist = True
envlist = unit, integration

[testenv:unit]
description = unit testing
basepython = python3
commands = {envpython} -m pytest -v {posargs} tests/unit
passenv = DBT_INVOCATION_ENV
deps =
    -r{toxinidir}/dev_requirements.txt
    -e.

[testenv:integration]
description = adapter plugin integration testing
basepython = python3
commands = {envpython} -m pytest {posargs} tests/functional
passenv = DBT_INVOCATION_ENV, DBT_TEST_TRINO_HOST, DBT_TEST_USER_1, DBT_TEST_USER_2, DBT_TEST_USER_3
deps =
    -r{toxinidir}/dev_requirements.txt
    -e.
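
# Typical invocations (usage notes added here; standard tox behavior, not
# repo-specific configuration):
#   tox -e unit                     # run the unit tests only
#   tox -e integration -- -k basic  # extra args reach pytest via {posargs}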