Showing preview only (2,638K chars total). Download the full file or copy to clipboard to get everything.
Repository: snowflakedb/gosnowflake
Branch: master
Commit: a0b59e44724f
Files: 394
Total size: 2.4 MB
Directory structure:
gitextract_osgz45y5/
├── .cursor/
│ └── rules/
│ ├── overall-guidelines.mdc
│ └── testing.mdc
├── .github/
│ ├── CODEOWNERS
│ ├── ISSUE_TEMPLATE/
│ │ ├── BUG_REPORT.md
│ │ └── FEATURE_REQUEST.md
│ ├── ISSUE_TEMPLATE.md
│ ├── PULL_REQUEST_TEMPLATE.md
│ ├── repo_meta.yaml
│ ├── secret_scanning.yml
│ └── workflows/
│ ├── build-test.yml
│ ├── changelog.yml
│ ├── cla_bot.yml
│ ├── jira_close.yml
│ ├── jira_comment.yml
│ ├── jira_issue.yml
│ ├── parameters/
│ │ └── public/
│ │ ├── rsa_key_golang_aws.p8.gpg
│ │ ├── rsa_key_golang_azure.p8.gpg
│ │ └── rsa_key_golang_gcp.p8.gpg
│ ├── parameters_aws_auth_tests.json.gpg
│ ├── parameters_aws_golang.json.gpg
│ ├── parameters_azure_golang.json.gpg
│ ├── parameters_gcp_golang.json.gpg
│ ├── rsa-2048-private-key.p8.gpg
│ ├── rsa_keys/
│ │ ├── rsa_key.p8.gpg
│ │ └── rsa_key_invalid.p8.gpg
│ └── semgrep.yml
├── .gitignore
├── .golangci.yml
├── .pre-commit-config.yaml
├── .windsurf/
│ └── rules/
│ └── go.md
├── CHANGELOG.md
├── CONTRIBUTING.md
├── Jenkinsfile
├── LICENSE
├── Makefile
├── README.md
├── SECURITY.md
├── aaa_test.go
├── arrow_chunk.go
├── arrow_stream.go
├── arrow_test.go
├── arrowbatches/
│ ├── batches.go
│ ├── batches_test.go
│ ├── context.go
│ ├── converter.go
│ ├── converter_test.go
│ └── schema.go
├── assert_test.go
├── async.go
├── async_test.go
├── auth.go
├── auth_generic_test_methods_test.go
├── auth_oauth.go
├── auth_oauth_test.go
├── auth_test.go
├── auth_wif.go
├── auth_wif_test.go
├── auth_with_external_browser_test.go
├── auth_with_keypair_test.go
├── auth_with_mfa_test.go
├── auth_with_oauth_okta_authorization_code_test.go
├── auth_with_oauth_okta_client_credentials_test.go
├── auth_with_oauth_snowflake_authorization_code_test.go
├── auth_with_oauth_snowflake_authorization_code_wildcards_test.go
├── auth_with_oauth_test.go
├── auth_with_okta_test.go
├── auth_with_pat_test.go
├── authexternalbrowser.go
├── authexternalbrowser_test.go
├── authokta.go
├── authokta_test.go
├── azure_storage_client.go
├── azure_storage_client_test.go
├── bind_uploader.go
├── bindings_test.go
├── chunk.go
├── chunk_downloader.go
├── chunk_downloader_test.go
├── chunk_test.go
├── ci/
│ ├── _init.sh
│ ├── build.bat
│ ├── build.sh
│ ├── container/
│ │ ├── test_authentication.sh
│ │ └── test_component.sh
│ ├── docker/
│ │ └── rockylinux9/
│ │ └── Dockerfile
│ ├── gofix.sh
│ ├── image/
│ │ ├── Dockerfile
│ │ ├── build.sh
│ │ ├── scripts/
│ │ │ └── entrypoint.sh
│ │ └── update.sh
│ ├── scripts/
│ │ ├── .gitignore
│ │ ├── README.md
│ │ ├── ca.crt
│ │ ├── ca.der
│ │ ├── ca.key
│ │ ├── ca.srl
│ │ ├── execute_tests.sh
│ │ ├── hang_webserver.py
│ │ ├── login_internal_docker.sh
│ │ ├── run_wiremock.sh
│ │ ├── setup_connection_parameters.sh
│ │ ├── setup_gpg.sh
│ │ ├── wiremock-ecdsa-pub.key
│ │ ├── wiremock-ecdsa.crt
│ │ ├── wiremock-ecdsa.csr
│ │ ├── wiremock-ecdsa.key
│ │ ├── wiremock-ecdsa.p12
│ │ ├── wiremock.crt
│ │ ├── wiremock.csr
│ │ ├── wiremock.key
│ │ ├── wiremock.p12
│ │ └── wiremock.v3.ext
│ ├── test.bat
│ ├── test.sh
│ ├── test_authentication.sh
│ ├── test_revocation.sh
│ ├── test_rockylinux9.sh
│ ├── test_rockylinux9_docker.sh
│ ├── test_wif.sh
│ └── wif/
│ └── parameters/
│ ├── parameters_wif.json.gpg
│ ├── rsa_wif_aws_azure.gpg
│ └── rsa_wif_gcp.gpg
├── client.go
├── client_configuration.go
├── client_configuration_test.go
├── client_test.go
├── cmd/
│ ├── arrow/
│ │ ├── .gitignore
│ │ ├── Makefile
│ │ └── transform_batches_to_rows/
│ │ ├── Makefile
│ │ └── transform_batches_to_rows.go
│ ├── logger/
│ │ ├── Makefile
│ │ └── logger.go
│ ├── mfa/
│ │ ├── Makefile
│ │ └── mfa.go
│ ├── programmatic_access_token/
│ │ ├── .gitignore
│ │ ├── Makefile
│ │ └── pat.go
│ ├── tomlfileconnection/
│ │ ├── .gitignore
│ │ └── Makefile
│ └── variant/
│ ├── Makefile
│ └── insertvariantobject.go
├── codecov.yml
├── connection.go
├── connection_configuration_test.go
├── connection_test.go
├── connection_util.go
├── connectivity_diagnosis.go
├── connectivity_diagnosis_test.go
├── connector.go
├── connector_test.go
├── converter.go
├── converter_test.go
├── crl.go
├── crl_test.go
├── ctx_test.go
├── datatype.go
├── datatype_test.go
├── datetime.go
├── datetime_test.go
├── doc.go
├── driver.go
├── driver_ocsp_test.go
├── driver_test.go
├── dsn.go
├── easy_logging.go
├── easy_logging_test.go
├── encrypt_util.go
├── encrypt_util_test.go
├── errors.go
├── errors_test.go
├── file_compression_type.go
├── file_transfer_agent.go
├── file_transfer_agent_test.go
├── file_util.go
├── file_util_test.go
├── function_wrapper_test.go
├── function_wrappers.go
├── gcs_storage_client.go
├── gcs_storage_client_test.go
├── go.mod
├── go.sum
├── gosnowflake.mak
├── heartbeat.go
├── heartbeat_test.go
├── htap.go
├── htap_test.go
├── internal/
│ ├── arrow/
│ │ └── arrow.go
│ ├── compilation/
│ │ ├── cgo_disabled.go
│ │ ├── cgo_enabled.go
│ │ ├── linking_mode.go
│ │ ├── minicore_disabled.go
│ │ └── minicore_enabled.go
│ ├── config/
│ │ ├── assert_test.go
│ │ ├── auth_type.go
│ │ ├── config.go
│ │ ├── config_bool.go
│ │ ├── connection_configuration.go
│ │ ├── connection_configuration_test.go
│ │ ├── crl_mode.go
│ │ ├── dsn.go
│ │ ├── dsn_test.go
│ │ ├── ocsp_mode.go
│ │ ├── priv_key.go
│ │ ├── tls_config.go
│ │ ├── tls_config_test.go
│ │ └── token_accessor.go
│ ├── errors/
│ │ └── errors.go
│ ├── logger/
│ │ ├── accessor.go
│ │ ├── accessor_test.go
│ │ ├── context.go
│ │ ├── easy_logging_support.go
│ │ ├── interfaces.go
│ │ ├── level_filtering.go
│ │ ├── optional_interfaces.go
│ │ ├── proxy.go
│ │ ├── secret_detector.go
│ │ ├── secret_detector_test.go
│ │ ├── secret_masking.go
│ │ ├── secret_masking_test.go
│ │ ├── slog_handler.go
│ │ ├── slog_logger.go
│ │ └── source_location_test.go
│ ├── os/
│ │ ├── libc_info.go
│ │ ├── libc_info_linux.go
│ │ ├── libc_info_notlinux.go
│ │ ├── libc_info_test.go
│ │ ├── os_details.go
│ │ ├── os_details_linux.go
│ │ ├── os_details_notlinux.go
│ │ ├── os_details_test.go
│ │ └── test_data/
│ │ └── sample_os_release
│ ├── query/
│ │ ├── response_types.go
│ │ └── transform.go
│ └── types/
│ └── types.go
├── local_storage_client.go
├── local_storage_client_test.go
├── location.go
├── location_test.go
├── locker.go
├── log.go
├── log_client_test.go
├── log_test.go
├── minicore.go
├── minicore_disabled_test.go
├── minicore_posix.go
├── minicore_provider_darwin_amd64.go
├── minicore_provider_darwin_arm64.go
├── minicore_provider_linux_amd64.go
├── minicore_provider_linux_arm64.go
├── minicore_provider_windows_amd64.go
├── minicore_provider_windows_arm64.go
├── minicore_test.go
├── minicore_windows.go
├── monitoring.go
├── multistatement.go
├── multistatement_test.go
├── ocsp.go
├── ocsp_test.go
├── old_driver_test.go
├── os_specific_posix.go
├── os_specific_windows.go
├── parameters.json.local
├── parameters.json.tmpl
├── permissions_test.go
├── platform_detection.go
├── platform_detection_test.go
├── prepared_statement_test.go
├── priv_key_test.go
├── put_get_test.go
├── put_get_user_stage_test.go
├── put_get_with_aws_test.go
├── query.go
├── restful.go
├── restful_test.go
├── result.go
├── retry.go
├── retry_test.go
├── rows.go
├── rows_test.go
├── s3_storage_client.go
├── s3_storage_client_test.go
├── secret_detector.go
├── secret_detector_test.go
├── secure_storage_manager.go
├── secure_storage_manager_linux.go
├── secure_storage_manager_notlinux.go
├── secure_storage_manager_test.go
├── sflog/
│ ├── interface.go
│ ├── levels.go
│ └── slog.go
├── sqlstate.go
├── statement.go
├── statement_test.go
├── storage_client.go
├── storage_client_test.go
├── storage_file_util_test.go
├── structured_type.go
├── structured_type_arrow_batches_test.go
├── structured_type_read_test.go
├── structured_type_write_test.go
├── telemetry.go
├── telemetry_test.go
├── test_data/
│ ├── .gitignore
│ ├── connections.toml
│ ├── multistatements.sql
│ ├── multistatements_drop.sql
│ ├── orders_100.csv
│ ├── orders_101.csv
│ ├── put_get_1.txt
│ ├── snowflake/
│ │ └── session/
│ │ └── token
│ ├── userdata1.parquet
│ ├── userdata1_orc
│ └── wiremock/
│ └── mappings/
│ ├── auth/
│ │ ├── external_browser/
│ │ │ ├── parallel_login_first_fails_then_successful_flow.json
│ │ │ ├── parallel_login_successful_flow.json
│ │ │ └── successful_flow.json
│ │ ├── mfa/
│ │ │ ├── parallel_login_first_fails_then_successful_flow.json
│ │ │ └── parallel_login_successful_flow.json
│ │ ├── oauth2/
│ │ │ ├── authorization_code/
│ │ │ │ ├── error_from_idp.json
│ │ │ │ ├── invalid_code.json
│ │ │ │ ├── successful_flow.json
│ │ │ │ ├── successful_flow_with_offline_access.json
│ │ │ │ └── successful_flow_with_single_use_refresh_token.json
│ │ │ ├── client_credentials/
│ │ │ │ ├── invalid_client.json
│ │ │ │ └── successful_flow.json
│ │ │ ├── login_request.json
│ │ │ ├── login_request_with_expired_access_token.json
│ │ │ └── refresh_token/
│ │ │ ├── invalid_refresh_token.json
│ │ │ ├── successful_flow.json
│ │ │ └── successful_flow_without_new_refresh_token.json
│ │ ├── password/
│ │ │ ├── invalid_host.json
│ │ │ ├── invalid_password.json
│ │ │ ├── invalid_user.json
│ │ │ ├── successful_flow.json
│ │ │ └── successful_flow_with_telemetry.json
│ │ ├── pat/
│ │ │ ├── invalid_token.json
│ │ │ ├── reading_fresh_token.json
│ │ │ └── successful_flow.json
│ │ └── wif/
│ │ ├── azure/
│ │ │ ├── http_error.json
│ │ │ ├── missing_issuer_claim.json
│ │ │ ├── missing_sub_claim.json
│ │ │ ├── non_json_response.json
│ │ │ ├── successful_flow_azure_functions.json
│ │ │ ├── successful_flow_azure_functions_custom_entra_resource.json
│ │ │ ├── successful_flow_azure_functions_no_client_id.json
│ │ │ ├── successful_flow_azure_functions_v2_issuer.json
│ │ │ ├── successful_flow_basic.json
│ │ │ ├── successful_flow_v2_issuer.json
│ │ │ └── unparsable_token.json
│ │ └── gcp/
│ │ ├── http_error.json
│ │ ├── missing_issuer_claim.json
│ │ ├── missing_sub_claim.json
│ │ ├── successful_flow.json
│ │ ├── successful_impersionation_flow.json
│ │ └── unparsable_token.json
│ ├── close_session.json
│ ├── hang.json
│ ├── minicore/
│ │ └── auth/
│ │ ├── disabled_flow.json
│ │ ├── successful_flow.json
│ │ └── successful_flow_linux.json
│ ├── ocsp/
│ │ ├── auth_failure.json
│ │ ├── malformed.json
│ │ └── unauthorized.json
│ ├── platform_detection/
│ │ ├── aws_ec2_instance_success.json
│ │ ├── aws_identity_success.json
│ │ ├── azure_managed_identity_success.json
│ │ ├── azure_vm_success.json
│ │ ├── gce_identity_success.json
│ │ ├── gce_vm_success.json
│ │ └── timeout_response.json
│ ├── query/
│ │ ├── long_running_query.json
│ │ ├── query_by_id_timeout.json
│ │ ├── query_execution.json
│ │ ├── query_monitoring.json
│ │ ├── query_monitoring_error.json
│ │ ├── query_monitoring_malformed.json
│ │ └── query_monitoring_running.json
│ ├── retry/
│ │ └── redirection_retry_workflow.json
│ ├── select1.json
│ └── telemetry/
│ ├── custom_telemetry.json
│ └── telemetry.json
├── test_utils_test.go
├── tls_config.go
├── tls_config_test.go
├── transaction.go
├── transaction_test.go
├── transport.go
├── transport_test.go
├── url_util.go
├── util.go
├── util_test.go
├── uuid.go
├── value_awaiter.go
├── version.go
└── wiremock_test.go
================================================
FILE CONTENTS
================================================
================================================
FILE: .cursor/rules/overall-guidelines.mdc
================================================
---
alwaysApply: true
---
# Cursor Rules for Go Snowflake Driver
## General Development Standards
### Code Quality
- Follow Go formatting standards (use `gofmt`)
- Use meaningful variable and function names
- Include error handling for all operations that can fail
- Write comprehensive documentation for public APIs
### Project Structure
- Place test files in the same package as the code being tested
- Use `test_data/` directory for test fixtures and sample data
- Group related functionality in logical packages
### Testing
- Test files should be named `*_test.go`
- **For test-specific rules, see `testing.mdc`**
- Write both positive and negative test cases
- Use table-driven tests for testing multiple scenarios
### Code Review Guidelines
- Ensure code follows Go best practices
- Verify comprehensive test coverage
- Check that error messages are descriptive and helpful for debugging
- Validate that public APIs are properly documented
================================================
FILE: .cursor/rules/testing.mdc
================================================
---
alwaysApply: true
---
# Cursor Rules for Go Test Files
This file automatically applies when working on `*_test.go` files.
## Testing Standards
### Assertion Helper Usage
- **ALWAYS** Attempt to use assertion helpers from `assert_test.go` instead of direct `t.Fatal`, `t.Fatalf`, `t.Error`, or `t.Errorf` calls. Where it makes sense, add new assertion helpers.
- **NEVER** write manual if-then-fatal patterns in test functions when a suitable assertion helper exists.
#### Common Assertion Patterns:
**Error Checking:**
```go
// ❌ WRONG
if err != nil {
t.Fatalf("Unexpected error: %v", err)
}
// ✅ CORRECT
assertNilF(t, err, "Unexpected error")
```
**Nil Checking:**
```go
// ❌ WRONG
if obj == nil {
t.Fatal("Expected non-nil object")
}
// ✅ CORRECT
assertNotNilF(t, obj, "Expected non-nil object")
```
**Equality Checking:**
```go
// ❌ WRONG
if actual != expected {
t.Fatalf("Expected %v, got %v", expected, actual)
}
// ✅ CORRECT
assertEqualF(t, actual, expected, "Values should match")
```
**Error Message Validation:**
```go
// ❌ WRONG
if err.Error() != expectedMsg {
t.Fatalf("Expected error: %s, got: %s", expectedMsg, err.Error())
}
// ✅ CORRECT
assertEqualF(t, err.Error(), expectedMsg, "Error message should match")
```
**Boolean Assertions:**
```go
// ❌ WRONG
if !condition {
t.Fatal("Condition should be true")
}
// ✅ CORRECT
assertTrueF(t, condition, "Condition should be true")
```
#### Helper Function Reference:
Always examine `assert_test.go` for the latest set of helpers. Consider these existing examples below.
- `assertNilF/E(t, value, description)` - Assert value is nil
- `assertNotNilF/E(t, value, description)` - Assert value is not nil
- `assertEqualF/E(t, actual, expected, description)` - Assert equality
- `assertNotEqualF/E(t, actual, expected, description)` - Assert inequality
- `assertTrueF/E(t, value, description)` - Assert boolean is true
- `assertFalseF/E(t, value, description)` - Assert boolean is false
- `assertStringContainsF/E(t, str, substring, description)` - Assert string contains substring
- `assertErrIsF/E(t, actual, expected, description)` - Assert error matches expected error
#### When to Use F vs E:
- Use `F` suffix (Fatal) for critical failures that should stop the test immediately as well as for preconditions
- Use `E` suffix (Error) for non-critical failures that allow the test to continue
## Code Review Guidelines:
- Flag any direct use of `t.Fatal*` or `t.Error*` in new code
- Ensure all test functions use appropriate assertion helpers
- Verify that error messages are descriptive and helpful for debugging
- Check that tests are comprehensive and cover edge cases
================================================
FILE: .github/CODEOWNERS
================================================
* @snowflakedb/Client
/transport.go @snowflakedb/pki-oversight @snowflakedb/Client
/crl.go @snowflakedb/pki-oversight @snowflakedb/Client
/ocsp.go @snowflakedb/pki-oversight @snowflakedb/Client
# GitHub Advanced Security Secret Scanning config
/.github/secret_scanning.yml @snowflakedb/prodsec-security-manager-write
================================================
FILE: .github/ISSUE_TEMPLATE/BUG_REPORT.md
================================================
---
name: Bug Report 🐞
about: Something isn't working as expected? Here is the right place to report.
labels: bug
---
:exclamation: If you need **urgent assistance** then [file a case with Snowflake Support](https://community.snowflake.com/s/article/How-To-Submit-a-Support-Case-in-Snowflake-Lodge).
Otherwise continue here.
Please answer these questions before submitting your issue.
In order to accurately debug the issue this information is required. Thanks!
1. What version of GO driver are you using?
2. What operating system and processor architecture are you using?
3. What version of GO are you using?
run `go version` in your console
4. Server version: E.g. 1.90.1
You may get the server version by running a query:
```
SELECT CURRENT_VERSION();
```
5. What did you do?
If possible, provide a recipe for reproducing the error.
A complete runnable program is good.
6. What did you expect to see?
What should have happened and what happened instead?
7. Can you set logging to DEBUG and collect the logs?
https://community.snowflake.com/s/article/How-to-generate-log-file-on-Snowflake-connectors
Before sharing any information, please be sure to review the log and remove any sensitive
information.
================================================
FILE: .github/ISSUE_TEMPLATE/FEATURE_REQUEST.md
================================================
---
name: Feature Request 💡
about: Suggest a new idea for the project.
labels: feature
---
<!--
If you need urgent assistance then file the feature request using the support process:
https://community.snowflake.com/s/article/How-To-Submit-a-Support-Case-in-Snowflake-Lodge
otherwise continue here.
-->
## What is the current behavior?
## What is the desired behavior?
## How would this improve `gosnowflake`?
## References, Other Background
================================================
FILE: .github/ISSUE_TEMPLATE.md
================================================
### Issue description
Tell us what should happen and what happens instead
### Example code
```go
If possible, please enter some example code here to reproduce the issue.
```
### Error log
```
If you have an error log, please paste it here.
```
Add the `glog` option to your application to collect log files.
### Configuration
*Driver version (or git SHA):*
*Go version:* run `go version` in your console
*Server version:* E.g. 1.90.1
You may get the server version by running a query:
```
SELECT CURRENT_VERSION();
```
*Client OS:* E.g. Debian 8.1 (Jessie), Windows 10
================================================
FILE: .github/PULL_REQUEST_TEMPLATE.md
================================================
### Description
SNOW-XXX Please explain the changes you made here.
### Checklist
- [ ] Added proper logging (if possible)
- [ ] Created tests which fail without the change (if possible)
- [ ] Extended the README / documentation, if necessary
================================================
FILE: .github/repo_meta.yaml
================================================
point_of_contact: @snowflakedb/client
production: true
code_owners_file_present: false
jira_area: Developer Platform
================================================
FILE: .github/secret_scanning.yml
================================================
paths-ignore:
- "**/test_data/**"
================================================
FILE: .github/workflows/build-test.yml
================================================
name: Build and Test
permissions:
contents: read
on:
push:
branches:
- master
tags:
- v*
pull_request:
schedule:
- cron: '7 3 * * *'
workflow_dispatch:
inputs:
goTestParams:
default:
description: 'Parameters passed to go test'
sequentialTests:
type: boolean
default: false
description: 'Run tests sequentially (no buffering, slower)'
concurrency:
# older builds for the same pull request number or branch should be cancelled
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
cancel-in-progress: true
jobs:
lint:
runs-on: ubuntu-latest
name: Check linter
steps:
- uses: actions/checkout@v4
- name: Setup go
uses: actions/setup-go@v5
with:
go-version: '1.26'
- name: golangci-lint
uses: golangci/golangci-lint-action@v7
with:
version: v2.11
- name: Format, Lint
shell: bash
run: ./ci/build.sh
- name: Run go fix across all platforms and tags
shell: bash
run: ./ci/gofix.sh
build-test-linux:
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
cloud: [ 'AWS', 'AZURE', 'GCP' ]
go: [ '1.24', '1.25', '1.26' ]
name: ${{ matrix.cloud }} Go ${{ matrix.go }} on Ubuntu
steps:
- uses: actions/checkout@v4
- uses: actions/setup-java@v4 # for wiremock
with:
java-version: 17
distribution: 'temurin'
- name: Setup go
uses: actions/setup-go@v5
with:
go-version: ${{ matrix.go }}
- name: Test
shell: bash
env:
PARAMETERS_SECRET: ${{ secrets.PARAMETERS_SECRET }}
GOLANG_PRIVATE_KEY_SECRET: ${{ secrets.GOLANG_PRIVATE_KEY_SECRET }}
CLOUD_PROVIDER: ${{ matrix.cloud }}
GORACE: history_size=7
GO_TEST_PARAMS: ${{ inputs.goTestParams }}
SEQUENTIAL_TESTS: ${{ inputs.sequentialTests }}
WIREMOCK_PORT: 14335
WIREMOCK_HTTPS_PORT: 13567
run: ./ci/test.sh
- name: Upload test results to Codecov
if: ${{!cancelled()}}
uses: codecov/test-results-action@v1
with:
token: ${{ secrets.CODE_COV_UPLOAD_TOKEN }}
- name: Upload coverage to Codecov
uses: codecov/codecov-action@v5
with:
token: ${{ secrets.CODE_COV_UPLOAD_TOKEN }}
build-test-linux-no-home:
runs-on: ubuntu-latest
name: Ubuntu - no HOME
steps:
- uses: actions/checkout@v4
- uses: actions/setup-java@v4 # for wiremock
with:
java-version: 17
distribution: 'temurin'
- name: Setup go
uses: actions/setup-go@v5
with:
go-version: '1.25'
- name: Test
shell: bash
env:
PARAMETERS_SECRET: ${{ secrets.PARAMETERS_SECRET }}
GOLANG_PRIVATE_KEY_SECRET: ${{ secrets.GOLANG_PRIVATE_KEY_SECRET }}
CLOUD_PROVIDER: AWS
GORACE: history_size=7
GO_TEST_PARAMS: ${{ inputs.goTestParams }}
SEQUENTIAL_TESTS: ${{ inputs.sequentialTests }}
WIREMOCK_PORT: 14335
WIREMOCK_HTTPS_PORT: 13567
HOME_EMPTY: "yes"
run: ./ci/test.sh
build-test-mac:
runs-on: macos-latest
strategy:
fail-fast: false
matrix:
cloud: [ 'AWS', 'AZURE', 'GCP' ]
go: [ '1.24', '1.25', '1.26' ]
name: ${{ matrix.cloud }} Go ${{ matrix.go }} on Mac
steps:
- uses: actions/checkout@v4
- uses: actions/setup-java@v4 # for wiremock
with:
java-version: 17
distribution: 'temurin'
- name: Setup go
uses: actions/setup-go@v5
with:
go-version: ${{ matrix.go }}
- name: Test
shell: bash
env:
PARAMETERS_SECRET: ${{ secrets.PARAMETERS_SECRET }}
GOLANG_PRIVATE_KEY_SECRET: ${{ secrets.GOLANG_PRIVATE_KEY_SECRET }}
CLOUD_PROVIDER: ${{ matrix.cloud }}
GO_TEST_PARAMS: ${{ inputs.goTestParams }}
WIREMOCK_PORT: 14335
WIREMOCK_HTTPS_PORT: 13567
run: ./ci/test.sh
- name: Upload test results to Codecov
if: ${{!cancelled()}}
uses: codecov/test-results-action@v1
with:
token: ${{ secrets.CODE_COV_UPLOAD_TOKEN }}
- name: Upload coverage to Codecov
uses: codecov/codecov-action@v5
with:
token: ${{ secrets.CODE_COV_UPLOAD_TOKEN }}
build-test-mac-no-home:
runs-on: macos-latest
name: Mac - no HOME
steps:
- uses: actions/checkout@v4
- uses: actions/setup-java@v4 # for wiremock
with:
java-version: 17
distribution: 'temurin'
- name: Setup go
uses: actions/setup-go@v5
with:
go-version: '1.25'
- name: Test
shell: bash
env:
PARAMETERS_SECRET: ${{ secrets.PARAMETERS_SECRET }}
GOLANG_PRIVATE_KEY_SECRET: ${{ secrets.GOLANG_PRIVATE_KEY_SECRET }}
CLOUD_PROVIDER: AWS
GO_TEST_PARAMS: ${{ inputs.goTestParams }}
WIREMOCK_PORT: 14335
WIREMOCK_HTTPS_PORT: 13567
HOME_EMPTY: "yes"
run: ./ci/test.sh
build-test-windows:
runs-on: windows-latest
strategy:
fail-fast: false
matrix:
cloud: [ 'AWS', 'AZURE', 'GCP' ]
go: [ '1.24', '1.25', '1.26' ]
name: ${{ matrix.cloud }} Go ${{ matrix.go }} on Windows
steps:
- uses: actions/checkout@v4
- uses: actions/setup-java@v4 # for wiremock
with:
java-version: 17
distribution: 'temurin'
- name: Setup go
uses: actions/setup-go@v5
with:
go-version: ${{ matrix.go }}
- uses: actions/setup-python@v5
with:
python-version: '3.x'
architecture: 'x64'
- name: Test
shell: cmd
env:
PARAMETERS_SECRET: ${{ secrets.PARAMETERS_SECRET }}
GOLANG_PRIVATE_KEY_SECRET: ${{ secrets.GOLANG_PRIVATE_KEY_SECRET }}
CLOUD_PROVIDER: ${{ matrix.cloud }}
GO_TEST_PARAMS: ${{ inputs.goTestParams }}
SEQUENTIAL_TESTS: ${{ inputs.sequentialTests }}
WIREMOCK_PORT: 14335
WIREMOCK_HTTPS_PORT: 13567
run: ci\\test.bat
- name: Upload test results to Codecov
if: ${{!cancelled()}}
uses: codecov/test-results-action@v1
with:
token: ${{ secrets.CODE_COV_UPLOAD_TOKEN }}
- name: Upload coverage to Codecov
uses: codecov/codecov-action@v5
with:
token: ${{ secrets.CODE_COV_UPLOAD_TOKEN }}
fipsOnly:
runs-on: ubuntu-latest
strategy:
fail-fast: false
name: FIPS only mode
steps:
- uses: actions/checkout@v4
- uses: actions/setup-java@v4 # for wiremock
with:
java-version: 17
distribution: 'temurin'
- name: Setup go
uses: actions/setup-go@v5
with:
go-version: '1.25'
- name: Test
shell: bash
env:
PARAMETERS_SECRET: ${{ secrets.PARAMETERS_SECRET }}
GOLANG_PRIVATE_KEY_SECRET: ${{ secrets.GOLANG_PRIVATE_KEY_SECRET }}
CLOUD_PROVIDER: AWS
GORACE: history_size=7
GO_TEST_PARAMS: ${{ inputs.goTestParams }}
TEST_GODEBUG: fips140=only
SEQUENTIAL_TESTS: ${{ inputs.sequentialTests }}
WIREMOCK_PORT: 14335
WIREMOCK_HTTPS_PORT: 13567
run: ./ci/test.sh
- name: Upload test results to Codecov
if: ${{!cancelled()}}
uses: codecov/test-results-action@v1
with:
token: ${{ secrets.CODE_COV_UPLOAD_TOKEN }}
- name: Upload coverage to Codecov
uses: codecov/codecov-action@v5
with:
token: ${{ secrets.CODE_COV_UPLOAD_TOKEN }}
build-test-linux-minicore-disabled:
runs-on: ubuntu-latest
name: Ubuntu - minicore disabled
steps:
- uses: actions/checkout@v4
- uses: actions/setup-java@v4 # for wiremock
with:
java-version: 17
distribution: 'temurin'
- name: Setup go
uses: actions/setup-go@v5
with:
go-version: '1.25'
- name: Test
shell: bash
env:
PARAMETERS_SECRET: ${{ secrets.PARAMETERS_SECRET }}
GOLANG_PRIVATE_KEY_SECRET: ${{ secrets.GOLANG_PRIVATE_KEY_SECRET }}
CLOUD_PROVIDER: AWS
GORACE: history_size=7
GO_TEST_PARAMS: ${{ inputs.goTestParams }} -tags=minicore_disabled
WIREMOCK_PORT: 14335
WIREMOCK_HTTPS_PORT: 13567
run: ./ci/test.sh
build-test-mac-minicore-disabled:
runs-on: macos-latest
name: Mac - minicore disabled
steps:
- uses: actions/checkout@v4
- uses: actions/setup-java@v4 # for wiremock
with:
java-version: 17
distribution: 'temurin'
- name: Setup go
uses: actions/setup-go@v5
with:
go-version: '1.25'
- name: Test
shell: bash
env:
PARAMETERS_SECRET: ${{ secrets.PARAMETERS_SECRET }}
GOLANG_PRIVATE_KEY_SECRET: ${{ secrets.GOLANG_PRIVATE_KEY_SECRET }}
CLOUD_PROVIDER: AWS
GO_TEST_PARAMS: ${{ inputs.goTestParams }} -tags=minicore_disabled
WIREMOCK_PORT: 14335
WIREMOCK_HTTPS_PORT: 13567
run: ./ci/test.sh
build-test-windows-minicore-disabled:
runs-on: windows-latest
name: Windows - minicore disabled
steps:
- uses: actions/checkout@v4
- uses: actions/setup-java@v4 # for wiremock
with:
java-version: 17
distribution: 'temurin'
- name: Setup go
uses: actions/setup-go@v5
with:
go-version: '1.25'
- uses: actions/setup-python@v5
with:
python-version: '3.x'
architecture: 'x64'
- name: Test
shell: cmd
env:
PARAMETERS_SECRET: ${{ secrets.PARAMETERS_SECRET }}
GOLANG_PRIVATE_KEY_SECRET: ${{ secrets.GOLANG_PRIVATE_KEY_SECRET }}
CLOUD_PROVIDER: AWS
GO_TEST_PARAMS: ${{ inputs.goTestParams }} -tags=minicore_disabled
WIREMOCK_PORT: 14335
WIREMOCK_HTTPS_PORT: 13567
run: ci\\test.bat
ecc:
runs-on: ubuntu-latest
strategy:
fail-fast: false
name: Elliptic curves check
steps:
- uses: actions/checkout@v4
- uses: actions/setup-java@v4 # for wiremock
with:
java-version: 17
distribution: 'temurin'
- name: Setup go
uses: actions/setup-go@v5
with:
go-version: '1.25'
- name: Test
shell: bash
env:
PARAMETERS_SECRET: ${{ secrets.PARAMETERS_SECRET }}
GOLANG_PRIVATE_KEY_SECRET: ${{ secrets.GOLANG_PRIVATE_KEY_SECRET }}
CLOUD_PROVIDER: AWS
GORACE: history_size=7
GO_TEST_PARAMS: ${{ inputs.goTestParams }} -run TestQueryViaHttps
WIREMOCK_PORT: 14335
WIREMOCK_HTTPS_PORT: 13567
WIREMOCK_ENABLE_ECDSA: true
run: ./ci/test.sh
build-test-rockylinux9:
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
cloud_go:
- cloud: 'AWS'
go: '1.24.2'
- cloud: 'AZURE'
go: '1.25.0'
- cloud: 'GCP'
go: '1.26.0'
name: ${{ matrix.cloud_go.cloud }} Go ${{ matrix.cloud_go.go }} on Rocky Linux 9
steps:
- uses: actions/checkout@v4
- name: Test
shell: bash
env:
PARAMETERS_SECRET: ${{ secrets.PARAMETERS_SECRET }}
GOLANG_PRIVATE_KEY_SECRET: ${{ secrets.GOLANG_PRIVATE_KEY_SECRET }}
CLOUD_PROVIDER: ${{ matrix.cloud_go.cloud }}
GORACE: history_size=7
GO_TEST_PARAMS: ${{ inputs.goTestParams }}
SEQUENTIAL_TESTS: ${{ inputs.sequentialTests }}
WIREMOCK_PORT: 14335
WIREMOCK_HTTPS_PORT: 13567
run: ./ci/test_rockylinux9_docker.sh ${{ matrix.cloud_go.go }}
build-test-ubuntu-arm:
runs-on: ubuntu-24.04-arm
strategy:
fail-fast: false
matrix:
cloud_go:
- cloud: 'AWS'
go: '1.24'
- cloud: 'AZURE'
go: '1.25'
- cloud: 'GCP'
go: '1.26'
name: ${{ matrix.cloud_go.cloud }} Go ${{ matrix.cloud_go.go }} on Ubuntu ARM
steps:
- uses: actions/checkout@v4
- uses: actions/setup-java@v4 # for wiremock
with:
java-version: 17
distribution: 'temurin'
- name: Setup go
uses: actions/setup-go@v5
with:
go-version: ${{ matrix.cloud_go.go }}
- name: Test
shell: bash
env:
PARAMETERS_SECRET: ${{ secrets.PARAMETERS_SECRET }}
GOLANG_PRIVATE_KEY_SECRET: ${{ secrets.GOLANG_PRIVATE_KEY_SECRET }}
CLOUD_PROVIDER: ${{ matrix.cloud_go.cloud }}
GORACE: history_size=7
GO_TEST_PARAMS: ${{ inputs.goTestParams }}
WIREMOCK_PORT: 14335
WIREMOCK_HTTPS_PORT: 13567
run: ./ci/test.sh
- name: Upload test results to Codecov
if: ${{!cancelled()}}
uses: codecov/test-results-action@v1
with:
token: ${{ secrets.CODE_COV_UPLOAD_TOKEN }}
- name: Upload coverage to Codecov
uses: codecov/codecov-action@v5
with:
token: ${{ secrets.CODE_COV_UPLOAD_TOKEN }}
build-test-windows-arm:
runs-on: windows-11-arm
strategy:
fail-fast: false
matrix:
cloud_go:
- cloud: 'AWS'
go: '1.24'
- cloud: 'AZURE'
go: '1.25'
- cloud: 'GCP'
go: '1.26'
name: ${{ matrix.cloud_go.cloud }} Go ${{ matrix.cloud_go.go }} on Windows ARM
steps:
- uses: actions/checkout@v4
- uses: actions/setup-java@v4 # for wiremock
with:
java-version: 21
distribution: 'temurin'
- name: Setup go
uses: actions/setup-go@v5
with:
go-version: ${{ matrix.cloud_go.go }}
- uses: actions/setup-python@v5
with:
python-version: '3.x'
architecture: 'x64'
- name: Test
shell: cmd
env:
PARAMETERS_SECRET: ${{ secrets.PARAMETERS_SECRET }}
GOLANG_PRIVATE_KEY_SECRET: ${{ secrets.GOLANG_PRIVATE_KEY_SECRET }}
CLOUD_PROVIDER: ${{ matrix.cloud_go.cloud }}
GO_TEST_PARAMS: ${{ inputs.goTestParams }}
WIREMOCK_PORT: 14335
WIREMOCK_HTTPS_PORT: 13567
run: ci\\test.bat
- name: Upload test results to Codecov
if: ${{!cancelled()}}
uses: codecov/test-results-action@v1
with:
token: ${{ secrets.CODE_COV_UPLOAD_TOKEN }}
- name: Upload coverage to Codecov
uses: codecov/codecov-action@v5
with:
token: ${{ secrets.CODE_COV_UPLOAD_TOKEN }}
================================================
FILE: .github/workflows/changelog.yml
================================================
name: Changelog Check
on:
pull_request:
types: [opened, synchronize, labeled, unlabeled]
jobs:
check_change_log:
runs-on: ubuntu-latest
if: ${{!contains(github.event.pull_request.labels.*.name, 'NO-CHANGELOG-UPDATES')}}
steps:
- name: Checkout
uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Ensure CHANGELOG.md is updated
run: git diff --name-only --diff-filter=ACMRT ${{ github.event.pull_request.base.sha }} ${{ github.sha }} | grep -wq "CHANGELOG.md"
================================================
FILE: .github/workflows/cla_bot.yml
================================================
name: "CLA Assistant"
on:
issue_comment:
types: [created]
pull_request_target:
types: [opened,closed,synchronize]
jobs:
CLAAssistant:
runs-on: ubuntu-latest
permissions:
actions: write
contents: write
pull-requests: write
statuses: write
steps:
- name: "CLA Assistant"
if: (github.event.comment.body == 'recheck' || github.event.comment.body == 'I have read the CLA Document and I hereby sign the CLA') || github.event_name == 'pull_request_target'
uses: contributor-assistant/github-action/@master
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
PERSONAL_ACCESS_TOKEN : ${{ secrets.CLA_BOT_TOKEN }}
with:
path-to-signatures: 'signatures/version1.json'
path-to-document: 'https://github.com/snowflakedb/CLA/blob/main/README.md'
branch: 'main'
allowlist: 'dependabot[bot],github-actions,Jenkins User,_jenkins,sfc-gh-snyk-sca-sa,snyk-bot'
remote-organization-name: 'snowflake-eng'
remote-repository-name: 'cla-db'
================================================
FILE: .github/workflows/jira_close.yml
================================================
name: Jira closure
on:
issues:
types: [closed, deleted]
jobs:
close-issue:
runs-on: ubuntu-latest
steps:
- name: Extract issue from title
id: extract
env:
TITLE: "${{ github.event.issue.title }}"
run: |
jira=$(echo -n $TITLE | awk '{print $1}' | sed -e 's/://')
echo ::set-output name=jira::$jira
- name: Close Jira Issue
if: startsWith(steps.extract.outputs.jira, 'SNOW-')
env:
ISSUE_KEY: ${{ steps.extract.outputs.jira }}
JIRA_BASE_URL: ${{ secrets.JIRA_BASE_URL }}
JIRA_USER_EMAIL: ${{ secrets.JIRA_USER_EMAIL }}
JIRA_API_TOKEN: ${{ secrets.JIRA_API_TOKEN }}
run: |
JIRA_API_URL="${JIRA_BASE_URL}/rest/api/2/issue/${ISSUE_KEY}/transitions"
curl -X POST \
--url "$JIRA_API_URL" \
--user "${JIRA_USER_EMAIL}:${JIRA_API_TOKEN}" \
--header "Content-Type: application/json" \
--data "{
\"update\": {
\"comment\": [
{ \"add\": { \"body\": \"Closed on GitHub\" } }
]
},
\"fields\": {
\"customfield_12860\": { \"id\": \"11506\" },
\"customfield_10800\": { \"id\": \"-1\" },
\"customfield_12500\": { \"id\": \"11302\" },
\"customfield_12400\": { \"id\": \"-1\" },
\"resolution\": { \"name\": \"Done\" }
},
\"transition\": { \"id\": \"71\" }
}"
================================================
FILE: .github/workflows/jira_comment.yml
================================================
name: Jira comment
on:
issue_comment:
types: [created]
jobs:
comment-issue:
runs-on: ubuntu-latest
steps:
- name: Jira login
uses: atlassian/gajira-login@master
env:
JIRA_API_TOKEN: ${{ secrets.JIRA_API_TOKEN }}
JIRA_BASE_URL: ${{ secrets.JIRA_BASE_URL }}
JIRA_USER_EMAIL: ${{ secrets.JIRA_USER_EMAIL }}
- name: Extract issue from title
id: extract
env:
TITLE: "${{ github.event.issue.title }}"
run: |
jira=$(echo -n $TITLE | awk '{print $1}' | sed -e 's/://')
echo ::set-output name=jira::$jira
- name: Comment on issue
uses: atlassian/gajira-comment@master
if: startsWith(steps.extract.outputs.jira, 'SNOW-') && github.event.comment.user.login != 'codecov[bot]'
with:
issue: "${{ steps.extract.outputs.jira }}"
comment: "${{ github.event.comment.user.login }} commented:\n\n${{ github.event.comment.body }}\n\n${{ github.event.comment.html_url }}"
================================================
FILE: .github/workflows/jira_issue.yml
================================================
# Creates a JIRA ticket for each newly opened GitHub issue (or on demand via
# a "recreate jira" comment from a maintainer), then prefixes the GitHub
# issue title with the JIRA key.
name: Jira creation
on:
  issues:
    types: [opened]
  issue_comment:
    types: [created]
jobs:
  create-issue:
    runs-on: ubuntu-latest
    permissions:
      issues: write
    # Run for newly opened issues (excluding the whitesource bot) or when a
    # maintainer comments "recreate jira" on an existing issue.
    # FIX: the original read github.event.pull_request.user.login, which is
    # always null on "issues" events, so the bot exclusion never applied;
    # the issue author lives at github.event.issue.user.login.
    if: ((github.event_name == 'issue_comment' && github.event.comment.body == 'recreate jira' && github.event.comment.user.login == 'sfc-gh-mkeller') || (github.event_name == 'issues' && github.event.issue.user.login != 'whitesource-for-github-com[bot]'))
    steps:
      - name: Create JIRA Ticket
        id: create
        env:
          JIRA_BASE_URL: ${{ secrets.JIRA_BASE_URL }}
          JIRA_USER_EMAIL: ${{ secrets.JIRA_USER_EMAIL }}
          JIRA_API_TOKEN: ${{ secrets.JIRA_API_TOKEN }}
          ISSUE_TITLE: ${{ github.event.issue.title }}
          ISSUE_BODY: ${{ github.event.issue.body }}
          ISSUE_URL: ${{ github.event.issue.html_url }}
        run: |
          # debug
          #set -x
          TMP_BODY=$(mktemp)
          trap "rm -f $TMP_BODY" EXIT
          # Escape backticks, double quotes and single quotes in title and
          # body before they are handed to jq / embedded in the payload.
          TITLE=$(echo "${ISSUE_TITLE//\`/\\\`}" | sed 's/"/\\"/g' | sed "s/'/\\\'/g")
          echo "${ISSUE_BODY//\`/\\\`}" | sed 's/"/\\"/g' | sed "s/'/\\\'/g" > "$TMP_BODY"
          echo -e "\n\n_Created from GitHub Action_ for $ISSUE_URL" >> "$TMP_BODY"
          BODY=$(cat "$TMP_BODY")
          # Build the JIRA REST payload; --arg lets jq do the JSON escaping.
          PAYLOAD=$(jq -n \
            --arg issuetitle "$TITLE" \
            --arg issuebody "$BODY" \
            '{
              fields: {
                project: { key: "SNOW" },
                issuetype: { name: "Bug" },
                summary: $issuetitle,
                description: $issuebody,
                customfield_11401: { id: "14723" },
                assignee: { id: "712020:e527ae71-55cc-4e02-9217-1ca4ca8028a2" },
                components: [{ id: "19286" }],
                labels: ["oss"],
                priority: { id: "10001" }
              }
            }')
          # Create JIRA issue using REST API
          RESPONSE=$(curl -s -X POST \
            -H "Content-Type: application/json" \
            -H "Accept: application/json" \
            -u "$JIRA_USER_EMAIL:$JIRA_API_TOKEN" \
            "$JIRA_BASE_URL/rest/api/2/issue" \
            -d "$PAYLOAD")
          # Extract JIRA issue key from response; "null"/empty means failure.
          JIRA_KEY=$(echo "$RESPONSE" | jq -r '.key')
          if [ "$JIRA_KEY" = "null" ] || [ -z "$JIRA_KEY" ]; then
            echo "Failed to create JIRA issue"
            echo "Response: $RESPONSE"
            echo "Request payload: $PAYLOAD"
            exit 1
          fi
          echo "Created JIRA issue: $JIRA_KEY"
          echo "jira_key=$JIRA_KEY" >> "$GITHUB_OUTPUT"
      - name: Update GitHub Issue
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          REPOSITORY: ${{ github.repository }}
          ISSUE_NUMBER: ${{ github.event.issue.number }}
          JIRA_KEY: ${{ steps.create.outputs.jira_key }}
          ISSUE_TITLE: ${{ github.event.issue.title }}
        run: |
          TITLE=$(echo "${ISSUE_TITLE//\`/\\\`}" | sed 's/"/\\"/g' | sed "s/'/\\\'/g")
          PAYLOAD=$(jq -n \
            --arg issuetitle "$TITLE" \
            --arg jirakey "$JIRA_KEY" \
            '{
              title: ($jirakey + ": " + $issuetitle)
            }')
          # Update Github issue title with jira id
          curl -s \
            -X PATCH \
            -H "Authorization: Bearer $GITHUB_TOKEN" \
            -H "Accept: application/vnd.github+json" \
            -H "X-GitHub-Api-Version: 2022-11-28" \
            "https://api.github.com/repos/$REPOSITORY/issues/$ISSUE_NUMBER" \
            -d "$PAYLOAD"
          if [ "$?" != 0 ]; then
            echo "Failed to update GH issue. Payload was:"
            echo "$PAYLOAD"
            exit 1
          fi
================================================
FILE: .github/workflows/semgrep.yml
================================================
# Runs the org-wide Semgrep static-analysis scan on every PR targeting
# main/master by delegating to the shared reusable workflow.
name: Run semgrep checks
on:
pull_request:
branches: [main, master]
# Read-only repo access; the scan needs nothing more.
permissions:
contents: read
jobs:
run-semgrep-reusable-workflow:
uses: snowflakedb/reusable-workflows/.github/workflows/semgrep-v2.yml@main
secrets:
token: ${{ secrets.SEMGREP_APP_TOKEN }}
================================================
FILE: .gitignore
================================================
*.DS_Store
.idea/
.vscode/
parameters*.json
parameters*.bat
*.p8
coverage.txt
fuzz-*/
/select1
/selectmany
/verifycert
wss-golang-agent.config
wss-unified-agent.jar
whitesource/
*.swp
cp.out
__debug_bin*
test-output.txt
test-report.junit.xml
# exclude vendor
vendor
# SSH private key for WIF tests
ci/wif/parameters/rsa_wif_aws_azure
ci/wif/parameters/rsa_wif_gcp
================================================
FILE: .golangci.yml
================================================
version: "2"
run:
tests: true
linters:
exclusions:
rules:
- path: "_test.go"
linters:
- errcheck
- path: "cmd/"
linters:
- errcheck
- path: "_test.go"
linters:
- staticcheck
text: "implement StmtQueryContext"
- path: "_test.go"
linters:
- staticcheck
text: "implement StmtExecContext"
- linters:
- staticcheck
text: "QF1001"
- linters:
- staticcheck
text: "SA1019: .+\\.(LoginTimeout|RequestTimeout|JWTExpireTimeout|ClientTimeout|JWTClientTimeout|ExternalBrowserTimeout|CloudStorageTimeout|Tracing) is deprecated"
================================================
FILE: .pre-commit-config.yaml
================================================
repos:
- repo: git@github.com:snowflakedb/casec_precommit.git # SSH
# - repo: https://github.com/snowflakedb/casec_precommit.git # HTTPS
rev: v1.5
hooks:
- id: snapps-secret-scanner
================================================
FILE: .windsurf/rules/go.md
================================================
---
trigger: glob
description:
globs: **/*.go
---
# Go files rules
## General
1. Unless it's necessary or told otherwise, try reusing existing files, both for implementation and tests.
2. If possible, try running relevant tests.
## Tests
1. Create a test file with the name same as prod code file by default.
2. For assertions use our test helpers defined in assert_test.go.
## Logging
1. Add reasonable logging - don't repeat logs, but add them when it's meaningful.
2. Always consider log levels.
================================================
FILE: CHANGELOG.md
================================================
# Changelog
## Upcoming release
Bug fixes:
- Fixed empty `Account` when connecting with programmatic `Config` and `database/sql.Connector` by deriving `Account` from the first DNS label of `Host` in `FillMissingConfigParameters` when `Host` matches the Snowflake hostname pattern (snowflakedb/gosnowflake#1772).
## 2.0.1
Bug fixes:
- Fixed default `CrlDownloadMaxSize` to be 20MB instead of 200MB, as the previous value was set too high and could cause out-of-memory issues (snowflakedb/gosnowflake#1735).
- Replaced global `paramsMutex` with per-connection `syncParams` to encapsulate parameter synchronization and avoid cross-connection contention (snowflakedb/gosnowflake#1747).
- `Config.Params` map is not modified anymore, to avoid changing parameter values across connections of the same connection pool (snowflakedb/gosnowflake#1747).
- Set `BlobContentMD5` on Azure uploads so that multi-part uploads have the blob content-MD5 property populated (snowflakedb/gosnowflake#1757).
- Fixed 403 errors from Google/GCP/GCS PUT queries on versioned stages (snowflakedb/gosnowflake#1760).
- Fixed not updating query context cache for failed queries (snowflakedb/gosnowflake#1763).
Internal changes:
- Moved configuration to a dedicated internal package (snowflakedb/gosnowflake#1720).
- Modernized Go syntax idioms throughout the codebase.
- Added libc family, version and dynamic linking marker to client environment telemetry (snowflakedb/gosnowflake#1750).
- Bumped a few libraries to fix vulnerabilities (snowflakedb/gosnowflake#1751, snowflakedb/gosnowflake#1756).
- Depointerised query context cache in `snowflakeConn` (snowflakedb/gosnowflake#1763).
## 2.0.0
Breaking changes:
- Removed `RaisePutGetError` from `SnowflakeFileTransferOptions` - current behaviour is aligned to always raise errors for PUT/GET operations (snowflakedb/gosnowflake#1690).
- Removed `GetFileToStream` from `SnowflakeFileTransferOptions` - using `WithFileGetStream` automatically enables file streaming for GETs (snowflakedb/gosnowflake#1690).
- Renamed `WithFileStream` to `WithFilePutStream` for consistency (snowflakedb/gosnowflake#1690).
- `Array` function now returns error for unsupported types (snowflakedb/gosnowflake#1693).
- `WithMultiStatement` does not return error anymore (snowflakedb/gosnowflake#1693).
- `WithOriginalTimestamp` is removed, use `WithArrowBatchesTimestampOption(UseOriginalTimestamp)` instead (snowflakedb/gosnowflake#1693).
- `WithMapValuesNullable` and `WithArrayValuesNullable` combined into one option `WithEmbeddedValuesNullable` (snowflakedb/gosnowflake#1693).
- Hid streaming chunk downloader. It will be removed completely in the future (snowflakedb/gosnowflake#1696).
- Maximum number of chunk download goroutines is now configured with `CLIENT_PREFETCH_THREADS` session parameter (snowflakedb/gosnowflake#1696)
and defaults to 4.
- Fixed typo in `GOSNOWFLAKE_SKIP_REGISTRATION` env variable (snowflakedb/gosnowflake#1696).
- Removed `ClientIP` field from `Config` struct. This field was never used and is not needed for any functionality (snowflakedb/gosnowflake#1692).
- Unexported MfaToken and IdToken (snowflakedb/gosnowflake#1692).
- Removed `InsecureMode` field from `Config` struct. Use `DisableOCSPChecks` instead (snowflakedb/gosnowflake#1692).
- Renamed `KeepSessionAlive` field in `Config` struct to `ServerSessionKeepAlive` to adjust with the remaining drivers (snowflakedb/gosnowflake#1692).
- Removed `DisableTelemetry` field from `Config` struct. Use `CLIENT_TELEMETRY_ENABLED` session parameter instead (snowflakedb/gosnowflake#1692).
- Removed stream chunk downloader. Use a regular, default downloader instead. (snowflakedb/gosnowflake#1702).
- Removed `SnowflakeTransport`. Use `Config.Transporter` or simply register your own TLS config with `RegisterTLSConfig` if you just need a custom root certificates set (snowflakedb/gosnowflake#1703).
- Arrow batches changes (snowflakedb/gosnowflake#1706):
- Arrow batches have been extracted to a separate package. It should significantly drop the compilation size for those who don't need arrow batches (~34MB -> ~18MB).
- Removed `GetArrowBatches` from `SnowflakeRows` and `SnowflakeResult`. Use `arrowbatches.GetArrowBatches(rows.(SnowflakeRows))` instead.
- Migrated functions:
- `sf.WithArrowBatchesTimestampOption` -> `arrowbatches.WithTimestampOption`
- `sf.WithArrowBatchesUtf8Validation` -> `arrowbatches.WithUtf8Validation`
- `sf.ArrowSnowflakeTimestampToTime` -> `arrowbatches.ArrowSnowflakeTimestampToTime`
- Logging changes (snowflakedb/gosnowflake#1710):
- Removed Logrus logger and migrated to slog.
- Simplified `SFLogger` interface.
- Added `SFSlogLogger` interface for setting custom slog handler.
Bug fixes:
- The query `context.Context` is now propagated to cloud storage operations for PUT and GET queries, allowing for better cancellation handling (snowflakedb/gosnowflake#1690).
New features:
- Added support for Go 1.26, dropped support for Go 1.23 (snowflakedb/gosnowflake#1707).
- Added support for FIPS-only mode (snowflakedb/gosnowflake#1496).
Bug fixes:
- Added panic recovery block for stage file uploads and downloads operation (snowflakedb/gosnowflake#1687).
- Fixed WIF metadata request from Azure container, manifested with HTTP 400 error (snowflakedb/gosnowflake#1701).
- Fixed SAML authentication port validation bypass in `isPrefixEqual` where the second URL's port was never checked (snowflakedb/gosnowflake#1712).
- Fixed a race condition in OCSP cache clearer (snowflakedb/gosnowflake#1704).
- The query `context.Context` is now propagated to cloud storage operations for PUT and GET queries, allowing for better cancellation handling (snowflakedb/gosnowflake#1690).
- Fixed `tokenFilePath` DSN parameter triggering false validation error claiming both `token` and `tokenFilePath` were specified when only `tokenFilePath` was provided in the DSN string (snowflakedb/gosnowflake#1715).
- Fixed minicore crash (SIGFPE) on fully statically linked Linux binaries by detecting static linking via ELF PT_INTERP inspection and skipping `dlopen` gracefully (snowflakedb/gosnowflake#1721).
Internal changes:
- Moved configuration to a dedicated internal package (snowflakedb/gosnowflake#1720).
## 1.19.0
New features:
- Added ability to disable minicore loading at compile time (snowflakedb/gosnowflake#1679).
- Exposed `tokenFilePath` in `Config` (snowflakedb/gosnowflake#1666).
- `tokenFilePath` is now read for every new connection (snowflakedb/gosnowflake#1666).
- Added support for identity impersonation when using workload identity federation (snowflakedb/gosnowflake#1652, snowflakedb/gosnowflake#1660).
Bug fixes:
- Fixed getting file from an unencrypted stage (snowflakedb/gosnowflake#1672).
- Fixed minicore file name gathering in client environment (snowflakedb/gosnowflake#1661).
- Fixed file descriptor leaks in cloud storage calls (snowflakedb/gosnowflake#1682)
- Fixed path escaping for GCS urls (snowflakedb/gosnowflake#1678).
Internal changes:
- Improved Linux telemetry gathering (snowflakedb/gosnowflake#1677).
- Improved some logs returned from cloud storage clients (snowflakedb/gosnowflake#1665).
## 1.18.1
Bug fixes:
- Handle HTTP307 & 308 in drivers to achieve better resiliency to backend errors (snowflakedb/gosnowflake#1616).
- Create temp directory only if needed during file transfer (snowflakedb/gosnowflake#1647)
- Fix unnecessary user expansion for file paths (snowflakedb/gosnowflake#1646).
Internal changes:
- Remove spammy "telemetry disabled" log messages (snowflakedb/gosnowflake#1638).
- Introduced shared library ([source code](https://github.com/snowflakedb/universal-driver/tree/main/sf_mini_core)) for extended telemetry to identify and prepare testing platform for native rust extensions (snowflakedb/gosnowflake#1629)
## 1.18.0
New features:
- Added validation of CRL `NextUpdate` for freshly downloaded CRLs (snowflakedb/gosnowflake#1617)
- Exposed function to send arbitrary telemetry data (snowflakedb/gosnowflake#1627)
- Added logging of query text and parameters (snowflakedb/gosnowflake#1625)
Bug fixes:
- Fixed a data race error in tests caused by platform_detection init() function (snowflakedb/gosnowflake#1618)
- Make secrets detector initialization thread safe and more maintainable (snowflakedb/gosnowflake#1621)
Internal changes:
- Added ISA to login request telemetry (snowflakedb/gosnowflake#1620)
## 1.17.1
- Fix unsafe reflection of nil pointer on DECFLOAT func in bind uploader (snowflakedb/gosnowflake#1604).
- Added temporary download files cleanup (snowflakedb/gosnowflake#1577)
- Marked fields as deprecated (snowflakedb/gosnowflake#1556)
- Exposed `QueryStatus` from `SnowflakeResult` and `SnowflakeRows` in `GetStatus()` function (snowflakedb/gosnowflake#1556)
- Split timeout settings into separate groups based on target service types (snowflakedb/gosnowflake#1531)
- Added small clarification in oauth.go example on token escaping (snowflakedb/gosnowflake#1574)
- Ensured proper permissions for CRL cache directory (snowflakedb/gosnowflake#1588)
- Added `CrlDownloadMaxSize` to limit the size of CRL downloads (snowflakedb/gosnowflake#1588)
- Added platform telemetry to login requests. Can be disabled with `SNOWFLAKE_DISABLE_PLATFORM_DETECTION` environment variable (snowflakedb/gosnowflake#1601)
- Bypassed proxy settings for WIF metadata requests (snowflakedb/gosnowflake#1593)
- Fixed a bug where GCP PUT/GET operations would fail when the connection context was cancelled (snowflakedb/gosnowflake#1584)
- Fixed nil pointer dereference while calling long-running queries (snowflakedb/gosnowflake#1592) (snowflakedb/gosnowflake#1596)
- Moved keyring-based secure storage manager into separate file to avoid the need to initialize keyring on Linux. (snowflakedb/gosnowflake#1595)
- Enabling official support for RHEL9 by testing and enabling CI/CD checks for Rocky Linux in CICD, (snowflakedb/gosnowflake#1597)
- Improve logging (snowflakedb/gosnowflake#1570)
## 1.17.0
- Added ability to configure OCSP per connection (snowflakedb/gosnowflake#1528)
- Added `DECFLOAT` support, see details in `doc.go` (snowflakedb/gosnowflake#1504, snowflakedb/gosnowflake#1506)
- Added support for Go 1.25, dropped support for Go 1.22 (snowflakedb/gosnowflake#1544)
- Added proxy options to connection parameters (snowflakedb/gosnowflake#1511)
- Added `client_session_keep_alive_heartbeat_frequency` connection param (snowflakedb/gosnowflake#1576)
- Added support for multi-part downloads for S3, Azure and GCP (snowflakedb/gosnowflake#1549)
- Added `singleAuthenticationPrompt` to control whether only one authentication should be performed at the same time for authentications that need human interactions (like MFA or OAuth authorization code). Default is true. (snowflakedb/gosnowflake#1561)
- Fixed missing `DisableTelemetry` option in connection parameters (snowflakedb/gosnowflake#1520)
- Fixed multistatements in large result sets (snowflakedb/gosnowflake#1539, snowflakedb/gosnowflake#1543, snowflakedb/gosnowflake#1547)
- Fixed unnecessary retries when context is cancelled (snowflakedb/gosnowflake#1540)
- Fixed regression in TOML connection file (snowflakedb/gosnowflake#1530)
## Prior Releases
Release notes available at https://docs.snowflake.com/en/release-notes/clients-drivers/golang
================================================
FILE: CONTRIBUTING.md
================================================
# Contributing Guidelines
## Reporting Issues
Before creating a new Issue, please check first if a similar Issue [already exists](https://github.com/snowflakedb/gosnowflake/issues?state=open) or was [recently closed](https://github.com/snowflakedb/gosnowflake/issues?direction=desc&page=1&sort=updated&state=closed).
## Contributing Code
By contributing to this project, you share your code under the Apache License 2, as specified in the LICENSE file.
### Code Review
Everyone is invited to review and comment on pull requests.
If it looks fine to you, comment with "LGTM" (Looks good to me).
If changes are required, notice the reviewers with "PTAL" (Please take another look) after committing the fixes.
Before merging the Pull Request, at least one Snowflake team member must have commented with "LGTM".
================================================
FILE: Jenkinsfile
================================================
@Library('pipeline-utils')
import com.snowflake.DevEnvUtils
import groovy.json.JsonOutput
timestamps {
node('high-memory-node') {
stage('checkout') {
scmInfo = checkout scm
println("${scmInfo}")
env.GIT_BRANCH = scmInfo.GIT_BRANCH
env.GIT_COMMIT = scmInfo.GIT_COMMIT
}
params = [
string(name: 'svn_revision', value: 'temptest-deployed'),
string(name: 'branch', value: 'main'),
string(name: 'client_git_commit', value: scmInfo.GIT_COMMIT),
string(name: 'client_git_branch', value: scmInfo.GIT_BRANCH),
string(name: 'TARGET_DOCKER_TEST_IMAGE', value: 'go-chainguard-go1_24'),
string(name: 'parent_job', value: env.JOB_NAME),
string(name: 'parent_build_number', value: env.BUILD_NUMBER)
]
stage('Authenticate Artifactory') {
script {
new DevEnvUtils().withSfCli {
sh "sf artifact oci auth"
}
}
}
parallel(
'Test': {
stage('Test') {
build job: 'RT-LanguageGo-PC', parameters: params
}
},
'Test Authentication': {
stage('Test Authentication') {
withCredentials([
string(credentialsId: 'sfctest0-parameters-secret', variable: 'PARAMETERS_SECRET')
]) {
sh '''\
|#!/bin/bash -e
|$WORKSPACE/ci/test_authentication.sh
'''.stripMargin()
}
}
},
'Test WIF Auth': {
stage('Test WIF Auth') {
withCredentials([
string(credentialsId: 'sfctest0-parameters-secret', variable: 'PARAMETERS_SECRET'),
]) {
sh '''\
|#!/bin/bash -e
|$WORKSPACE/ci/test_wif.sh
'''.stripMargin()
}
}
},
'Test Revocation Validation': {
stage('Test Revocation Validation') {
withCredentials([
usernamePassword(credentialsId: 'jenkins-snowflakedb-github-app',
usernameVariable: 'GITHUB_USER',
passwordVariable: 'GITHUB_TOKEN')
]) {
try {
sh '''\
|#!/bin/bash -e
|chmod +x $WORKSPACE/ci/test_revocation.sh
|$WORKSPACE/ci/test_revocation.sh
'''.stripMargin()
} finally {
archiveArtifacts artifacts: 'revocation-results.json,revocation-report.html', allowEmptyArchive: true
publishHTML(target: [
allowMissing: true,
alwaysLinkToLastBuild: true,
keepAll: true,
reportDir: '.',
reportFiles: 'revocation-report.html',
reportName: 'Revocation Validation Report'
])
}
}
}
}
)
}
}
pipeline {
agent { label 'high-memory-node' }
options { timestamps() }
environment {
COMMIT_SHA_LONG = sh(returnStdout: true, script: "echo \$(git rev-parse " + "HEAD)").trim()
// environment variables for semgrep_agent (for findings / analytics page)
// remove .git at the end
// remove SCM URL + .git at the end
BASELINE_BRANCH = "${env.CHANGE_TARGET}"
}
stages {
stage('Checkout') {
steps {
checkout scm
}
}
}
}
// Posts a commit-status update ("state" under context "jenkins/<folder>",
// linking to targetUrl) for COMMIT_SHA_LONG via the GitHub statuses API,
// using wget --spider with --post-data to issue the request.
// NOTE(review): the "seconds" parameter is never used in the body — confirm
// whether it can be dropped at the call sites.
// NOTE(review): $GIT_PASSWORD is Groovy-interpolated into the sh string,
// which Jenkins warns may leak credentials; verify the credentials binding
// before relying on this in new stages.
def wgetUpdateGithub(String state, String folder, String targetUrl, String seconds) {
def ghURL = "https://api.github.com/repos/snowflakedb/gosnowflake/statuses/$COMMIT_SHA_LONG"
def data = JsonOutput.toJson([state: "${state}", context: "jenkins/${folder}",target_url: "${targetUrl}"])
sh "wget ${ghURL} --spider -q --header='Authorization: token $GIT_PASSWORD' --post-data='${data}'"
}
================================================
FILE: LICENSE
================================================
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "{}"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright (c) 2017-2022 Snowflake Computing Inc. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
================================================
FILE: Makefile
================================================
NAME:=gosnowflake
VERSION:=$(shell git describe --tags --abbrev=0)
REVISION:=$(shell git rev-parse --short HEAD)
COVFLAGS:=
## Run fmt, lint and test
all: fmt lint cov
include gosnowflake.mak
## Run tests
test_setup: test_teardown
python3 ci/scripts/hang_webserver.py 12345 &
test_teardown:
pkill -9 hang_webserver || true
test: deps test_setup
./ci/scripts/execute_tests.sh
## Run Coverage tests
cov:
make test COVFLAGS="-coverprofile=coverage.txt -covermode=atomic"
## Lint
lint: clint
## Format source codes
fmt: cfmt
@for c in $$(ls cmd); do \
(cd cmd/$$c; make fmt); \
done
## Install sample programs
install:
for c in $$(ls cmd); do \
(cd cmd/$$c; GOBIN=$$GOPATH/bin go install $$c.go); \
done
## Build fuzz tests
fuzz-build:
for c in $$(ls | grep -E "fuzz-*"); do \
(cd $$c; make fuzz-build); \
done
## Run fuzz-dsn
fuzz-dsn:
(cd fuzz-dsn; go-fuzz -bin=./dsn-fuzz.zip -workdir=.)
.PHONY: setup deps update test lint help fuzz-dsn
================================================
FILE: README.md
================================================
## Migrating to v2
**Version 2.0.0 of the Go Snowflake Driver was released on March 3rd, 2026.** This major version includes breaking changes that require code updates when migrating from v1.x.
### Key Changes and Migration Steps
#### 1. Update Import Paths
Update your `go.mod` to use v2:
```sh
go get -u github.com/snowflakedb/gosnowflake/v2
```
Update imports in your code:
```go
// Old (v1)
import "github.com/snowflakedb/gosnowflake"
// New (v2)
import "github.com/snowflakedb/gosnowflake/v2"
```
#### 2. Arrow Batches Moved to Separate Package
The public Arrow batches API now lives in `github.com/snowflakedb/gosnowflake/v2/arrowbatches`.
Importing that sub-package pulls in the additional Arrow compute dependency only for applications
that use Arrow batches directly.
**Migration:**
```go
import (
"context"
"database/sql/driver"
sf "github.com/snowflakedb/gosnowflake/v2"
"github.com/snowflakedb/gosnowflake/v2/arrowbatches"
)
ctx := arrowbatches.WithArrowBatches(context.Background())
var rows driver.Rows
err := conn.Raw(func(x any) error {
rows, err = x.(driver.QueryerContext).QueryContext(ctx, query, nil)
return err
})
if err != nil {
// handle error
}
batches, err := arrowbatches.GetArrowBatches(rows.(sf.SnowflakeRows))
if err != nil {
// handle error
}
```
**Optional helper mapping:**
- `sf.WithArrowBatchesTimestampOption` → `arrowbatches.WithTimestampOption`
- `sf.WithArrowBatchesUtf8Validation` → `arrowbatches.WithUtf8Validation`
- `sf.ArrowSnowflakeTimestampToTime` → `arrowbatches.ArrowSnowflakeTimestampToTime`
- `sf.WithOriginalTimestamp` → `arrowbatches.WithTimestampOption(ctx, arrowbatches.UseOriginalTimestamp)`
#### 3. Configuration Struct Changes
**Renamed fields:**
```go
// Old (v1)
config := &gosnowflake.Config{
KeepSessionAlive: true,
InsecureMode: true,
DisableTelemetry: true,
}
// New (v2)
config := &gosnowflake.Config{
ServerSessionKeepAlive: true, // Renamed for consistency with other drivers
DisableOCSPChecks: true, // Replaces InsecureMode
// DisableTelemetry removed - use CLIENT_TELEMETRY_ENABLED session parameter
}
```
**Removed fields:**
- `ClientIP` - No longer used
- `MfaToken` and `IdToken` - Now unexported
- `DisableTelemetry` - Use `CLIENT_TELEMETRY_ENABLED` session parameter instead
#### 4. Logger Changes
The built-in logger is now based on Go's standard `log/slog`:
```go
logger := gosnowflake.GetLogger()
_ = logger.SetLogLevel("debug")
```
For custom logging, continue implementing `SFLogger`.
If you want to customize the built-in slog handler, type-assert `GetLogger()` to `SFSlogLogger`
and call `SetHandler`.
#### 5. File Transfer Changes
**Configuration options:**
```go
// Old (v1)
options := &gosnowflake.SnowflakeFileTransferOptions{
RaisePutGetError: true,
GetFileToStream: true,
}
ctx = gosnowflake.WithFileStream(ctx, stream)
// New (v2)
// RaisePutGetError removed - errors always raised
// GetFileToStream removed - use WithFileGetStream instead
ctx = gosnowflake.WithFilePutStream(ctx, stream) // Renamed from WithFileStream
ctx = gosnowflake.WithFileGetStream(ctx, stream) // For GET operations
```
#### 6. Context and Function Changes
```go
// Old (v1)
ctx, err := gosnowflake.WithMultiStatement(ctx, 0)
if err != nil {
// handle error
}
// New (v2)
ctx = gosnowflake.WithMultiStatement(ctx, 0) // No error returned
```
```go
// Old (v1)
values := gosnowflake.Array(data)
// New (v2)
values, err := gosnowflake.Array(data) // Now returns error for unsupported types
if err != nil {
// handle error
}
```
#### 7. Nullable Options Combined
```go
// Old (v1)
ctx = gosnowflake.WithMapValuesNullable(ctx)
ctx = gosnowflake.WithArrayValuesNullable(ctx)
// New (v2)
ctx = gosnowflake.WithEmbeddedValuesNullable(ctx) // Handles both maps and arrays
```
#### 8. Session Parameter Changes
**Chunk download workers:**
```go
// Old (v1)
gosnowflake.MaxChunkDownloadWorkers = 10 // Global variable
// New (v2)
// Configure via CLIENT_PREFETCH_THREADS session parameter.
// NOTE: The default is 4.
db.Exec("ALTER SESSION SET CLIENT_PREFETCH_THREADS = 10")
```
#### 9. Transport Configuration
```go
import "crypto/tls"
// Old (v1)
gosnowflake.SnowflakeTransport = yourTransport
// New (v2)
config := &gosnowflake.Config{
Transporter: yourCustomTransport,
}
// Or, if you only need custom TLS settings/certificates:
tlsConfig := &tls.Config{
// ...
}
_ = gosnowflake.RegisterTLSConfig("custom", tlsConfig)
config.TLSConfigName = "custom"
```
#### 10. Environment Variable Fix
If you use the skip registration environment variable:
```sh
# Old (v1)
GOSNOWFLAKE_SKIP_REGISTERATION=true # Note the typo
# New (v2)
GOSNOWFLAKE_SKIP_REGISTRATION=true # Typo fixed
```
### Additional Resources
- Full list of changes: See [CHANGELOG.md](./CHANGELOG.md)
- Questions or issues: [GitHub Issues](https://github.com/snowflakedb/gosnowflake/issues)
## Support
For official support and urgent, production-impacting issues, please [contact Snowflake Support](https://community.snowflake.com/s/article/How-To-Submit-a-Support-Case-in-Snowflake-Lodge).
# Go Snowflake Driver
<a href="https://codecov.io/github/snowflakedb/gosnowflake?branch=master">
<img alt="Coverage" src="https://codecov.io/github/snowflakedb/gosnowflake/coverage.svg?branch=master">
</a>
<a href="https://github.com/snowflakedb/gosnowflake/actions?query=workflow%3A%22Build+and+Test%22">
<img src="https://github.com/snowflakedb/gosnowflake/workflows/Build%20and%20Test/badge.svg?branch=master">
</a>
<a href="http://www.apache.org/licenses/LICENSE-2.0.txt">
<img src="http://img.shields.io/:license-Apache%202-brightgreen.svg">
</a>
<a href="https://goreportcard.com/report/github.com/snowflakedb/gosnowflake">
<img src="https://goreportcard.com/badge/github.com/snowflakedb/gosnowflake">
</a>
This topic provides instructions for installing, running, and modifying the Go Snowflake Driver. The driver supports Go's [database/sql](https://golang.org/pkg/database/sql/) package.
# Prerequisites
The following software packages are required to use the Go Snowflake Driver.
## Go
The latest driver requires the [Go language](https://golang.org/) 1.24 or higher. The supported operating systems are 64-bits Linux, Mac OS, and Windows, but you may run the driver on other platforms if the Go language works correctly on those platforms.
# Installation
If you don't have a project initialized, set it up.
```sh
go mod init example.com/snowflake
```
Get Gosnowflake source code, if not installed.
```sh
go get -u github.com/snowflakedb/gosnowflake/v2
```
# Docs
For detailed documentation and basic usage examples, please see the documentation at
[godoc.org](https://godoc.org/github.com/snowflakedb/gosnowflake/v2).
## Notes
This driver currently does not support GCP regional endpoints. Please ensure that any workloads using this driver do not require support for regional endpoints on GCP. If you have questions about this, please contact Snowflake Support.
The driver uses a Rust library called sf_mini_core; you can find its source code [here](https://github.com/snowflakedb/universal-driver/tree/main/sf_mini_core).
# Sample Programs
Snowflake provides a set of sample programs to test with. Set the environment variable ``$GOPATH`` to the top directory of your workspace, e.g., ``~/go`` and make certain to
include ``$GOPATH/bin`` in the environment variable ``$PATH``. Run the ``make`` command to build all sample programs.
```sh
make install
```
In the following example, the program ``select1.go`` is built and installed in ``$GOPATH/bin`` and can be run from the command line:
```sh
SNOWFLAKE_TEST_ACCOUNT=<your_account> \
SNOWFLAKE_TEST_USER=<your_user> \
SNOWFLAKE_TEST_PASSWORD=<your_password> \
select1
Congrats! You have successfully run SELECT 1 with Snowflake DB!
```
# Development
The developer notes are hosted with the source code on [GitHub](https://github.com/snowflakedb/gosnowflake/v2).
## Testing Code
Set the Snowflake connection info in ``parameters.json``:
```json
{
"testconnection": {
"SNOWFLAKE_TEST_USER": "<your_user>",
"SNOWFLAKE_TEST_PASSWORD": "<your_password>",
"SNOWFLAKE_TEST_ACCOUNT": "<your_account>",
"SNOWFLAKE_TEST_WAREHOUSE": "<your_warehouse>",
"SNOWFLAKE_TEST_DATABASE": "<your_database>",
"SNOWFLAKE_TEST_SCHEMA": "<your_schema>",
"SNOWFLAKE_TEST_ROLE": "<your_role>",
"SNOWFLAKE_TEST_DEBUG": "false"
}
}
```
Install [jq](https://stedolan.github.io/jq) so that the parameters can get parsed correctly, and run ``make test`` in your Go development environment:
```sh
make test
```
### Setting debug mode during tests
This is for debugging Large SQL statements (greater than 300 characters). If you want to enable debug mode, set `SNOWFLAKE_TEST_DEBUG` to `true` in `parameters.json`, or export it in your shell instance.
## Customizing Logging Tags
If you would like to ensure that certain tags are always present in the logs, `RegisterClientLogContextHook` can be used in your init function. See example below.
```go
import "github.com/snowflakedb/gosnowflake/v2"
func init() {
// each time the logger is used, the logs will contain a REQUEST_ID field with requestID the value extracted
// from the context
gosnowflake.RegisterClientLogContextHook("REQUEST_ID", func(ctx context.Context) interface{} {
return requestIdFromContext(ctx)
})
}
```
## Setting Log Level
If you want to change the log level, `SetLogLevel` can be used in your init function like this:
```go
import "github.com/snowflakedb/gosnowflake/v2"
func init() {
// The following line changes the log level to debug
_ = gosnowflake.GetLogger().SetLogLevel("debug")
}
```
The following is a list of options you can pass in to set the level from least to most verbose:
- `"OFF"`
- `"fatal"`
- `"error"`
- `"warn"`
- `"info"`
- `"debug"`
- `"trace"`
## Capturing Code Coverage
Configure your testing environment as described above and run ``make cov``. The coverage percentage will be printed on the console when the testing completes.
```sh
make cov
```
For more detailed analysis, results are printed to ``coverage.txt`` in the project directory.
To read the coverage report, run:
```sh
go tool cover -html=coverage.txt
```
## Submitting Pull Requests
You may use your preferred editor to edit the driver code. Make certain to run ``make fmt lint`` before submitting any pull request to Snowflake. This command formats your source code according to the standard Go style and detects any coding style issues.
================================================
FILE: SECURITY.md
================================================
# Security Policy
Please refer to the Snowflake [HackerOne program](https://hackerone.com/snowflake?type=team) for our security policies and for reporting any security vulnerabilities.
For other security related questions and concerns, please contact the Snowflake security team at security@snowflake.com.
================================================
FILE: aaa_test.go
================================================
package gosnowflake
import (
"testing"
)
// TestShowServerVersion is a connectivity smoke test: it fetches and prints
// the server version reported by CURRENT_VERSION().
func TestShowServerVersion(t *testing.T) {
	runDBTest(t, func(dbt *DBTest) {
		result := dbt.mustQuery("SELECT CURRENT_VERSION()")
		defer func() {
			assertNilF(t, result.Close())
		}()
		var serverVersion string
		result.Next()
		assertNilF(t, result.Scan(&serverVersion))
		println(serverVersion)
	})
}
================================================
FILE: arrow_chunk.go
================================================
package gosnowflake
import (
"bytes"
"context"
"encoding/base64"
"github.com/snowflakedb/gosnowflake/v2/internal/query"
"time"
"github.com/apache/arrow-go/v18/arrow"
"github.com/apache/arrow-go/v18/arrow/ipc"
"github.com/apache/arrow-go/v18/arrow/memory"
)
// arrowResultChunk represents one Arrow IPC stream of query results together
// with the state required to decode it into driver values.
type arrowResultChunk struct {
	reader    *ipc.Reader      // IPC stream reader for this chunk
	rowCount  int              // number of rows decoded so far (updated by decodeArrowChunk)
	loc       *time.Location   // location applied when decoding time-typed columns
	allocator memory.Allocator // allocator used when constructing the reader
}
// decodeArrowChunk converts every record in the chunk's IPC stream into
// row-oriented driver values. The reader is released before returning.
func (arc *arrowResultChunk) decodeArrowChunk(ctx context.Context, rowType []query.ExecResponseRowType, highPrec bool, params *syncParams) ([]chunkRowType, error) {
	defer arc.reader.Release()
	logger.Debug("Arrow Decoder")
	var rows []chunkRowType
	for arc.reader.Next() {
		rec := arc.reader.Record()
		offset := len(rows)
		recRows := int(rec.NumRows())
		logger.Debugf("rows in current record: %v", recRows)
		cols := rec.Columns()
		// Grow the row-oriented output and allocate each new row's value slice.
		rows = append(rows, make([]chunkRowType, recRows)...)
		for r := offset; r < offset+recRows; r++ {
			rows[r].ArrowRow = make([]snowflakeValue, len(cols))
		}
		// Convert column-by-column, then scatter the values into the rows.
		for ci, column := range cols {
			converted := make([]snowflakeValue, recRows)
			if err := arrowToValues(ctx, converted, rowType[ci], column, arc.loc, highPrec, params); err != nil {
				return nil, err
			}
			for r, v := range converted {
				rows[offset+r].ArrowRow[ci] = v
			}
		}
		arc.rowCount += recRows
	}
	logger.Debugf("The number of chunk rows: %v", len(rows))
	return rows, arc.reader.Err()
}
// decodeArrowBatchRaw reads raw (untransformed) arrow records from the IPC reader.
// The records are not transformed with arrow-compute; the arrowbatches sub-package
// handles transformation when the user calls ArrowBatch.Fetch().
func (arc *arrowResultChunk) decodeArrowBatchRaw() (*[]arrow.Record, error) {
	defer arc.reader.Release()
	var collected []arrow.Record
	for arc.reader.Next() {
		rec := arc.reader.Record()
		// Retain each record so it outlives the reader's Release above.
		rec.Retain()
		collected = append(collected, rec)
	}
	return &collected, arc.reader.Err()
}
// buildFirstArrowChunk constructs an arrowResultChunk from the base64-encoded
// inline row set carried in the query response.
func buildFirstArrowChunk(rowsetBase64 string, loc *time.Location, alloc memory.Allocator) (arrowResultChunk, error) {
	decoded, err := base64.StdEncoding.DecodeString(rowsetBase64)
	if err != nil {
		return arrowResultChunk{}, err
	}
	reader, err := ipc.NewReader(bytes.NewReader(decoded), ipc.WithAllocator(alloc))
	if err != nil {
		return arrowResultChunk{}, err
	}
	return arrowResultChunk{reader: reader, rowCount: 0, loc: loc, allocator: alloc}, nil
}
================================================
FILE: arrow_stream.go
================================================
package gosnowflake
import (
"bufio"
"bytes"
"compress/gzip"
"context"
"encoding/base64"
"fmt"
"io"
"maps"
"net/http"
"strconv"
"time"
"github.com/apache/arrow-go/v18/arrow/ipc"
"github.com/snowflakedb/gosnowflake/v2/internal/query"
)
// ArrowStreamLoader is a convenience interface for downloading
// Snowflake results via multiple Arrow Record Batch streams.
//
// Some queries from Snowflake do not return Arrow data regardless
// of the settings, such as "SHOW WAREHOUSES". In these cases,
// you'll find TotalRows() > 0 but GetBatches returns no batches
// and no errors. In this case, the data is accessible via JSONData
// with the actual types matching up to the metadata in RowTypes.
type ArrowStreamLoader interface {
	// GetBatches returns one ArrowStreamBatch per downloadable result chunk,
	// plus a leading in-memory batch when the response carried inline data.
	GetBatches() ([]ArrowStreamBatch, error)
	// NextResultSet advances to the next result set of a multi-statement
	// response, returning io.EOF when there are no more.
	NextResultSet(ctx context.Context) error
	// TotalRows reports the total row count stated by the result metadata.
	TotalRows() int64
	// RowTypes describes the column metadata of the current result set.
	RowTypes() []query.ExecResponseRowType
	// Location returns the location used for time-typed values, or nil when
	// no connection is available.
	Location() *time.Location
	// JSONData returns inline row data for results delivered as JSON rather
	// than Arrow (see the note on non-Arrow queries above).
	JSONData() [][]*string
}
// ArrowStreamBatch is a type describing a potentially yet-to-be-downloaded
// Arrow IPC stream. Call GetStream to download and retrieve an io.Reader
// that can be used with ipc.NewReader to get record batch results.
type ArrowStreamBatch struct {
	idx     int                                  // index into scd.ChunkMetas for this batch
	numrows int64                                // row count reported by the result metadata
	scd     *snowflakeArrowStreamChunkDownloader // owning downloader, used to fetch the chunk
	Loc     *time.Location                       // location associated with the result set
	rr      io.ReadCloser                        // lazily populated stream; nil until downloaded
}
// NumRows returns the total number of rows that the metadata stated should
// be in this stream of record batches.
func (asb *ArrowStreamBatch) NumRows() int64 {
	return asb.numrows
}
// GetStream returns a stream of bytes consisting of an Arrow IPC Record
// batch stream. The download happens lazily on first call; Close should be
// called on the returned stream when done to ensure no leaked memory.
func (asb *ArrowStreamBatch) GetStream(ctx context.Context) (io.ReadCloser, error) {
	if asb.rr != nil {
		return asb.rr, nil
	}
	if err := asb.downloadChunkStreamHelper(ctx); err != nil {
		return nil, err
	}
	return asb.rr, nil
}
// streamWrapReader wraps an io.Reader so that Close closes the underlying body.
type streamWrapReader struct {
	io.Reader
	wrapped io.ReadCloser
}

// Close closes the wrapping Reader when it is itself an io.ReadCloser (e.g. a
// gzip reader) and then always closes the wrapped body as well, so the
// underlying HTTP response body cannot leak when the outer Close fails.
// The reader's error, if any, takes precedence over the wrapped body's.
func (w *streamWrapReader) Close() error {
	var readerErr error
	if cl, ok := w.Reader.(io.ReadCloser); ok {
		readerErr = cl.Close()
	}
	wrappedErr := w.wrapped.Close()
	if readerErr != nil {
		return readerErr
	}
	return wrappedErr
}
// downloadChunkStreamHelper fetches the chunk at asb.idx over HTTP and
// populates asb.rr with a ready-to-read stream, transparently unwrapping
// gzip-compressed bodies. On success, ownership of the response body passes
// to asb.rr (closed later via streamWrapReader.Close).
func (asb *ArrowStreamBatch) downloadChunkStreamHelper(ctx context.Context) error {
	// Prefer server-provided chunk headers; otherwise authenticate the
	// download with SSE-C headers derived from the query result master key.
	headers := make(map[string]string)
	if len(asb.scd.ChunkHeader) > 0 {
		maps.Copy(headers, asb.scd.ChunkHeader)
	} else {
		headers[headerSseCAlgorithm] = headerSseCAes
		headers[headerSseCKey] = asb.scd.Qrmk
	}
	resp, err := asb.scd.FuncGet(ctx, asb.scd.sc, asb.scd.ChunkMetas[asb.idx].URL, headers, asb.scd.sc.rest.RequestTimeout)
	if err != nil {
		return err
	}
	if resp.StatusCode != http.StatusOK {
		defer func() {
			_ = resp.Body.Close()
		}()
		// The error body is read but intentionally discarded; only a read
		// failure is surfaced. NOTE(review): presumably this drains the
		// connection for reuse — confirm before removing.
		b, err := io.ReadAll(resp.Body)
		if err != nil {
			return err
		}
		_ = b
		return &SnowflakeError{
			Number:      ErrFailedToGetChunk,
			SQLState:    SQLStateConnectionFailure,
			Message:     fmt.Sprintf("failed to get chunk. idx: %v", asb.idx),
			MessageArgs: []any{asb.idx},
		}
	}
	// Close the body only if we bail out before handing it to asb.rr; once
	// asb.rr is set, streamWrapReader.Close becomes responsible for it.
	defer func() {
		if asb.rr == nil {
			_ = resp.Body.Close()
		}
	}()
	bufStream := bufio.NewReader(resp.Body)
	// Sniff the first two bytes for the gzip magic number (0x1f 0x8b).
	gzipMagic, err := bufStream.Peek(2)
	if err != nil {
		return err
	}
	if gzipMagic[0] == 0x1f && gzipMagic[1] == 0x8b {
		bufStream0, err := gzip.NewReader(bufStream)
		if err != nil {
			return err
		}
		asb.rr = &streamWrapReader{Reader: bufStream0, wrapped: resp.Body}
	} else {
		asb.rr = &streamWrapReader{Reader: bufStream, wrapped: resp.Body}
	}
	return nil
}
// snowflakeArrowStreamChunkDownloader holds the per-result-set state needed
// to serve ArrowStreamBatch downloads (see ArrowStreamLoader).
type snowflakeArrowStreamChunkDownloader struct {
	sc          *snowflakeConn              // connection used for chunk and result-set requests
	ChunkMetas  []query.ExecResponseChunk   // per-chunk metadata (URL, row count)
	Total       int64                       // total row count reported by the server
	Qrmk        string                      // query result master key for SSE-C chunk downloads
	ChunkHeader map[string]string           // headers to send when downloading chunks, if provided
	FuncGet func(context.Context, *snowflakeConn, string, map[string]string, time.Duration) (*http.Response, error) // HTTP GET function; presumably injectable for tests
	RowSet      rowSetType                  // inline row set from the query response
	resultIDs   []string                    // remaining result IDs for multi-statement results
}
// Location reports the location associated with the current session, or nil
// when the downloader has no connection.
func (scd *snowflakeArrowStreamChunkDownloader) Location() *time.Location {
	if scd.sc == nil {
		return nil
	}
	return getCurrentLocation(&scd.sc.syncParams)
}
// TotalRows returns the total row count reported by the server metadata.
func (scd *snowflakeArrowStreamChunkDownloader) TotalRows() int64 {
	return scd.Total
}

// RowTypes returns the column type metadata for the current result set.
func (scd *snowflakeArrowStreamChunkDownloader) RowTypes() []query.ExecResponseRowType {
	return scd.RowSet.RowType
}

// JSONData returns the inline JSON row data for non-Arrow results.
func (scd *snowflakeArrowStreamChunkDownloader) JSONData() [][]*string {
	return scd.RowSet.JSON
}
// maybeFirstBatch validates and returns the decoded inline row set bytes.
// It returns (nil, nil) when the response carried no inline base64 row set.
func (scd *snowflakeArrowStreamChunkDownloader) maybeFirstBatch() ([]byte, error) {
	encoded := scd.RowSet.RowSetBase64
	if encoded == "" {
		return nil, nil
	}
	decoded, err := base64.StdEncoding.DecodeString(encoded)
	if err != nil {
		logger.Warnf("skipping first batch as it is not a valid base64 response. %v", err)
		return nil, err
	}
	// Verify the payload parses as an Arrow IPC stream before handing it out.
	reader, err := ipc.NewReader(bytes.NewReader(decoded))
	if err != nil {
		logger.Warnf("skipping first batch as it is not a valid IPC stream. %v", err)
		return nil, err
	}
	reader.Release()
	return decoded, nil
}
// GetBatches builds the list of ArrowStreamBatch descriptors for the current
// result set: one per downloadable chunk, preceded by an in-memory batch when
// the response carried an inline base64 row set.
func (scd *snowflakeArrowStreamChunkDownloader) GetBatches() (out []ArrowStreamBatch, err error) {
	chunkMetaLen := len(scd.ChunkMetas)
	loc := scd.Location()
	// Reserve one extra capacity slot in case an inline first batch exists.
	out = make([]ArrowStreamBatch, chunkMetaLen, chunkMetaLen+1)
	toFill := out
	rowSetBytes, err := scd.maybeFirstBatch()
	if err != nil {
		return nil, err
	}
	if len(rowSetBytes) > 0 {
		// Prepend the inline batch. toFill aliases out[1:], so the chunk
		// batches filled below land after it without shifting elements.
		out = out[:chunkMetaLen+1]
		out[0] = ArrowStreamBatch{
			scd: scd,
			Loc: loc,
			rr:  io.NopCloser(bytes.NewReader(rowSetBytes)),
		}
		toFill = out[1:]
	}
	var totalCounted int64
	for i := range toFill {
		toFill[i] = ArrowStreamBatch{
			idx:     i,
			numrows: int64(scd.ChunkMetas[i].RowCount),
			Loc:     loc,
			scd:     scd,
		}
		totalCounted += int64(scd.ChunkMetas[i].RowCount)
	}
	if len(rowSetBytes) > 0 {
		// The inline batch accounts for whatever rows the chunks do not.
		out[0].numrows = scd.Total - totalCounted
	}
	return
}
// NextResultSet fetches the metadata of the next result set of a
// multi-statement response and replaces the downloader's chunk state with it.
// It returns io.EOF once all result IDs have been consumed.
func (scd *snowflakeArrowStreamChunkDownloader) NextResultSet(ctx context.Context) error {
	if !scd.hasNextResultSet() {
		return io.EOF
	}
	// Pop the next result ID off the queue.
	nextID := scd.resultIDs[0]
	scd.resultIDs = scd.resultIDs[1:]
	resp, err := scd.sc.getQueryResultResp(ctx, fmt.Sprintf(urlQueriesResultFmt, nextID))
	if err != nil {
		return err
	}
	if !resp.Success {
		// A parse failure leaves the code at zero; log it and report anyway.
		errCode, convErr := strconv.Atoi(resp.Code)
		if convErr != nil {
			logger.WithContext(ctx).Errorf("error while parsing code: %v", convErr)
		}
		return exceptionTelemetry(&SnowflakeError{
			Number:   errCode,
			SQLState: resp.Data.SQLState,
			Message:  resp.Message,
			QueryID:  resp.Data.QueryID,
		}, scd.sc)
	}
	scd.ChunkMetas = resp.Data.Chunks
	scd.Total = resp.Data.Total
	scd.Qrmk = resp.Data.Qrmk
	scd.ChunkHeader = resp.Data.ChunkHeaders
	scd.RowSet = rowSetType{
		RowType:      resp.Data.RowType,
		JSON:         resp.Data.RowSet,
		RowSetBase64: resp.Data.RowSetBase64,
	}
	return nil
}
// hasNextResultSet reports whether any result IDs remain to be fetched.
func (scd *snowflakeArrowStreamChunkDownloader) hasNextResultSet() bool {
	remaining := len(scd.resultIDs)
	return remaining > 0
}
================================================
FILE: arrow_test.go
================================================
package gosnowflake
import (
"bytes"
"context"
"fmt"
"math/big"
"reflect"
"strings"
"testing"
"time"
"github.com/apache/arrow-go/v18/arrow/memory"
ia "github.com/snowflakedb/gosnowflake/v2/internal/arrow"
"database/sql/driver"
)
// TestArrowBatchDataProvider exercises the internal arrow-batches pathway:
// it enables batch mode on the context, runs a query through the raw
// driver-level connection, and verifies that pre-decoded raw records are
// exposed via the BatchDataProvider interface.
func TestArrowBatchDataProvider(t *testing.T) {
	runDBTest(t, func(dbt *DBTest) {
		ctx := ia.EnableArrowBatches(context.Background())
		query := "select '0.1':: DECIMAL(38, 19) as c"
		var rows driver.Rows
		var err error
		// Use Raw to reach the driver-level connection, bypassing database/sql.
		err = dbt.conn.Raw(func(x any) error {
			queryer, implementsQueryContext := x.(driver.QueryerContext)
			assertTrueF(t, implementsQueryContext, "snowflake connection driver does not implement queryerContext")
			rows, err = queryer.QueryContext(ctx, query, nil)
			return err
		})
		assertNilF(t, err, "error running select query")
		sfRows, isSfRows := rows.(SnowflakeRows)
		assertTrueF(t, isSfRows, "rows should be snowflakeRows")
		provider, isProvider := sfRows.(ia.BatchDataProvider)
		assertTrueF(t, isProvider, "rows should implement BatchDataProvider")
		info, err := provider.GetArrowBatches()
		assertNilF(t, err, "error getting arrow batch data")
		assertNotEqualF(t, len(info.Batches), 0, "should have at least one batch")
		// Verify raw records are available for the first batch
		batch := info.Batches[0]
		assertNotNilF(t, batch.Records, "first batch should have pre-decoded records")
		records := *batch.Records
		assertNotEqualF(t, len(records), 0, "should have at least one record")
		// Verify column 0 has data (raw decimal value)
		strVal := records[0].Column(0).ValueStr(0)
		assertTrueF(t, len(strVal) > 0, fmt.Sprintf("column should have a value, got: %s", strVal))
	})
}
// TestArrowBigInt verifies that 38-digit NUMBER values survive a round trip
// through the driver when scanned into *big.Int with higher precision enabled.
func TestArrowBigInt(t *testing.T) {
	runDBTest(t, func(dbt *DBTest) {
		testcases := []struct {
			num  string
			prec int
			sc   int
		}{
			{"10000000000000000000000000000000000000", 38, 0},
			{"-10000000000000000000000000000000000000", 38, 0},
			{"12345678901234567890123456789012345678", 38, 0}, // #pragma: allowlist secret
			{"-12345678901234567890123456789012345678", 38, 0},
			{"99999999999999999999999999999999999999", 38, 0},
			{"-99999999999999999999999999999999999999", 38, 0},
		}
		for _, tc := range testcases {
			// Run each case in a closure so rows are closed at the end of the
			// iteration; a bare `defer` in the loop would keep every result
			// set open until the whole test returns.
			func() {
				rows := dbt.mustQueryContext(WithHigherPrecision(context.Background()),
					fmt.Sprintf(selectNumberSQL, tc.num, tc.prec, tc.sc))
				defer rows.Close()
				if !rows.Next() {
					dbt.Error("failed to query")
				}
				var v *big.Int
				if err := rows.Scan(&v); err != nil {
					dbt.Errorf("failed to scan. %#v", err)
				}
				b, ok := new(big.Int).SetString(tc.num, 10)
				if !ok {
					dbt.Errorf("failed to convert %v big.Int.", tc.num)
				}
				if v.Cmp(b) != 0 {
					dbt.Errorf("big.Int value mismatch: expected %v, got %v", b, v)
				}
			}()
		}
	})
}
// TestArrowBigFloat verifies that high-scale DECIMAL values survive a round
// trip through the driver when scanned into *big.Float with higher precision
// enabled.
func TestArrowBigFloat(t *testing.T) {
	runDBTest(t, func(dbt *DBTest) {
		testcases := []struct {
			num  string
			prec int
			sc   int
		}{
			{"1.23", 30, 2},
			{"1.0000000000000000000000000000000000000", 38, 37},
			{"-1.0000000000000000000000000000000000000", 38, 37},
			{"1.2345678901234567890123456789012345678", 38, 37},
			{"-1.2345678901234567890123456789012345678", 38, 37},
			{"9.9999999999999999999999999999999999999", 38, 37},
			{"-9.9999999999999999999999999999999999999", 38, 37},
		}
		for _, tc := range testcases {
			// Run each case in a closure so rows are closed at the end of the
			// iteration; a bare `defer` in the loop would keep every result
			// set open until the whole test returns.
			func() {
				rows := dbt.mustQueryContext(WithHigherPrecision(context.Background()),
					fmt.Sprintf(selectNumberSQL, tc.num, tc.prec, tc.sc))
				defer rows.Close()
				if !rows.Next() {
					dbt.Error("failed to query")
				}
				var v *big.Float
				if err := rows.Scan(&v); err != nil {
					dbt.Errorf("failed to scan. %#v", err)
				}
				// Compare at the precision of the scanned value to avoid
				// spurious mismatches from differing default precision.
				prec := v.Prec()
				b, ok := new(big.Float).SetPrec(prec).SetString(tc.num)
				if !ok {
					dbt.Errorf("failed to convert %v to big.Float.", tc.num)
				}
				if v.Cmp(b) != 0 {
					dbt.Errorf("big.Float value mismatch: expected %v, got %v", b, v)
				}
			}()
		}
	})
}
// TestArrowIntPrecision checks how 38-digit integers are surfaced under both
// the JSON and Arrow result formats: JSON results cannot be scanned into
// int64 (they overflow) but can be read as strings; Arrow results can be read
// as strings and, with WithHigherPrecision, as *big.Int.
func TestArrowIntPrecision(t *testing.T) {
	runDBTest(t, func(dbt *DBTest) {
		// Force the JSON result format for the first group of subtests.
		dbt.mustExec(forceJSON)
		intTestcases := []struct {
			num  string
			prec int
			sc   int
		}{
			{"10000000000000000000000000000000000000", 38, 0},
			{"-10000000000000000000000000000000000000", 38, 0},
			{"12345678901234567890123456789012345678", 38, 0}, // pragma: allowlist secret
			{"-12345678901234567890123456789012345678", 38, 0},
			{"99999999999999999999999999999999999999", 38, 0},
			{"-99999999999999999999999999999999999999", 38, 0},
		}
		t.Run("arrow_disabled_scan_int64", func(t *testing.T) {
			for _, tc := range intTestcases {
				rows := dbt.mustQuery(fmt.Sprintf(selectNumberSQL, tc.num, tc.prec, tc.sc))
				defer rows.Close()
				if !rows.Next() {
					t.Error("failed to query")
				}
				// The values exceed the int64 range, so the scan must fail.
				var v int64
				if err := rows.Scan(&v); err == nil {
					t.Error("should fail to scan")
				}
			}
		})
		t.Run("arrow_disabled_scan_string", func(t *testing.T) {
			for _, tc := range intTestcases {
				rows := dbt.mustQuery(fmt.Sprintf(selectNumberSQL, tc.num, tc.prec, tc.sc))
				defer rows.Close()
				if !rows.Next() {
					t.Error("failed to query")
				}
				var v string
				if err := rows.Scan(&v); err != nil {
					t.Errorf("failed to scan. %#v", err)
				}
				if v != tc.num {
					t.Errorf("string value mismatch: expected %v, got %v", tc.num, v)
				}
			}
		})
		// Switch the session to the Arrow result format for the rest.
		dbt.mustExec(forceARROW)
		t.Run("arrow_enabled_scan_big_int", func(t *testing.T) {
			for _, tc := range intTestcases {
				rows := dbt.mustQuery(fmt.Sprintf(selectNumberSQL, tc.num, tc.prec, tc.sc))
				defer rows.Close()
				if !rows.Next() {
					t.Error("failed to query")
				}
				var v string
				if err := rows.Scan(&v); err != nil {
					t.Errorf("failed to scan. %#v", err)
				}
				if !strings.EqualFold(v, tc.num) {
					t.Errorf("int value mismatch: expected %v, got %v", tc.num, v)
				}
			}
		})
		t.Run("arrow_high_precision_enabled_scan_big_int", func(t *testing.T) {
			for _, tc := range intTestcases {
				rows := dbt.mustQueryContext(WithHigherPrecision(context.Background()), fmt.Sprintf(selectNumberSQL, tc.num, tc.prec, tc.sc))
				defer rows.Close()
				if !rows.Next() {
					t.Error("failed to query")
				}
				var v *big.Int
				if err := rows.Scan(&v); err != nil {
					t.Errorf("failed to scan. %#v", err)
				}
				b, ok := new(big.Int).SetString(tc.num, 10)
				if !ok {
					t.Errorf("failed to convert %v big.Int.", tc.num)
				}
				if v.Cmp(b) != 0 {
					t.Errorf("big.Int value mismatch: expected %v, got %v", b, v)
				}
			}
		})
	})
}
// TestArrowFloatPrecision tests the different variable types allowed in the
// rows.Scan() method. Note that for lower precision types we do not attempt
// to check the value as precision could be lost. The same cases are run under
// both the JSON (forceJSON) and Arrow (forceARROW) result formats.
func TestArrowFloatPrecision(t *testing.T) {
	runDBTest(t, func(dbt *DBTest) {
		// Force the JSON result format for the first group of subtests.
		dbt.mustExec(forceJSON)
		fltTestcases := []struct {
			num  string
			prec int
			sc   int
		}{
			{"1.23", 30, 2},
			{"1.0000000000000000000000000000000000000", 38, 37},
			{"-1.0000000000000000000000000000000000000", 38, 37},
			{"1.2345678901234567890123456789012345678", 38, 37},
			{"-1.2345678901234567890123456789012345678", 38, 37},
			{"9.9999999999999999999999999999999999999", 38, 37},
			{"-9.9999999999999999999999999999999999999", 38, 37},
		}
		t.Run("arrow_disabled_scan_float64", func(t *testing.T) {
			for _, tc := range fltTestcases {
				rows := dbt.mustQuery(fmt.Sprintf(selectNumberSQL, tc.num, tc.prec, tc.sc))
				defer rows.Close()
				if !rows.Next() {
					t.Error("failed to query")
				}
				// Only the scan itself is checked; float64 may lose precision.
				var v float64
				if err := rows.Scan(&v); err != nil {
					t.Errorf("failed to scan. %#v", err)
				}
			}
		})
		t.Run("arrow_disabled_scan_float32", func(t *testing.T) {
			for _, tc := range fltTestcases {
				rows := dbt.mustQuery(fmt.Sprintf(selectNumberSQL, tc.num, tc.prec, tc.sc))
				defer rows.Close()
				if !rows.Next() {
					t.Error("failed to query")
				}
				var v float32
				if err := rows.Scan(&v); err != nil {
					t.Errorf("failed to scan. %#v", err)
				}
			}
		})
		t.Run("arrow_disabled_scan_string", func(t *testing.T) {
			for _, tc := range fltTestcases {
				rows := dbt.mustQuery(fmt.Sprintf(selectNumberSQL, tc.num, tc.prec, tc.sc))
				defer rows.Close()
				if !rows.Next() {
					t.Error("failed to query")
				}
				// Strings preserve the full precision, so the value is checked.
				var v string
				if err := rows.Scan(&v); err != nil {
					t.Errorf("failed to scan. %#v", err)
				}
				if !strings.EqualFold(v, tc.num) {
					t.Errorf("int value mismatch: expected %v, got %v", tc.num, v)
				}
			}
		})
		// Switch the session to the Arrow result format for the rest.
		dbt.mustExec(forceARROW)
		t.Run("arrow_enabled_scan_float64", func(t *testing.T) {
			for _, tc := range fltTestcases {
				rows := dbt.mustQuery(fmt.Sprintf(selectNumberSQL, tc.num, tc.prec, tc.sc))
				defer rows.Close()
				if !rows.Next() {
					t.Error("failed to query")
				}
				var v float64
				if err := rows.Scan(&v); err != nil {
					t.Errorf("failed to scan. %#v", err)
				}
			}
		})
		t.Run("arrow_enabled_scan_float32", func(t *testing.T) {
			for _, tc := range fltTestcases {
				rows := dbt.mustQuery(fmt.Sprintf(selectNumberSQL, tc.num, tc.prec, tc.sc))
				defer rows.Close()
				if !rows.Next() {
					t.Error("failed to query")
				}
				var v float32
				if err := rows.Scan(&v); err != nil {
					t.Errorf("failed to scan. %#v", err)
				}
			}
		})
		t.Run("arrow_enabled_scan_string", func(t *testing.T) {
			for _, tc := range fltTestcases {
				rows := dbt.mustQuery(fmt.Sprintf(selectNumberSQL, tc.num, tc.prec, tc.sc))
				defer rows.Close()
				if !rows.Next() {
					t.Error("failed to query")
				}
				var v string
				if err := rows.Scan(&v); err != nil {
					t.Errorf("failed to scan. %#v", err)
				}
				if v != tc.num {
					t.Errorf("string value mismatch: expected %v, got %v", tc.num, v)
				}
			}
		})
		t.Run("arrow_high_precision_enabled_scan_big_float", func(t *testing.T) {
			for _, tc := range fltTestcases {
				rows := dbt.mustQueryContext(WithHigherPrecision(context.Background()), fmt.Sprintf(selectNumberSQL, tc.num, tc.prec, tc.sc))
				defer rows.Close()
				if !rows.Next() {
					t.Error("failed to query")
				}
				var v *big.Float
				if err := rows.Scan(&v); err != nil {
					t.Errorf("failed to scan. %#v", err)
				}
				// Compare at the precision of the scanned value.
				prec := v.Prec()
				b, ok := new(big.Float).SetPrec(prec).SetString(tc.num)
				if !ok {
					t.Errorf("failed to convert %v to big.Float.", tc.num)
				}
				if v.Cmp(b) != 0 {
					t.Errorf("big.Float value mismatch: expected %v, got %v", b, v)
				}
			}
		})
	})
}
// TestArrowTimePrecision verifies fractional-second truncation for TIME(5..8)
// and TIMESTAMP_NTZ(1..8) columns: each column's nanosecond component must be
// the full value 999999990 truncated to the column's declared precision.
func TestArrowTimePrecision(t *testing.T) {
	runDBTest(t, func(dbt *DBTest) {
		dbt.mustExec("CREATE TABLE t (col5 TIME(5), col6 TIME(6), col7 TIME(7), col8 TIME(8));")
		defer dbt.mustExec("DROP TABLE IF EXISTS t")
		dbt.mustExec("INSERT INTO t VALUES ('23:59:59.99999', '23:59:59.999999', '23:59:59.9999999', '23:59:59.99999999');")
		rows := dbt.mustQuery("select * from t")
		defer rows.Close()
		var c5, c6, c7, c8 time.Time
		for rows.Next() {
			if err := rows.Scan(&c5, &c6, &c7, &c8); err != nil {
				t.Errorf("values were not scanned: %v", err)
			}
		}
		// Nanoseconds of ".99999999" padded to 9 digits.
		nano := 999999990
		// NOTE(review): `expected` is only compared via Unix() (whole seconds),
		// so the 99ms fractional part here is irrelevant; the nanosecond part
		// is checked separately against `nano` truncated per precision.
		expected := time.Time{}.Add(23*time.Hour + 59*time.Minute + 59*time.Second + 99*time.Millisecond)
		if c8.Unix() != expected.Unix() || c8.Nanosecond() != nano {
			t.Errorf("the value did not match. expected: %v, got: %v", expected, c8)
		}
		if c7.Unix() != expected.Unix() || c7.Nanosecond() != nano-(nano%1e2) {
			t.Errorf("the value did not match. expected: %v, got: %v", expected, c7)
		}
		if c6.Unix() != expected.Unix() || c6.Nanosecond() != nano-(nano%1e3) {
			t.Errorf("the value did not match. expected: %v, got: %v", expected, c6)
		}
		if c5.Unix() != expected.Unix() || c5.Nanosecond() != nano-(nano%1e4) {
			t.Errorf("the value did not match. expected: %v, got: %v", expected, c5)
		}
		// Repeat the check with TIMESTAMP_NTZ columns at precisions 1..8.
		dbt.mustExec(`CREATE TABLE t_ntz (
			col1 TIMESTAMP_NTZ(1),
			col2 TIMESTAMP_NTZ(2),
			col3 TIMESTAMP_NTZ(3),
			col4 TIMESTAMP_NTZ(4),
			col5 TIMESTAMP_NTZ(5),
			col6 TIMESTAMP_NTZ(6),
			col7 TIMESTAMP_NTZ(7),
			col8 TIMESTAMP_NTZ(8)
		);`)
		defer dbt.mustExec("DROP TABLE IF EXISTS t_ntz")
		dbt.mustExec(`INSERT INTO t_ntz VALUES (
			'9999-12-31T23:59:59.9',
			'9999-12-31T23:59:59.99',
			'9999-12-31T23:59:59.999',
			'9999-12-31T23:59:59.9999',
			'9999-12-31T23:59:59.99999',
			'9999-12-31T23:59:59.999999',
			'9999-12-31T23:59:59.9999999',
			'9999-12-31T23:59:59.99999999'
		);`)
		rows2 := dbt.mustQuery("select * from t_ntz")
		defer rows2.Close()
		var c1, c2, c3, c4 time.Time
		for rows2.Next() {
			if err := rows2.Scan(&c1, &c2, &c3, &c4, &c5, &c6, &c7, &c8); err != nil {
				t.Errorf("values were not scanned: %v", err)
			}
		}
		expected = time.Date(9999, 12, 31, 23, 59, 59, 0, time.UTC)
		if c8.Unix() != expected.Unix() || c8.Nanosecond() != nano {
			t.Errorf("the value did not match. expected: %v, got: %v", expected, c8)
		}
		if c7.Unix() != expected.Unix() || c7.Nanosecond() != nano-(nano%1e2) {
			t.Errorf("the value did not match. expected: %v, got: %v", expected, c7)
		}
		if c6.Unix() != expected.Unix() || c6.Nanosecond() != nano-(nano%1e3) {
			t.Errorf("the value did not match. expected: %v, got: %v", expected, c6)
		}
		if c5.Unix() != expected.Unix() || c5.Nanosecond() != nano-(nano%1e4) {
			t.Errorf("the value did not match. expected: %v, got: %v", expected, c5)
		}
		if c4.Unix() != expected.Unix() || c4.Nanosecond() != nano-(nano%1e5) {
			t.Errorf("the value did not match. expected: %v, got: %v", expected, c4)
		}
		if c3.Unix() != expected.Unix() || c3.Nanosecond() != nano-(nano%1e6) {
			t.Errorf("the value did not match. expected: %v, got: %v", expected, c3)
		}
		if c2.Unix() != expected.Unix() || c2.Nanosecond() != nano-(nano%1e7) {
			t.Errorf("the value did not match. expected: %v, got: %v", expected, c2)
		}
		if c1.Unix() != expected.Unix() || c1.Nanosecond() != nano-(nano%1e8) {
			t.Errorf("the value did not match. expected: %v, got: %v", expected, c1)
		}
	})
}
// TestArrowVariousTypes verifies scanned values and column metadata (names,
// scan types, precision/scale, length, nullability) for a query returning a
// mix of NUMBER, STRING, FLOAT, BINARY and BOOLEAN columns in Arrow format
// with higher-precision mode enabled.
func TestArrowVariousTypes(t *testing.T) {
	runDBTest(t, func(dbt *DBTest) {
		rows := dbt.mustQueryContext(
			WithHigherPrecision(context.Background()), selectVariousTypes)
		defer rows.Close()
		if !rows.Next() {
			dbt.Error("failed to query")
		}
		cc, err := rows.Columns()
		if err != nil {
			dbt.Errorf("columns: %v", cc)
		}
		ct, err := rows.ColumnTypes()
		if err != nil {
			dbt.Errorf("column types: %v", ct)
		}
		var v1 *big.Float
		var v2, v2a int
		var v3 string
		var v4 float64
		var v5 []byte
		var v6 bool
		if err = rows.Scan(&v1, &v2, &v2a, &v3, &v4, &v5, &v6); err != nil {
			dbt.Errorf("failed to scan: %#v", err)
		}
		if v1.Cmp(big.NewFloat(1.0)) != 0 {
			dbt.Errorf("failed to scan. %#v", *v1)
		}
		if ct[0].Name() != "C1" || ct[1].Name() != "C2" || ct[2].Name() != "C2A" || ct[3].Name() != "C3" || ct[4].Name() != "C4" || ct[5].Name() != "C5" || ct[6].Name() != "C6" {
			dbt.Errorf("failed to get column names: %#v", ct)
		}
		// With higher precision enabled, the fixed-point C1 scans as *big.Float.
		// Fix: the failure message previously reported float64 as the expected
		// type even though the comparison is against *big.Float.
		if ct[0].ScanType() != reflect.TypeFor[*big.Float]() {
			dbt.Errorf("failed to get scan type. expected: %v, got: %v", reflect.TypeFor[*big.Float](), ct[0].ScanType())
		}
		if ct[1].ScanType() != reflect.TypeFor[int64]() {
			dbt.Errorf("failed to get scan type. expected: %v, got: %v", reflect.TypeFor[int64](), ct[1].ScanType())
		}
		if ct[2].ScanType() != reflect.TypeFor[*big.Int]() {
			dbt.Errorf("failed to get scan type. expected: %v, got: %v", reflect.TypeFor[*big.Int](), ct[2].ScanType())
		}
		var pr, sc int64
		var cLen int64
		pr, sc = dbt.mustDecimalSize(ct[0])
		if pr != 30 || sc != 2 {
			dbt.Errorf("failed to get precision and scale. %#v", ct[0])
		}
		dbt.mustFailLength(ct[0])
		if canNull := dbt.mustNullable(ct[0]); canNull {
			dbt.Errorf("failed to get nullable. %#v", ct[0])
		}
		// (Removed a dead `if cLen != 0` check that sat here: cLen had not
		// been assigned yet, so the condition could never fire.)
		if v2 != 2 {
			dbt.Errorf("failed to scan. %#v", v2)
		}
		pr, sc = dbt.mustDecimalSize(ct[1])
		if pr != 18 || sc != 0 {
			dbt.Errorf("failed to get precision and scale. %#v", ct[1])
		}
		dbt.mustFailLength(ct[1])
		if canNull := dbt.mustNullable(ct[1]); canNull {
			dbt.Errorf("failed to get nullable. %#v", ct[1])
		}
		if v2a != 22 {
			dbt.Errorf("failed to scan. %#v", v2a)
		}
		dbt.mustFailLength(ct[2])
		if canNull := dbt.mustNullable(ct[2]); canNull {
			dbt.Errorf("failed to get nullable. %#v", ct[2])
		}
		if v3 != "t3" {
			dbt.Errorf("failed to scan. %#v", v3)
		}
		dbt.mustFailDecimalSize(ct[3])
		if cLen = dbt.mustLength(ct[3]); cLen != 2 {
			dbt.Errorf("failed to get length. %#v", ct[3])
		}
		if canNull := dbt.mustNullable(ct[3]); canNull {
			dbt.Errorf("failed to get nullable. %#v", ct[3])
		}
		if v4 != 4.2 {
			dbt.Errorf("failed to scan. %#v", v4)
		}
		dbt.mustFailDecimalSize(ct[4])
		dbt.mustFailLength(ct[4])
		if canNull := dbt.mustNullable(ct[4]); canNull {
			dbt.Errorf("failed to get nullable. %#v", ct[4])
		}
		if !bytes.Equal(v5, []byte{0xab, 0xcd}) {
			dbt.Errorf("failed to scan. %#v", v5)
		}
		dbt.mustFailDecimalSize(ct[5])
		if cLen = dbt.mustLength(ct[5]); cLen != 8388608 { // BINARY
			dbt.Errorf("failed to get length. %#v", ct[5])
		}
		if canNull := dbt.mustNullable(ct[5]); canNull {
			dbt.Errorf("failed to get nullable. %#v", ct[5])
		}
		if !v6 {
			dbt.Errorf("failed to scan. %#v", v6)
		}
		dbt.mustFailDecimalSize(ct[6])
		dbt.mustFailLength(ct[6])
	})
}
// TestArrowMemoryCleanedUp ensures that all Arrow buffers allocated while
// reading a small two-row result set are released by the time the rows are
// closed, using a checked allocator that asserts zero outstanding bytes.
func TestArrowMemoryCleanedUp(t *testing.T) {
	checked := memory.NewCheckedAllocator(memory.NewGoAllocator())
	defer checked.AssertSize(t, 0)
	runDBTest(t, func(dbt *DBTest) {
		rows := dbt.mustQueryContext(
			WithArrowAllocator(context.Background(), checked),
			"select 1 UNION select 2 ORDER BY 1",
		)
		defer rows.Close()
		var got int
		for _, want := range []int{1, 2} {
			assertTrueF(t, rows.Next())
			assertNilF(t, rows.Scan(&got))
			assertEqualE(t, got, want)
		}
		assertFalseE(t, rows.Next())
	})
}
================================================
FILE: arrowbatches/batches.go
================================================
package arrowbatches
import (
"cmp"
"context"
"github.com/snowflakedb/gosnowflake/v2/internal/query"
"github.com/snowflakedb/gosnowflake/v2/internal/types"
"time"
sf "github.com/snowflakedb/gosnowflake/v2"
ia "github.com/snowflakedb/gosnowflake/v2/internal/arrow"
"github.com/apache/arrow-go/v18/arrow"
"github.com/apache/arrow-go/v18/arrow/memory"
)
// ArrowBatch represents a chunk of data retrievable in arrow.Record format.
type ArrowBatch struct {
	raw       ia.BatchRaw                 // underlying batch: preloaded records or a lazy download callback
	rowTypes  []query.ExecResponseRowType // per-column Snowflake metadata used during conversion
	allocator memory.Allocator            // allocator used when building transformed records
	ctx       context.Context             // optional per-batch context set via WithContext
}
// WithContext sets the context for subsequent Fetch calls on this batch.
// It returns the receiver so calls can be chained.
func (rb *ArrowBatch) WithContext(ctx context.Context) *ArrowBatch {
	rb.ctx = ctx
	return rb
}
// Fetch returns an array of arrow.Record representing this batch's data.
// Records are transformed from Snowflake's internal format to standard Arrow types.
// Ownership of the returned records passes to the caller, who must Release them.
func (rb *ArrowBatch) Fetch() (*[]arrow.Record, error) {
	var rawRecords *[]arrow.Record
	// Use the per-batch context set via WithContext, falling back to a
	// background context when none was set.
	ctx := cmp.Or(rb.ctx, context.Background())
	if rb.raw.Records != nil {
		rawRecords = rb.raw.Records
	} else if rb.raw.Download != nil {
		// Lazily download the chunk and cache the raw records and row count.
		recs, rowCount, err := rb.raw.Download(ctx)
		if err != nil {
			return nil, err
		}
		rawRecords = recs
		rb.raw.Records = recs
		rb.raw.RowCount = rowCount
	}
	if rawRecords == nil || len(*rawRecords) == 0 {
		empty := make([]arrow.Record, 0)
		return &empty, nil
	}
	var transformed []arrow.Record
	for i, rec := range *rawRecords {
		newRec, err := arrowToRecord(ctx, rec, rb.allocator, rb.rowTypes, rb.raw.Location)
		if err != nil {
			// On failure, release everything produced so far plus the raw
			// records not yet consumed (index i onward) so no memory leaks.
			for _, t := range transformed {
				t.Release()
			}
			for _, r := range (*rawRecords)[i:] {
				r.Release()
			}
			rb.raw.Records = nil
			return nil, err
		}
		transformed = append(transformed, newRec)
		// The raw record has been converted; drop our reference to it.
		rec.Release()
	}
	// Clear the cached raw records: the transformed records are now owned by
	// the caller. NOTE(review): a subsequent Fetch re-downloads when a
	// Download callback exists — confirm that is intended.
	rb.raw.Records = nil
	rb.raw.RowCount = countArrowBatchRows(&transformed)
	return &transformed, nil
}
// GetRowCount returns the number of rows in this batch.
// NOTE(review): before the first Fetch of a lazily-downloaded batch this is
// whatever RowCount the raw batch carried — confirm it is pre-populated.
func (rb *ArrowBatch) GetRowCount() int {
	return rb.raw.RowCount
}
// GetLocation returns the timezone location for this batch, as used when
// converting Snowflake timestamps to time.Time.
func (rb *ArrowBatch) GetLocation() *time.Location {
	return rb.raw.Location
}
// GetRowTypes returns the column metadata for this batch. The slice is shared
// with the batch; callers should treat it as read-only.
func (rb *ArrowBatch) GetRowTypes() []query.ExecResponseRowType {
	return rb.rowTypes
}
// ArrowSnowflakeTimestampToTime converts an original Snowflake timestamp to time.Time.
// It looks up the column's Snowflake type and scale from the batch metadata
// and delegates to the package-level converter.
func (rb *ArrowBatch) ArrowSnowflakeTimestampToTime(rec arrow.Record, colIdx int, recIdx int) *time.Time {
	meta := rb.rowTypes[colIdx]
	sfType := types.GetSnowflakeType(meta.Type)
	return ArrowSnowflakeTimestampToTime(rec.Column(colIdx), sfType, int(meta.Scale), recIdx, rb.raw.Location)
}
// GetArrowBatches retrieves arrow batches from SnowflakeRows.
// The rows must have been queried with arrowbatches.WithArrowBatches(ctx).
func GetArrowBatches(rows sf.SnowflakeRows) ([]*ArrowBatch, error) {
	provider, ok := rows.(ia.BatchDataProvider)
	if !ok {
		return nil, &sf.SnowflakeError{
			Number:  sf.ErrNotImplemented,
			Message: "rows do not support arrow batch data",
		}
	}
	info, err := provider.GetArrowBatches()
	if err != nil {
		return nil, err
	}
	// Wrap each raw batch with the shared metadata, allocator and context.
	result := make([]*ArrowBatch, 0, len(info.Batches))
	for _, raw := range info.Batches {
		result = append(result, &ArrowBatch{
			raw:       raw,
			rowTypes:  info.RowTypes,
			allocator: info.Allocator,
			ctx:       info.Ctx,
		})
	}
	return result, nil
}
// countArrowBatchRows sums the row counts of all records in recs.
func countArrowBatchRows(recs *[]arrow.Record) int {
	total := 0
	for _, rec := range *recs {
		total += int(rec.NumRows())
	}
	return total
}
// GetAllocator returns the memory allocator for this batch, i.e. the one used
// when transforming records in Fetch.
func (rb *ArrowBatch) GetAllocator() memory.Allocator {
	return rb.allocator
}
================================================
FILE: arrowbatches/batches_test.go
================================================
package arrowbatches
import (
"context"
"crypto/rsa"
"crypto/x509"
"database/sql"
"database/sql/driver"
"encoding/pem"
"errors"
"fmt"
"math"
"os"
"path/filepath"
"strconv"
"strings"
"sync"
"testing"
"time"
"github.com/apache/arrow-go/v18/arrow"
"github.com/apache/arrow-go/v18/arrow/array"
"github.com/apache/arrow-go/v18/arrow/memory"
sf "github.com/snowflakedb/gosnowflake/v2"
ia "github.com/snowflakedb/gosnowflake/v2/internal/arrow"
)
// testConn holds a reusable database connection for running multiple queries.
type testConn struct {
	db   *sql.DB   // owning database handle; closed after the connection
	conn *sql.Conn // dedicated connection so session state persists across queries
}
// repoRoot walks up from the current working directory to find the directory
// containing go.mod, which is the repository root.
func repoRoot(t *testing.T) string {
t.Helper()
dir, err := os.Getwd()
if err != nil {
t.Fatalf("failed to get working directory: %v", err)
}
for {
if _, err = os.Stat(filepath.Join(dir, "go.mod")); err == nil {
return dir
}
if !os.IsNotExist(err) {
t.Fatalf("failed to stat go.mod in %q: %v", dir, err)
}
parent := filepath.Dir(dir)
if parent == dir {
t.Fatal("could not find repository root (no go.mod found)")
}
dir = parent
}
}
// readPrivateKey reads an RSA private key from a PEM file. If the path is
// relative it is resolved against the repository root so that tests in
// sub-packages work with repo-root-relative paths.
func readPrivateKey(t *testing.T, path string) *rsa.PrivateKey {
	t.Helper()
	keyPath := path
	if !filepath.IsAbs(keyPath) {
		keyPath = filepath.Join(repoRoot(t), keyPath)
	}
	pemData, err := os.ReadFile(keyPath)
	if err != nil {
		t.Fatalf("failed to read private key file %q: %v", keyPath, err)
	}
	block, _ := pem.Decode(pemData)
	if block == nil {
		t.Fatalf("failed to decode PEM block from %q", keyPath)
	}
	parsed, err := x509.ParsePKCS8PrivateKey(block.Bytes)
	if err != nil {
		t.Fatalf("failed to parse private key from %q: %v", keyPath, err)
	}
	rsaKey, ok := parsed.(*rsa.PrivateKey)
	if !ok {
		t.Fatalf("private key in %q is not RSA (got %T)", keyPath, parsed)
	}
	return rsaKey
}
// testConfig builds a Snowflake connection config from SNOWFLAKE_TEST_*
// environment variables. It supports both password and JWT key-pair
// authentication and pins the session timezone to UTC.
func testConfig(t *testing.T) *sf.Config {
	t.Helper()
	useJWT := os.Getenv("SNOWFLAKE_TEST_AUTHENTICATOR") == "SNOWFLAKE_JWT"
	params := []*sf.ConfigParam{
		{Name: "Account", EnvName: "SNOWFLAKE_TEST_ACCOUNT", FailOnMissing: true},
		{Name: "User", EnvName: "SNOWFLAKE_TEST_USER", FailOnMissing: true},
		{Name: "Host", EnvName: "SNOWFLAKE_TEST_HOST", FailOnMissing: false},
		{Name: "Port", EnvName: "SNOWFLAKE_TEST_PORT", FailOnMissing: false},
		{Name: "Protocol", EnvName: "SNOWFLAKE_TEST_PROTOCOL", FailOnMissing: false},
		{Name: "Warehouse", EnvName: "SNOWFLAKE_TEST_WAREHOUSE", FailOnMissing: false},
	}
	if !useJWT {
		// A password is only required when not authenticating via key pair.
		params = append(params, &sf.ConfigParam{Name: "Password", EnvName: "SNOWFLAKE_TEST_PASSWORD", FailOnMissing: true})
	}
	cfg, err := sf.GetConfigFromEnv(params)
	if err != nil {
		t.Fatalf("failed to get config from environment: %v", err)
	}
	if useJWT {
		keyPath := os.Getenv("SNOWFLAKE_TEST_PRIVATE_KEY")
		if keyPath == "" {
			t.Fatal("SNOWFLAKE_TEST_PRIVATE_KEY must be set for JWT authentication")
		}
		cfg.PrivateKey = readPrivateKey(t, keyPath)
		cfg.Authenticator = sf.AuthTypeJwt
	}
	if cfg.Params == nil {
		cfg.Params = make(map[string]*string)
	}
	tz := "UTC"
	cfg.Params["timezone"] = &tz
	return cfg
}
// openTestConn opens a database handle plus a dedicated connection using the
// standard test configuration. The caller must call close() on the result.
func openTestConn(ctx context.Context, t *testing.T) *testConn {
	t.Helper()
	dsn, err := sf.DSN(testConfig(t))
	if err != nil {
		t.Fatalf("failed to create DSN: %v", err)
	}
	db, err := sql.Open("snowflake", dsn)
	if err != nil {
		t.Fatalf("failed to open db: %v", err)
	}
	conn, err := db.Conn(ctx)
	if err != nil {
		// Don't leak the handle when the connection cannot be obtained.
		db.Close()
		t.Fatalf("failed to get connection: %v", err)
	}
	return &testConn{db: db, conn: conn}
}
// close releases the dedicated connection and then the database handle.
func (tc *testConn) close() {
	tc.conn.Close()
	tc.db.Close()
}
// queryRows executes a query on the existing connection and returns
// SnowflakeRows plus a function to close just the rows.
func (tc *testConn) queryRows(ctx context.Context, t *testing.T, query string) (sf.SnowflakeRows, func()) {
	t.Helper()
	var rows driver.Rows
	var err error
	// rows and err are deliberately assigned (not re-declared) inside the
	// closure so the query results escape Raw's scope.
	err = tc.conn.Raw(func(x any) error {
		queryer, ok := x.(driver.QueryerContext)
		if !ok {
			return fmt.Errorf("connection does not implement QueryerContext")
		}
		rows, err = queryer.QueryContext(ctx, query, nil)
		return err
	})
	if err != nil {
		t.Fatalf("failed to execute query: %v", err)
	}
	sfRows, ok := rows.(sf.SnowflakeRows)
	if !ok {
		// Close the driver rows before failing so nothing leaks.
		rows.Close()
		t.Fatalf("rows do not implement SnowflakeRows")
	}
	return sfRows, func() { rows.Close() }
}
// queryRawRows is a convenience wrapper that opens a new connection,
// runs a single query, and returns SnowflakeRows with a full cleanup.
func queryRawRows(ctx context.Context, t *testing.T, query string) (sf.SnowflakeRows, func()) {
	t.Helper()
	conn := openTestConn(ctx, t)
	rows, closeRows := conn.queryRows(ctx, t, query)
	cleanup := func() {
		closeRows()
		conn.close()
	}
	return rows, cleanup
}
// TestGetArrowBatches checks the happy path: a simple two-column query
// fetched in arrow batch mode yields one record with the expected shape.
func TestGetArrowBatches(t *testing.T) {
	ctx := WithArrowBatches(context.Background())
	rows, cleanup := queryRawRows(ctx, t, "SELECT 1 AS num, 'hello' AS str")
	defer cleanup()
	batches, err := GetArrowBatches(rows)
	if err != nil {
		t.Fatalf("GetArrowBatches failed: %v", err)
	}
	if len(batches) == 0 {
		t.Fatal("expected at least one batch")
	}
	recs, err := batches[0].Fetch()
	if err != nil {
		t.Fatalf("Fetch failed: %v", err)
	}
	if recs == nil || len(*recs) == 0 {
		t.Fatal("expected at least one record")
	}
	first := (*recs)[0]
	defer first.Release()
	if cols := first.NumCols(); cols != 2 {
		t.Fatalf("expected 2 columns, got %d", cols)
	}
	if n := first.NumRows(); n != 1 {
		t.Fatalf("expected 1 row, got %d", n)
	}
}
// TestGetArrowBatchesHighPrecision verifies that a high-scale DECIMAL column
// is surfaced as its unscaled integer digits when higher precision is enabled.
func TestGetArrowBatchesHighPrecision(t *testing.T) {
	ctx := sf.WithHigherPrecision(WithArrowBatches(context.Background()))
	rows, cleanup := queryRawRows(ctx, t, "SELECT '0.1'::DECIMAL(38, 19) AS c")
	defer cleanup()
	batches, err := GetArrowBatches(rows)
	if err != nil {
		t.Fatalf("GetArrowBatches failed: %v", err)
	}
	if len(batches) == 0 {
		t.Fatal("expected at least one batch")
	}
	recs, err := batches[0].Fetch()
	if err != nil {
		t.Fatalf("Fetch failed: %v", err)
	}
	if recs == nil || len(*recs) == 0 {
		t.Fatal("expected at least one record")
	}
	rec := (*recs)[0]
	defer rec.Release()
	// 0.1 at scale 19 is stored as the unscaled integer 10^18.
	const expected = "1000000000000000000"
	if got := rec.Column(0).ValueStr(0); got != expected {
		t.Fatalf("expected %q, got %q", expected, got)
	}
}
// TestGetArrowBatchesLargeResultSet fetches a multi-chunk result set from a
// bounded worker pool and verifies the total row count, while a checked
// allocator confirms every record is released.
func TestGetArrowBatchesLargeResultSet(t *testing.T) {
	const numrows = 3000
	pool := memory.NewCheckedAllocator(memory.DefaultAllocator)
	defer pool.AssertSize(t, 0)
	ctx := sf.WithArrowAllocator(WithArrowBatches(context.Background()), pool)
	query := fmt.Sprintf("SELECT SEQ8(), RANDSTR(1000, RANDOM()) FROM TABLE(GENERATOR(ROWCOUNT=>%v))", numrows)
	sfRows, cleanup := queryRawRows(ctx, t, query)
	defer cleanup()
	batches, err := GetArrowBatches(sfRows)
	if err != nil {
		t.Fatalf("GetArrowBatches failed: %v", err)
	}
	if len(batches) == 0 {
		t.Fatal("expected at least one batch")
	}
	const maxWorkers = 10
	var (
		mu    sync.Mutex
		total int
		wg    sync.WaitGroup
	)
	jobs := make(chan int, len(batches))
	for range maxWorkers {
		wg.Add(1)
		go func() {
			defer wg.Done()
			for idx := range jobs {
				recs, fetchErr := batches[idx].Fetch()
				if fetchErr != nil {
					t.Errorf("Fetch failed for batch %d: %v", idx, fetchErr)
					return
				}
				for _, rec := range *recs {
					mu.Lock()
					total += int(rec.NumRows())
					mu.Unlock()
					rec.Release()
				}
			}
		}()
	}
	for i := range batches {
		jobs <- i
	}
	close(jobs)
	wg.Wait()
	if total != numrows {
		t.Fatalf("row count mismatch: expected %d, got %d", numrows, total)
	}
}
// TestGetArrowBatchesWithTimestampOption checks that fetching succeeds and
// yields the expected shape when the original (struct-encoded) timestamp
// representation is requested.
func TestGetArrowBatchesWithTimestampOption(t *testing.T) {
	ctx := WithTimestampOption(WithArrowBatches(context.Background()), UseOriginalTimestamp)
	rows, cleanup := queryRawRows(ctx, t, "SELECT TO_TIMESTAMP_NTZ('2024-01-15 13:45:30.123456789') AS ts")
	defer cleanup()
	batches, err := GetArrowBatches(rows)
	if err != nil {
		t.Fatalf("GetArrowBatches failed: %v", err)
	}
	if len(batches) == 0 {
		t.Fatal("expected at least one batch")
	}
	recs, err := batches[0].Fetch()
	if err != nil {
		t.Fatalf("Fetch failed: %v", err)
	}
	if recs == nil || len(*recs) == 0 {
		t.Fatal("expected at least one record")
	}
	rec := (*recs)[0]
	defer rec.Release()
	if n := rec.NumRows(); n != 1 {
		t.Fatalf("expected 1 row, got %d", n)
	}
	if n := rec.NumCols(); n != 1 {
		t.Fatalf("expected 1 column, got %d", n)
	}
}
// TestGetArrowBatchesJSONResponseError verifies that requesting arrow batches
// for a session forced to the JSON result format fails with
// ErrNonArrowResponseInArrowBatches. The ALTER SESSION and the query must run
// on the same connection, hence the manual conn/Raw handling.
func TestGetArrowBatchesJSONResponseError(t *testing.T) {
	ctx := WithArrowBatches(context.Background())
	cfg := testConfig(t)
	dsn, err := sf.DSN(cfg)
	if err != nil {
		t.Fatalf("failed to create DSN: %v", err)
	}
	db, err := sql.Open("snowflake", dsn)
	if err != nil {
		t.Fatalf("failed to open db: %v", err)
	}
	defer db.Close()
	conn, err := db.Conn(ctx)
	if err != nil {
		t.Fatalf("failed to get connection: %v", err)
	}
	defer conn.Close()
	// Force JSON responses for this session so the arrow-batch path must fail.
	_, err = conn.ExecContext(ctx, "ALTER SESSION SET GO_QUERY_RESULT_FORMAT = json")
	if err != nil {
		t.Fatalf("failed to set JSON format: %v", err)
	}
	var rows driver.Rows
	// rows and err are assigned (not re-declared) inside the closure so the
	// results escape Raw's scope.
	err = conn.Raw(func(x any) error {
		queryer, ok := x.(driver.QueryerContext)
		if !ok {
			return fmt.Errorf("connection does not implement QueryerContext")
		}
		rows, err = queryer.QueryContext(ctx, "SELECT 'hello'", nil)
		return err
	})
	if err != nil {
		t.Fatalf("failed to execute query: %v", err)
	}
	defer rows.Close()
	sfRows, ok := rows.(sf.SnowflakeRows)
	if !ok {
		t.Fatal("rows do not implement SnowflakeRows")
	}
	_, err = GetArrowBatches(sfRows)
	if err == nil {
		t.Fatal("expected error when using arrow batches with JSON response")
	}
	var se *sf.SnowflakeError
	if !errors.As(err, &se) {
		t.Fatalf("expected SnowflakeError, got %T: %v", err, err)
	}
	if se.Number != sf.ErrNonArrowResponseInArrowBatches {
		t.Fatalf("expected error code %d, got %d", sf.ErrNonArrowResponseInArrowBatches, se.Number)
	}
}
// TestTimestampConversionDistantDates tests all 10 timestamp scales (0-9)
// because each scale exercises a mathematically distinct code path in
// extractEpoch/extractFraction (converter.go). Past bugs have been
// scale-specific: SNOW-526255 (time scale for arrow) and SNOW-2091309
// (precision loss at scale 0). Do not reduce the scale range.
func TestTimestampConversionDistantDates(t *testing.T) {
	timestamps := [2]string{
		"9999-12-12 23:59:59.999999999", // far future
		"0001-01-01 00:00:00.000000000", // far past
	}
	tsTypes := [3]string{"TIMESTAMP_NTZ", "TIMESTAMP_LTZ", "TIMESTAMP_TZ"}
	precisions := []struct {
		name        string
		option      ia.TimestampOption
		unit        arrow.TimeUnit
		expectError bool // nanosecond conversion must fail for these distant dates
	}{
		{"second", UseSecondTimestamp, arrow.Second, false},
		{"millisecond", UseMillisecondTimestamp, arrow.Millisecond, false},
		{"microsecond", UseMicrosecondTimestamp, arrow.Microsecond, false},
		{"nanosecond", UseNanosecondTimestamp, arrow.Nanosecond, true},
	}
	for _, prec := range precisions {
		t.Run(prec.name, func(t *testing.T) {
			// Each precision gets its own parallel subtest with its own
			// checked allocator and connection.
			t.Parallel()
			pool := memory.NewCheckedAllocator(memory.DefaultAllocator)
			defer pool.AssertSize(t, 0)
			ctx := sf.WithArrowAllocator(
				WithTimestampOption(WithArrowBatches(context.Background()), prec.option),
				pool,
			)
			tc := openTestConn(ctx, t)
			defer tc.close()
			for _, tsStr := range timestamps {
				for _, tp := range tsTypes {
					for scale := 0; scale <= 9; scale++ {
						t.Run(tp+"("+strconv.Itoa(scale)+")_"+tsStr, func(t *testing.T) {
							query := fmt.Sprintf("SELECT '%s'::%s(%v)", tsStr, tp, scale)
							sfRows, closeRows := tc.queryRows(ctx, t, query)
							defer closeRows()
							batches, err := GetArrowBatches(sfRows)
							if err != nil {
								t.Fatalf("GetArrowBatches failed: %v", err)
							}
							if len(batches) == 0 {
								t.Fatal("expected at least one batch")
							}
							records, err := batches[0].Fetch()
							if prec.expectError {
								// Nanosecond materialization of these distant
								// dates must report too-high precision.
								expectedError := "Cannot convert timestamp"
								if err == nil {
									t.Fatalf("no error, expected: %v", expectedError)
								}
								if !strings.Contains(err.Error(), expectedError) {
									t.Fatalf("improper error, expected: %v, got: %v", expectedError, err.Error())
								}
								return
							}
							if err != nil {
								t.Fatalf("Fetch failed: %v", err)
							}
							if records == nil || len(*records) == 0 {
								t.Fatal("expected at least one record")
							}
							rec := (*records)[0]
							defer rec.Release()
							actual := rec.Column(0).(*array.Timestamp).TimestampValues()[0]
							actualYear := actual.ToTime(prec.unit).Year()
							ts, err := time.Parse("2006-01-02 15:04:05", tsStr)
							if err != nil {
								t.Fatalf("failed to parse time: %v", err)
							}
							// Only the year is compared: it is stable across
							// units while still catching epoch-math errors.
							exp := ts.Truncate(time.Duration(math.Pow10(9 - scale)))
							if actualYear != exp.Year() {
								t.Fatalf("unexpected year, expected: %v, got: %v", exp.Year(), actualYear)
							}
						})
					}
				}
			}
		})
	}
}
// TestTimestampConversionWithOriginalTimestamp tests all 10 timestamp scales
// (0-9) because each scale exercises a mathematically distinct code path in
// extractEpoch/extractFraction. See TestTimestampConversionDistantDates for
// rationale on why the full scale range is required.
func TestTimestampConversionWithOriginalTimestamp(t *testing.T) {
	timestamps := [3]string{
		"2000-10-10 10:10:10.123456789",
		"9999-12-12 23:59:59.999999999",
		"0001-01-01 00:00:00.000000000",
	}
	tsTypes := [3]string{"TIMESTAMP_NTZ", "TIMESTAMP_LTZ", "TIMESTAMP_TZ"}
	pool := memory.NewCheckedAllocator(memory.DefaultAllocator)
	defer pool.AssertSize(t, 0)
	ctx := sf.WithArrowAllocator(
		WithTimestampOption(WithArrowBatches(context.Background()), UseOriginalTimestamp),
		pool,
	)
	tc := openTestConn(ctx, t)
	defer tc.close()
	for _, tsStr := range timestamps {
		ts, err := time.Parse("2006-01-02 15:04:05", tsStr)
		if err != nil {
			t.Fatalf("failed to parse time: %v", err)
		}
		for _, tp := range tsTypes {
			t.Run(tp+"_"+tsStr, func(t *testing.T) {
				// Batch all 10 scales into a single multi-column query to reduce round trips.
				var cols []string
				for scale := 0; scale <= 9; scale++ {
					cols = append(cols, fmt.Sprintf("'%s'::%s(%v)", tsStr, tp, scale))
				}
				query := "SELECT " + strings.Join(cols, ", ")
				sfRows, closeRows := tc.queryRows(ctx, t, query)
				defer closeRows()
				batches, err := GetArrowBatches(sfRows)
				if err != nil {
					t.Fatalf("GetArrowBatches failed: %v", err)
				}
				if len(batches) != 1 {
					t.Fatalf("expected 1 batch, got %d", len(batches))
				}
				records, err := batches[0].Fetch()
				if err != nil {
					t.Fatalf("Fetch failed: %v", err)
				}
				if records == nil || len(*records) == 0 {
					t.Fatal("expected at least one record")
				}
				// Bug fix: release each record exactly once. Previously the
				// defer lived inside the per-scale loop, so every record had
				// Release deferred once per scale (10 times in total), which
				// over-releases records that were only retained once.
				for _, r := range *records {
					defer r.Release()
				}
				for scale := 0; scale <= 9; scale++ {
					exp := ts.Truncate(time.Duration(math.Pow10(9 - scale)))
					for _, r := range *records {
						// Column index equals the scale: the query selects one
						// column per scale, in order.
						act := batches[0].ArrowSnowflakeTimestampToTime(r, scale, 0)
						if act == nil {
							t.Fatalf("scale %d: unexpected nil, expected: %v", scale, exp)
						} else if !exp.Equal(*act) {
							t.Fatalf("scale %d: unexpected result, expected: %v, got: %v", scale, exp, *act)
						}
					}
				}
			})
		}
	}
}
================================================
FILE: arrowbatches/context.go
================================================
package arrowbatches
import (
"context"
ia "github.com/snowflakedb/gosnowflake/v2/internal/arrow"
)
// Timestamp option constants, re-exported from the internal arrow package.
// They select how Snowflake timestamp columns are materialized when fetching
// arrow batches (see WithTimestampOption and converter.go).
const (
	// UseNanosecondTimestamp converts to arrow.Timestamp with nanosecond unit.
	UseNanosecondTimestamp = ia.UseNanosecondTimestamp
	// UseMicrosecondTimestamp converts to arrow.Timestamp with microsecond unit.
	UseMicrosecondTimestamp = ia.UseMicrosecondTimestamp
	// UseMillisecondTimestamp converts to arrow.Timestamp with millisecond unit.
	UseMillisecondTimestamp = ia.UseMillisecondTimestamp
	// UseSecondTimestamp converts to arrow.Timestamp with second unit.
	UseSecondTimestamp = ia.UseSecondTimestamp
	// UseOriginalTimestamp keeps Snowflake's original encoding; convert values
	// with ArrowBatch.ArrowSnowflakeTimestampToTime.
	UseOriginalTimestamp = ia.UseOriginalTimestamp
)
// WithArrowBatches returns a context that enables arrow batch mode for queries.
// Rows produced under this context can be passed to GetArrowBatches.
func WithArrowBatches(ctx context.Context) context.Context {
	return ia.EnableArrowBatches(ctx)
}
// WithTimestampOption returns a context that sets the timestamp conversion option
// for arrow batches. See the Use*Timestamp constants for the available options.
func WithTimestampOption(ctx context.Context, option ia.TimestampOption) context.Context {
	return ia.WithTimestampOption(ctx, option)
}
// WithUtf8Validation returns a context that enables UTF-8 validation for
// string columns in arrow batches; invalid sequences are replaced with the
// Unicode replacement character during Fetch.
func WithUtf8Validation(ctx context.Context) context.Context {
	return ia.EnableUtf8Validation(ctx)
}
================================================
FILE: arrowbatches/converter.go
================================================
package arrowbatches
import (
"context"
"fmt"
"github.com/snowflakedb/gosnowflake/v2/internal/query"
"github.com/snowflakedb/gosnowflake/v2/internal/types"
"math"
"math/big"
"strings"
"time"
"unicode/utf8"
sf "github.com/snowflakedb/gosnowflake/v2"
ia "github.com/snowflakedb/gosnowflake/v2/internal/arrow"
"github.com/apache/arrow-go/v18/arrow"
"github.com/apache/arrow-go/v18/arrow/array"
"github.com/apache/arrow-go/v18/arrow/compute"
"github.com/apache/arrow-go/v18/arrow/memory"
)
// arrowToRecord transforms a raw arrow.Record from Snowflake into a record
// with standard Arrow types (e.g., converting struct-based timestamps to
// arrow.Timestamp, decimal128 to int64/float64, etc.)
func arrowToRecord(ctx context.Context, record arrow.Record, pool memory.Allocator, rowType []query.ExecResponseRowType, loc *time.Location) (arrow.Record, error) {
	timestampOption := ia.GetTimestampOption(ctx)
	higherPrecision := ia.HigherPrecisionEnabled(ctx)
	// Build the target schema first; each converted column must match it.
	s, err := recordToSchema(record.Schema(), rowType, loc, timestampOption, higherPrecision)
	if err != nil {
		return nil, err
	}
	var cols []arrow.Array
	numRows := record.NumRows()
	// Attach the allocator so compute kernels allocate from the same pool.
	ctxAlloc := compute.WithAllocator(ctx, pool)
	for i, col := range record.Columns() {
		fieldMetadata := rowType[i].ToFieldMetadata()
		newCol, err := arrowToRecordSingleColumn(ctxAlloc, s.Field(i), col, fieldMetadata, higherPrecision, timestampOption, pool, loc, numRows)
		if err != nil {
			return nil, err
		}
		cols = append(cols, newCol)
		// NewRecord below retains the columns; drop our own reference when
		// this function returns.
		defer newCol.Release()
	}
	newRecord := array.NewRecord(s, cols, numRows)
	return newRecord, nil
}
// arrowToRecordSingleColumn converts one raw column from Snowflake's wire
// representation into the standard Arrow type described by field. It is
// called recursively for the children of structured OBJECT/ARRAY/MAP columns.
// The returned array carries its own reference; the caller must Release it.
func arrowToRecordSingleColumn(ctx context.Context, field arrow.Field, col arrow.Array, fieldMetadata query.FieldMetadata, higherPrecisionEnabled bool, timestampOption ia.TimestampOption, pool memory.Allocator, loc *time.Location, numRows int64) (arrow.Array, error) {
	var err error
	newCol := col
	snowflakeType := types.GetSnowflakeType(fieldMetadata.Type)
	switch snowflakeType {
	case types.FixedType:
		// NUMBER/DECIMAL columns.
		if higherPrecisionEnabled {
			// Keep the exact decimal representation untouched.
			col.Retain()
		} else if col.DataType().ID() == arrow.DECIMAL || col.DataType().ID() == arrow.DECIMAL256 {
			// Downcast decimals: integral values to int64, scaled to float64.
			var toType arrow.DataType
			if fieldMetadata.Scale == 0 {
				toType = arrow.PrimitiveTypes.Int64
			} else {
				toType = arrow.PrimitiveTypes.Float64
			}
			newCol, err = compute.CastArray(ctx, col, compute.UnsafeCastOptions(toType))
			if err != nil {
				return nil, err
			}
		} else if fieldMetadata.Scale != 0 && col.DataType().ID() != arrow.INT64 {
			// Scaled non-int64 integers: divide by 10^scale via a compute kernel.
			result, err := compute.Divide(ctx, compute.ArithmeticOptions{NoCheckOverflow: true},
				&compute.ArrayDatum{Value: newCol.Data()},
				compute.NewDatum(math.Pow10(int(fieldMetadata.Scale))))
			if err != nil {
				return nil, err
			}
			defer result.Release()
			newCol = result.(*compute.ArrayDatum).MakeArray()
		} else if fieldMetadata.Scale != 0 && col.DataType().ID() == arrow.INT64 {
			// Scaled int64: divide per value through big.Float (presumably to
			// limit float64 precision loss — confirm against SNOW history).
			values := col.(*array.Int64).Int64Values()
			floatValues := make([]float64, len(values))
			for i, val := range values {
				floatValues[i], _ = intToBigFloat(val, int64(fieldMetadata.Scale)).Float64()
			}
			builder := array.NewFloat64Builder(pool)
			builder.AppendValues(floatValues, nil)
			newCol = builder.NewArray()
			builder.Release()
		} else {
			col.Retain()
		}
	case types.TimeType:
		// TIME columns are normalized to nanosecond Time64.
		newCol, err = compute.CastArray(ctx, col, compute.SafeCastOptions(arrow.FixedWidthTypes.Time64ns))
		if err != nil {
			return nil, err
		}
	case types.TimestampNtzType, types.TimestampLtzType, types.TimestampTzType:
		if timestampOption == ia.UseOriginalTimestamp {
			// Keep Snowflake's encoding; callers convert values individually
			// via ArrowSnowflakeTimestampToTime.
			col.Retain()
		} else {
			var unit arrow.TimeUnit
			switch timestampOption {
			case ia.UseMicrosecondTimestamp:
				unit = arrow.Microsecond
			case ia.UseMillisecondTimestamp:
				unit = arrow.Millisecond
			case ia.UseSecondTimestamp:
				unit = arrow.Second
			case ia.UseNanosecondTimestamp:
				unit = arrow.Nanosecond
			}
			var tb *array.TimestampBuilder
			if snowflakeType == types.TimestampLtzType {
				// LTZ carries the session timezone in the Arrow type.
				tb = array.NewTimestampBuilder(pool, &arrow.TimestampType{Unit: unit, TimeZone: loc.String()})
			} else {
				tb = array.NewTimestampBuilder(pool, &arrow.TimestampType{Unit: unit})
			}
			defer tb.Release()
			for i := 0; i < int(numRows); i++ {
				ts := ArrowSnowflakeTimestampToTime(col, snowflakeType, int(fieldMetadata.Scale), i, loc)
				if ts != nil {
					var ar arrow.Timestamp
					switch timestampOption {
					case ia.UseMicrosecondTimestamp:
						ar = arrow.Timestamp(ts.UnixMicro())
					case ia.UseMillisecondTimestamp:
						ar = arrow.Timestamp(ts.UnixMilli())
					case ia.UseSecondTimestamp:
						ar = arrow.Timestamp(ts.Unix())
					case ia.UseNanosecondTimestamp:
						ar = arrow.Timestamp(ts.UnixNano())
						// UnixNano is undefined outside the int64 nanosecond
						// range; a changed year signals the overflow.
						if ts.UTC().Year() != ar.ToTime(arrow.Nanosecond).Year() {
							return nil, &sf.SnowflakeError{
								Number:   sf.ErrTooHighTimestampPrecision,
								SQLState: sf.SQLStateInvalidDataTimeFormat,
								Message:  fmt.Sprintf("Cannot convert timestamp %v in column %v to Arrow.Timestamp data type due to too high precision. Please use context with WithOriginalTimestamp.", ts.UTC(), fieldMetadata.Name),
							}
						}
					}
					tb.Append(ar)
				} else {
					tb.AppendNull()
				}
			}
			newCol = tb.NewArray()
		}
	case types.TextType:
		if stringCol, ok := col.(*array.String); ok {
			newCol = arrowStringRecordToColumn(ctx, stringCol, pool, numRows)
		}
	case types.ObjectType:
		// Structured OBJECT: convert each child field recursively, then
		// reassemble the struct preserving the original null bitmap.
		if structCol, ok := col.(*array.Struct); ok {
			var internalCols []arrow.Array
			for i := 0; i < structCol.NumField(); i++ {
				internalCol := structCol.Field(i)
				newInternalCol, err := arrowToRecordSingleColumn(ctx, field.Type.(*arrow.StructType).Field(i), internalCol, fieldMetadata.Fields[i], higherPrecisionEnabled, timestampOption, pool, loc, numRows)
				if err != nil {
					return nil, err
				}
				internalCols = append(internalCols, newInternalCol)
				defer newInternalCol.Release()
			}
			var fieldNames []string
			for _, f := range field.Type.(*arrow.StructType).Fields() {
				fieldNames = append(fieldNames, f.Name)
			}
			nullBitmap := memory.NewBufferBytes(structCol.NullBitmapBytes())
			numberOfNulls := structCol.NullN()
			return array.NewStructArrayWithNulls(internalCols, fieldNames, nullBitmap, numberOfNulls, 0)
		} else if stringCol, ok := col.(*array.String); ok {
			// Semi-structured OBJECT arrives as text.
			newCol = arrowStringRecordToColumn(ctx, stringCol, pool, numRows)
		}
	case types.ArrayType:
		if listCol, ok := col.(*array.List); ok {
			// Convert the element column, then rewrap it with the original
			// list offsets and null count.
			newCol, err = arrowToRecordSingleColumn(ctx, field.Type.(*arrow.ListType).ElemField(), listCol.ListValues(), fieldMetadata.Fields[0], higherPrecisionEnabled, timestampOption, pool, loc, numRows)
			if err != nil {
				return nil, err
			}
			defer newCol.Release()
			newData := array.NewData(arrow.ListOf(newCol.DataType()), listCol.Len(), listCol.Data().Buffers(), []arrow.ArrayData{newCol.Data()}, listCol.NullN(), 0)
			defer newData.Release()
			return array.NewListData(newData), nil
		} else if stringCol, ok := col.(*array.String); ok {
			newCol = arrowStringRecordToColumn(ctx, stringCol, pool, numRows)
		}
	case types.MapType:
		if mapCol, ok := col.(*array.Map); ok {
			// Convert keys and values separately, then rebuild the map from a
			// {k, v} struct child with the original offsets.
			keyCol, err := arrowToRecordSingleColumn(ctx, field.Type.(*arrow.MapType).KeyField(), mapCol.Keys(), fieldMetadata.Fields[0], higherPrecisionEnabled, timestampOption, pool, loc, numRows)
			if err != nil {
				return nil, err
			}
			defer keyCol.Release()
			valueCol, err := arrowToRecordSingleColumn(ctx, field.Type.(*arrow.MapType).ItemField(), mapCol.Items(), fieldMetadata.Fields[1], higherPrecisionEnabled, timestampOption, pool, loc, numRows)
			if err != nil {
				return nil, err
			}
			defer valueCol.Release()
			structArr, err := array.NewStructArray([]arrow.Array{keyCol, valueCol}, []string{"k", "v"})
			if err != nil {
				return nil, err
			}
			defer structArr.Release()
			newData := array.NewData(arrow.MapOf(keyCol.DataType(), valueCol.DataType()), mapCol.Len(), mapCol.Data().Buffers(), []arrow.ArrayData{structArr.Data()}, mapCol.NullN(), 0)
			defer newData.Release()
			return array.NewMapData(newData), nil
		} else if stringCol, ok := col.(*array.String); ok {
			newCol = arrowStringRecordToColumn(ctx, stringCol, pool, numRows)
		}
	default:
		// All other types pass through unchanged.
		col.Retain()
	}
	return newCol, nil
}
// arrowStringRecordToColumn returns a string column whose values are checked
// for UTF-8 validity when validation is enabled on the context; invalid
// sequences are replaced with the Unicode replacement character. When
// validation is disabled (or the column is not a plain STRING), the original
// column is retained and returned unchanged.
func arrowStringRecordToColumn(
	ctx context.Context,
	stringCol *array.String,
	mem memory.Allocator,
	numRows int64,
) arrow.Array {
	if !ia.Utf8ValidationEnabled(ctx) || stringCol.DataType().ID() != arrow.STRING {
		stringCol.Retain()
		return stringCol
	}
	builder := array.NewStringBuilder(mem)
	defer builder.Release()
	for row := 0; row < int(numRows); row++ {
		if !stringCol.IsValid(row) {
			builder.AppendNull()
			continue
		}
		value := stringCol.Value(row)
		if !utf8.ValidString(value) {
			value = strings.ToValidUTF8(value, "�")
		}
		builder.Append(value)
	}
	return builder.NewArray()
}
func intToBigFloat(val int64, scale int64) *big.Float {
f := new(big.Float).SetInt64(val)
s := new(big.Float).SetInt(new(big.Int).Exp(big.NewInt(10), big.NewInt(scale), nil))
return new(big.Float).Quo(f, s)
}
// ArrowSnowflakeTimestampToTime converts original timestamp returned by Snowflake to time.Time.
//
// Timestamps arrive either as a struct column (separate epoch seconds and
// fractional nanoseconds, plus a timezone field for TZ types) or as a single
// int64 scaled by 10^scale; both layouts are handled. Returns nil for NULL.
func ArrowSnowflakeTimestampToTime(
	column arrow.Array,
	sfType types.SnowflakeType,
	scale int,
	recIdx int,
	loc *time.Location) *time.Time {
	if column.IsNull(recIdx) {
		return nil
	}
	var ret time.Time
	switch sfType {
	case types.TimestampNtzType:
		if column.DataType().ID() == arrow.STRUCT {
			// Struct layout: field 0 = epoch seconds, field 1 = fraction (ns).
			structData := column.(*array.Struct)
			epoch := structData.Field(0).(*array.Int64).Int64Values()
			fraction := structData.Field(1).(*array.Int32).Int32Values()
			ret = time.Unix(epoch[recIdx], int64(fraction[recIdx])).UTC()
		} else {
			// Int64 layout: single value scaled by 10^scale.
			intData := column.(*array.Int64)
			value := intData.Value(recIdx)
			epoch := extractEpoch(value, scale)
			fraction := extractFraction(value, scale)
			ret = time.Unix(epoch, fraction).UTC()
		}
	case types.TimestampLtzType:
		// Same layouts as NTZ, but rendered in the session location.
		if column.DataType().ID() == arrow.STRUCT {
			structData := column.(*array.Struct)
			epoch := structData.Field(0).(*array.Int64).Int64Values()
			fraction := structData.Field(1).(*array.Int32).Int32Values()
			ret = time.Unix(epoch[recIdx], int64(fraction[recIdx])).In(loc)
		} else {
			intData := column.(*array.Int64)
			value := intData.Value(recIdx)
			epoch := extractEpoch(value, scale)
			fraction := extractFraction(value, scale)
			ret = time.Unix(epoch, fraction).In(loc)
		}
	case types.TimestampTzType:
		// TZ timestamps are struct-encoded. The timezone field appears to be
		// an offset in minutes biased by 1440 — confirm against the server
		// result format documentation.
		structData := column.(*array.Struct)
		if structData.NumField() == 2 {
			// Two-field layout: scaled value + timezone.
			value := structData.Field(0).(*array.Int64).Int64Values()
			timezone := structData.Field(1).(*array.Int32).Int32Values()
			epoch := extractEpoch(value[recIdx], scale)
			fraction := extractFraction(value[recIdx], scale)
			locTz := sf.Location(int(timezone[recIdx]) - 1440)
			ret = time.Unix(epoch, fraction).In(locTz)
		} else {
			// Three-field layout: epoch + fraction + timezone.
			epoch := structData.Field(0).(*array.Int64).Int64Values()
			fraction := structData.Field(1).(*array.Int32).Int32Values()
			timezone := structData.Field(2).(*array.Int32).Int32Values()
			locTz := sf.Location(int(timezone[recIdx]) - 1440)
			ret = time.Unix(epoch[recIdx], int64(fraction[recIdx])).In(locTz)
		}
	}
	return &ret
}
// extractEpoch returns the whole-seconds component of a scaled epoch value
// (a timestamp stored as value = seconds*10^scale + fraction).
func extractEpoch(value int64, scale int) int64 {
	divisor := int64(math.Pow10(scale))
	return value / divisor
}
// extractFraction returns the sub-second component of a scaled epoch value,
// converted to nanoseconds (scale is assumed to be in [0, 9]).
func extractFraction(value int64, scale int) int64 {
	modulus := int64(math.Pow10(scale))
	nanosPerUnit := int64(math.Pow10(9 - scale))
	return (value % modulus) * nanosPerUnit
}
================================================
FILE: arrowbatches/converter_test.go
================================================
package arrowbatches
import (
"context"
"fmt"
"github.com/snowflakedb/gosnowflake/v2/internal/query"
"github.com/snowflakedb/gosnowflake/v2/internal/types"
"math/big"
"strings"
"testing"
"time"
ia "github.com/snowflakedb/gosnowflake/v2/internal/arrow"
"github.com/apache/arrow-go/v18/arrow"
"github.com/apache/arrow-go/v18/arrow/array"
"github.com/apache/arrow-go/v18/arrow/decimal128"
"github.com/apache/arrow-go/v18/arrow/memory"
)
var decimalShift = new(big.Int).Exp(big.NewInt(2), big.NewInt(64), nil)
// stringIntToDecimal converts a base-10 integer string into a decimal128
// value by splitting the parsed big.Int at 2^64 into high and low words.
// The boolean result is false when src is not a valid base-10 integer.
//
// NOTE(review): for negative src, QuoRem truncates the quotient toward zero
// and the remainder carries the dividend's sign; math/big documents
// Int.Uint64 as undefined for negative values. This round-trips consistently
// with decimalToBigInt within these tests, but confirm against decimal128's
// expected two's-complement layout before reusing outside test code.
func stringIntToDecimal(src string) (decimal128.Num, bool) {
	b, ok := new(big.Int).SetString(src, 10)
	if !ok {
		return decimal128.Num{}, ok
	}
	var high, low big.Int
	high.QuoRem(b, decimalShift, &low)
	return decimal128.New(high.Int64(), low.Uint64()), true
}
// decimalToBigInt reassembles a decimal128 value into a big.Int as
// highBits*2^64 + lowBits (the inverse of stringIntToDecimal's split).
func decimalToBigInt(num decimal128.Num) *big.Int {
	result := new(big.Int).SetInt64(num.HighBits())
	result.Mul(result, decimalShift)
	result.Add(result, new(big.Int).SetUint64(num.LowBits()))
	return result
}
func TestArrowToRecord(t *testing.T) {
pool := memory.NewCheckedAllocator(memory.NewGoAllocator())
defer pool.AssertSize(t, 0)
var valids []bool
localTime := time.Date(2019, 1, 1, 1, 17, 31, 123456789, time.FixedZone("-08:00", -8*3600))
localTimeFarIntoFuture := time.Date(9000, 2, 6, 14, 17, 31, 123456789, time.FixedZone("-08:00", -8*3600))
epochField := arrow.Field{Name: "epoch", Type: &arrow.Int64Type{}}
timezoneField := arrow.Field{Name: "timezone", Type: &arrow.Int32Type{}}
fractionField := arrow.Field{Name: "fraction", Type: &arrow.Int32Type{}}
timestampTzStructWithoutFraction := arrow.StructOf(epochField, timezoneField)
timestampTzStructWithFraction := arrow.StructOf(epochField, fractionField, timezoneField)
timestampNtzStruct := arrow.StructOf(epochField, fractionField)
timestampLtzStruct := arrow.StructOf(epochField, fractionField)
type testObj struct {
field1 int
field2 string
}
for _, tc := range []struct {
logical string
physical string
sc *arrow.Schema
rowType query.ExecResponseRowType
values any
expected any
error string
arrowBatchesTimestampOption ia.TimestampOption
enableArrowBatchesUtf8Validation bool
withHigherPrecision bool
nrows int
builder array.Builder
append func(b array.Builder, vs any)
compare func(src any, expected any, rec arrow.Record) int
}{
{
logical: "fixed",
physical: "number",
sc: arrow.NewSchema([]arrow.Field{{Type: &arrow.Int64Type{}}}, nil),
values: []int64{1, 2},
nrows: 2,
builder: array.NewInt64Builder(pool),
append: func(b array.Builder, vs any) { b.(*array.Int64Builder).AppendValues(vs.([]int64), valids) },
},
{
logical: "fixed",
physical: "int64",
sc: arrow.NewSchema([]arrow.Field{{Type: &arrow.Decimal128Type{Precision: 38, Scale: 0}}}, nil),
values: []string{"10000000000000000000000000000000000000", "-12345678901234567890123456789012345678"},
nrows: 2,
builder: array.NewDecimal128Builder(pool, &arrow.Decimal128Type{Precision: 38, Scale: 0}),
append: func(b array.Builder, vs any) {
for _, s := range vs.([]string) {
num, ok := stringIntToDecimal(s)
if !ok {
t.Fatalf("failed to convert to Int64")
}
b.(*array.Decimal128Builder).Append(num)
}
},
compare: func(src any, expected any, convertedRec arrow.Record) int {
srcvs := src.([]string)
for i, dec := range convertedRec.Column(0).(*array.Int64).Int64Values() {
num, ok := stringIntToDecimal(srcvs[i])
if !ok {
return i
}
srcDec := decimalToBigInt(num).Int64()
if srcDec != dec {
return i
}
}
return -1
},
},
{
logical: "fixed",
physical: "number(38,0)",
sc: arrow.NewSchema([]arrow.Field{{Type: &arrow.Decimal128Type{Precision: 38, Scale: 0}}}, nil),
values: []string{"10000000000000000000000000000000000000", "-12345678901234567890123456789012345678"},
withHigherPrecision: true,
nrows: 2,
builder: array.NewDecimal128Builder(pool, &arrow.Decimal128Type{Precision: 38, Scale: 0}),
append: func(b array.Builder, vs any) {
for _, s := range vs.([]string) {
num, ok := stringIntToDecimal(s)
if !ok {
t.Fatalf("failed to convert to Int64")
}
b.(*array.Decimal128Builder).Append(num)
}
},
compare: func(src any, expected any, convertedRec arrow.Record) int {
srcvs := src.([]string)
for i, dec := range convertedRec.Column(0).(*array.Decimal128).Values() {
srcDec, ok := stringIntToDecimal(srcvs[i])
if !ok {
return i
}
if srcDec != dec {
return i
}
}
return -1
},
},
{
logical: "fixed",
physical: "float64",
rowType: query.ExecResponseRowType{Scale: 37},
sc: arrow.NewSchema([]arrow.Field{{Type: &arrow.Decimal128Type{Precision: 38, Scale: 37}}}, nil),
values: []string{"1.2345678901234567890123456789012345678", "-9.999999999999999"},
nrows: 2,
builder: array.NewDecimal128Builder(pool, &arrow.Decimal128Type{Precision: 38, Scale: 37}),
append: func(b array.Builder, vs any) {
for _, s := range vs.([]string) {
num, err := decimal128.FromString(s, 38, 37)
if err != nil {
t.Fatalf("failed to convert to decimal: %s", err)
}
b.(*array.Decimal128Builder).Append(num)
}
},
compare: func(src any, expected any, convertedRec arrow.Record) int {
srcvs := src.([]string)
for i, dec := range convertedRec.Column(0).(*array.Float64).Float64Values() {
num, err := decimal128.FromString(srcvs[i], 38, 37)
if err != nil {
return i
}
srcDec := num.ToFloat64(37)
if srcDec != dec {
return i
}
}
return -1
},
},
{
logical: "fixed",
physical: "number(38,37)",
rowType: query.ExecResponseRowType{Scale: 37},
sc: arrow.NewSchema([]arrow.Field{{Type: &arrow.Decimal128Type{Precision: 38, Scale: 37}}}, nil),
values: []string{"1.2345678901234567890123456789012345678", "-9.999999999999999"},
withHigherPrecision: true,
nrows: 2,
builder: array.NewDecimal128Builder(pool, &arrow.Decimal128Type{Precision: 38, Scale: 37}),
append: func(b array.Builder, vs any) {
for _, s := range vs.([]string) {
num, err := decimal128.FromString(s, 38, 37)
if err != nil {
t.Fatalf("failed to convert to decimal: %s", err)
}
b.(*array.Decimal128Builder).Append(num)
}
},
compare: func(src any, expected any, convertedRec arrow.Record) int {
srcvs := src.([]string)
for i, dec := range convertedRec.Column(0).(*array.Decimal128).Values() {
srcDec, err := decimal128.FromString(srcvs[i], 38, 37)
if err != nil {
return i
}
if srcDec != dec {
return i
}
}
return -1
},
},
{
logical: "fixed",
physical: "int8",
sc: arrow.NewSchema([]arrow.Field{{Type: &arrow.Int8Type{}}}, nil),
values: []int8{1, 2},
nrows: 2,
builder: array.NewInt8Builder(pool),
append: func(b array.Builder, vs any) { b.(*array.Int8Builder).AppendValues(vs.([]int8), valids) },
},
{
logical: "fixed",
physical: "int16",
sc: arrow.NewSchema([]arrow.Field{{Type: &arrow.Int16Type{}}}, nil),
values: []int16{1, 2},
nrows: 2,
builder: array.NewInt16Builder(pool),
append: func(b array.Builder, vs any) { b.(*array.Int16Builder).AppendValues(vs.([]int16), valids) },
},
{
logical: "fixed",
physical: "int32",
sc: arrow.NewSchema([]arrow.Field{{Type: &arrow.Int32Type{}}}, nil),
values: []int32{1, 2},
nrows: 2,
builder: array.NewInt32Builder(pool),
append: func(b array.Builder, vs any) { b.(*array.Int32Builder).AppendValues(vs.([]int32), valids) },
},
{
logical: "fixed",
physical: "int64",
sc: arrow.NewSchema([]arrow.Field{{Type: &arrow.Int64Type{}}}, nil),
values: []int64{1, 2},
nrows: 2,
builder: array.NewInt64Builder(pool),
append: func(b array.Builder, vs any) { b.(*array.Int64Builder).AppendValues(vs.([]int64), valids) },
},
{
logical: "fixed",
physical: "float8",
rowType: query.ExecResponseRowType{Scale: 1},
sc: arrow.NewSchema([]arrow.Field{{Type: &arrow.Int8Type{}}}, nil),
values: []int8{10, 16},
nrows: 2,
builder: array.NewInt8Builder(pool),
append: func(b array.Builder, vs any) { b.(*array.Int8Builder).AppendValues(vs.([]int8), valids) },
compare: func(src any, expected any, convertedRec arrow.Record) int {
srcvs := src.([]int8)
for i, f := range convertedRec.Column(0).(*array.Float64).Float64Values() {
rawFloat, _ := intToBigFloat(int64(srcvs[i]), 1).Float64()
if rawFloat != f {
return i
}
}
return -1
},
},
{
logical: "fixed",
physical: "int8",
rowType: query.ExecResponseRowType{Scale: 1},
sc: arrow.NewSchema([]arrow.Field{{Type: &arrow.Int8Type{}}}, nil),
values: []int8{10, 16},
withHigherPrecision: true,
nrows: 2,
builder: array.NewInt8Builder(pool),
append: func(b array.Builder, vs any) { b.(*array.Int8Builder).AppendValues(vs.([]int8), valids) },
compare: func(src any, expected any, convertedRec arrow.Record) int {
srcvs := src.([]int8)
for i, f := range convertedRec.Column(0).(*array.Int8).Int8Values() {
if srcvs[i] != f {
return i
}
}
return -1
},
},
{
logical: "fixed",
physical: "float16",
rowType: query.ExecResponseRowType{Scale: 1},
sc: arrow.NewSchema([]arrow.Field{{Type: &arrow.Int16Type{}}}, nil),
values: []int16{20, 26},
nrows: 2,
builder: array.NewInt16Builder(pool),
append: func(b array.Builder, vs any) { b.(*array.Int16Builder).AppendValues(vs.([]int16), valids) },
compare: func(src any, expected any, convertedRec arrow.Record) int {
srcvs := src.([]int16)
for i, f := range convertedRec.Column(0).(*array.Float64).Float64Values() {
rawFloat, _ := intToBigFloat(int64(srcvs[i]), 1).Float64()
if rawFloat != f {
return i
}
}
return -1
},
},
{
logical: "fixed",
physical: "int16",
rowType: query.ExecResponseRowType{Scale: 1},
sc: arrow.NewSchema([]arrow.Field{{Type: &arrow.Int16Type{}}}, nil),
values: []int16{20, 26},
withHigherPrecision: true,
nrows: 2,
builder: array.NewInt16Builder(pool),
append: func(b array.Builder, vs any) { b.(*array.Int16Builder).AppendValues(vs.([]int16), valids) },
compare: func(src any, expected any, convertedRec arrow.Record) int {
srcvs := src.([]int16)
for i, f := range convertedRec.Column(0).(*array.Int16).Int16Values() {
if srcvs[i] != f {
return i
}
}
return -1
},
},
{
logical: "fixed",
physical: "float32",
rowType: query.ExecResponseRowType{Scale: 2},
sc: arrow.NewSchema([]arrow.Field{{Type: &arrow.Int32Type{}}}, nil),
values: []int32{200, 265},
nrows: 2,
builder: array.NewInt32Builder(pool),
append: func(b array.Builder, vs any) { b.(*array.Int32Builder).AppendValues(vs.([]int32), valids) },
compare: func(src any, expected any, convertedRec arrow.Record) int {
srcvs := src.([]int32)
for i, f := range convertedRec.Column(0).(*array.Float64).Float64Values() {
rawFloat, _ := intToBigFloat(int64(srcvs[i]), 2).Float64()
if rawFloat != f {
return i
}
}
return -1
},
},
{
logical: "fixed",
physical: "int32",
rowType: query.ExecResponseRowType{Scale: 2},
sc: arrow.NewSchema([]arrow.Field{{Type: &arrow.Int32Type{}}}, nil),
values: []int32{200, 265},
withHigherPrecision: true,
nrows: 2,
builder: array.NewInt32Builder(pool),
append: func(b array.Builder, vs any) { b.(*array.Int32Builder).AppendValues(vs.([]int32), valids) },
compare: func(src any, expected any, convertedRec arrow.Record) int {
srcvs := src.([]int32)
for i, f := range convertedRec.Column(0).(*array.Int32).Int32Values() {
if srcvs[i] != f {
return i
}
}
return -1
},
},
{
logical: "fixed",
physical: "float64",
rowType: query.ExecResponseRowType{Scale: 5},
sc: arrow.NewSchema([]arrow.Field{{Type: &arrow.Int64Type{}}}, nil),
values: []int64{12345, 234567},
nrows: 2,
builder: array.NewInt64Builder(pool),
append: func(b array.Builder, vs any) { b.(*array.Int64Builder).AppendValues(vs.([]int64), valids) },
compare: func(src any, expected any, convertedRec arrow.Record) int {
srcvs := src.([]int64)
for i, f := range convertedRec.Column(0).(*array.Float64).Float64Values() {
rawFloat, _ := intToBigFloat(srcvs[i], 5).Float64()
if rawFloat != f {
return i
}
}
return -1
},
},
{
logical: "fixed",
physical: "int64",
rowType: query.ExecResponseRowType{Scale: 5},
sc: arrow.NewSchema([]arrow.Field{{Type: &arrow.Int64Type{}}}, nil),
values: []int64{12345, 234567},
withHigherPrecision: true,
nrows: 2,
builder: array.NewInt64Builder(pool),
append: func(b array.Builder, vs any) { b.(*array.Int64Builder).AppendValues(vs.([]int64), valids) },
compare: func(src any, expected any, convertedRec arrow.Record) int {
srcvs := src.([]int64)
for i, f := range convertedRec.Column(0).(*array.Int64).Int64Values() {
if srcvs[i] != f {
return i
}
}
return -1
},
},
{
logical: "boolean",
sc: arrow.NewSchema([]arrow.Field{{Type: &arrow.BooleanType{}}}, nil),
values: []bool{true, false},
nrows: 2,
builder: array.NewBooleanBuilder(pool),
append: func(b array.Builder, vs any) { b.(*array.BooleanBuilder).AppendValues(vs.([]bool), valids) },
},
{
logical: "real",
physical: "float",
sc: arrow.NewSchema([]arrow.Field{{Type: &arrow.Float64Type{}}}, nil),
values: []float64{1, 2},
nrows: 2,
builder: array.NewFloat64Builder(pool),
append: func(b array.Builder, vs any) { b.(*array.Float64Builder).AppendValues(vs.([]float64), valids) },
},
{
logical: "text",
physical: "string",
sc: arrow.NewSchema([]arrow.Field{{Type: &arrow.StringType{}}}, nil),
values: []string{"foo", "bar"},
nrows: 2,
builder: array.NewStringBuilder(pool),
append: func(b array.Builder, vs any) { b.(*array.StringBuilder).AppendValues(vs.([]string), valids) },
},
{
logical: "text",
physical: "string with invalid utf8",
sc: arrow.NewSchema([]arrow.Field{{Type: &arrow.StringType{}}}, nil),
rowType: query.ExecResponseRowType{Type: "TEXT"},
values: []string{"\xFF", "bar", "baz\xFF\xFF"},
expected: []string{"�", "bar", "baz��"},
enableArrowBatchesUtf8Validation: true,
nrows: 2,
builder: array.NewStringBuilder(pool),
append: func(b array.Builder, vs any) { b.(*array.StringBuilder).AppendValues(vs.([]string), valids) },
compare: func(src any, expected any, convertedRec arrow.Record) int {
arr := convertedRec.Column(0).(*array.String)
for i := 0; i < arr.Len(); i++ {
if expected.([]string)[i] != arr.Value(i) {
return i
}
}
return -1
},
},
{
logical: "binary",
sc: arrow.NewSchema([]arrow.Field{{Type: &arrow.BinaryType{}}}, nil),
values: [][]byte{[]byte("foo"), []byte("bar")},
nrows: 2,
builder: array.NewBinaryBuilder(pool, arrow.BinaryTypes.Binary),
append: func(b array.Builder, vs any) { b.(*array.BinaryBuilder).AppendValues(vs.([][]byte), valids) },
},
{
logical: "date",
sc: arrow.NewSchema([]arrow.Field{{Type: &arrow.Date32Type{}}}, nil),
values: []time.Time{time.Now(), localTime},
nrows: 2,
builder: array.NewDate32Builder(pool),
append: func(b array.Builder, vs any) {
for _, d := range vs.([]time.Time) {
b.(*array.Date32Builder).Append(arrow.Date32(d.Unix()))
}
},
},
{
logical: "time",
sc: arrow.NewSchema([]arrow.Field{{Type: arrow.FixedWidthTypes.Time64ns}}, nil),
values: []time.Time{time.Now(), time.Now()},
nrows: 2,
builder: array.NewTime64Builder(pool, arrow.FixedWidthTypes.Time64ns.(*arrow.Time64Type)),
append: func(b array.Builder, vs any) {
for _, t := range vs.([]time.Time) {
b.(*array.Time64Builder).Append(arrow.Time64(t.UnixNano()))
}
},
compare: func(src any, expected any, convertedRec arrow.Record) int {
srcvs := src.([]time.Time)
arr := convertedRec.Column(0).(*array.Time64)
for i := 0; i < arr.Len(); i++ {
if srcvs[i].UnixNano() != int64(arr.Value(i)) {
return i
}
}
return -1
},
},
{
logical: "timestamp_ntz",
physical: "int64",
values: []time.Time{time.Now().Truncate(time.Millisecond), localTime.Truncate(time.Millisecond)},
nrows: 2,
rowType: query.ExecResponseRowType{Scale: 3},
sc: arrow.NewSchema([]arrow.Field{{Type: &arrow.Int64Type{}}}, nil),
builder: array.NewInt64Builder(pool),
append: func(b array.Builder, vs any) {
for _, t := range vs.([]time.Time) {
b.(*array.Int64Builder).Append(t.UnixMilli())
}
},
compare: func(src any, expected any, convertedRec arrow.Record) int {
srcvs := src.([]time.Time)
for i, t := range convertedRec.Column(0).(*array.Timestamp).TimestampValues() {
if !srcvs[i].Equal(t.ToTime(arrow.Nanosecond)) {
return i
}
}
return -1
},
},
{
logical: "timestamp_ntz",
physical: "struct",
values: []time.Time{time.Now(), localTime},
nrows: 2,
rowType: query.ExecResponseRowType{Scale: 9},
sc: arrow.NewSchema([]arrow.Field{{Type: timestampNtzStruct}}, nil),
builder: array.NewStructBuilder(pool, timestampNtzStruct),
append: func(b array.Builder, vs any) {
sb := b.(*array.StructBuilder)
valids = []bool{true, true}
sb.AppendValues(valids)
for _, t := range vs.([]time.Time) {
sb.FieldBuilder(0).(*array.Int64Builder).Append(t.Unix())
sb.FieldBuilder(1).(*array.Int32Builder).Append(int32(t.Nanosecond()))
}
},
compare: func(src any, expected any, convertedRec arrow.Record) int {
srcvs := src.([]time.Time)
for i, t := range convertedRec.Column(0).(*array.Timestamp).TimestampValues() {
if !srcvs[i].Equal(t.ToTime(arrow.Nanosecond)) {
return i
}
}
return -1
},
},
{
logical: "timestamp_ntz",
physical: "struct",
values: []time.Time{time.Now().Truncate(time.Microsecond), localTime.Truncate(time.Microsecond)},
arrowBatchesTimestampOption: ia.UseMicrosecondTimestamp,
nrows: 2,
rowType: query.ExecResponseRowType{Scale: 9},
sc: arrow.NewSchema([]arrow.Field{{Type: timestampNtzStruct}}, nil),
builder: array.NewStructBuilder(pool, timestampNtzStruct),
append: func(b array.Builder, vs any) {
sb := b.(*array.StructBuilder)
valids = []bool{true, true}
sb.AppendValues(valids)
for _, t := range vs.([]time.Time) {
sb.FieldBuilder(0).(*array.Int64Builder).Append(t.Unix())
sb.FieldBuilder(1).(*array.Int32Builder).Append(int32(t.Nanosecond()))
}
},
compare: func(src any, expected any, convertedRec arrow.Record) int {
srcvs := src.([]time.Time)
for i, t := range convertedRec.Column(0).(*array.Timestamp).TimestampValues() {
if !srcvs[i].Equal(t.ToTime(arrow.Microsecond)) {
return i
}
}
return -1
},
},
{
logical: "timestamp_ntz",
physical: "struct",
values: []time.Time{time.Now().Truncate(time.Millisecond), localTime.Truncate(time.Millisecond)},
arrowBatchesTimestampOption: ia.UseMillisecondTimestamp,
nrows: 2,
rowType: query.ExecResponseRowType{Scale: 9},
sc: arrow.NewSchema([]arrow.Field{{Type: timestampNtzStruct}}, nil),
builder: array.NewStructBuilder(pool, timestampNtzStruct),
append: func(b array.Builder, vs any) {
sb := b.(*array.StructBuilder)
valids = []bool{true, true}
sb.AppendValues(valids)
for _, t := range vs.([]time.Time) {
sb.FieldBuilder(0).(*array.Int64Builder).Append(t.Unix())
sb.FieldBuilder(1).(*array.Int32Builder).Append(int32(t.Nanosecond()))
}
},
compare: func(src any, expected any, convertedRec arrow.Record) int {
srcvs := src.([]time.Time)
for i, t := range convertedRec.Column(0).(*array.Timestamp).TimestampValues() {
if !srcvs[i].Equal(t.ToTime(arrow.Millisecond)) {
return i
}
}
return -1
},
},
{
logical: "timestamp_ntz",
physical: "struct",
values: []time.Time{time.Now().Truncate(time.Second), localTime.Truncate(time.Second)},
arrowBatchesTimestampOption: ia.UseSecondTimestamp,
nrows: 2,
rowType: query.ExecResponseRowType{Scale: 9},
sc: arrow.NewSchema([]arrow.Field{{Type: timestampNtzStruct}}, nil),
builder: array.NewStructBuilder(pool, timestampNtzStruct),
append: func(b array.Builder, vs any) {
sb := b.(*array.StructBuilder)
valids = []bool{true, true}
sb.AppendValues(valids)
for _, t := range vs.([]time.Time) {
sb.FieldBuilder(0).(*array.Int64Builder).Append(t.Unix())
sb.FieldBuilder(1).(*array.Int32Builder).Append(int32(t.Nanosecond()))
}
},
compare: func(src any, expected any, convertedRec arrow.Record) int {
srcvs := src.([]time.Time)
for i, t := range convertedRec.Column(0).(*array.Timestamp).TimestampValues() {
if !srcvs[i].Equal(t.ToTime(arrow.Second)) {
return i
}
}
return -1
},
},
{
logical: "timestamp_ntz",
physical: "error",
values: []time.Time{localTimeFarIntoFuture},
error: "Cannot convert timestamp",
nrows: 1,
rowType: query.ExecResponseRowType{Scale: 3},
sc: arrow.NewSchema([]arrow.Field{{Type: &arrow.Int64Type{}}}, nil),
builder: array.NewInt64Builder(pool),
append: func(b array.Builder, vs any) {
for _, t := range vs.([]time.Time) {
b.(*array.Int64Builder).Append(t.UnixMilli())
}
},
compare: func(src any, expected any, convertedRec arrow.Record) int { return 0 },
},
{
logical: "timestamp_ntz",
physical: "int64 with original timestamp",
values: []time.Time{time.Now().Truncate(time.Millisecond), localTime.Truncate(time.Millisecond), localTimeFarIntoFuture.Truncate(time.Millisecond)},
arrowBatchesTimestampOption: ia.UseOriginalTimestamp,
nrows: 3,
rowType: query.ExecResponseRowType{Scale: 3},
sc: arrow.NewSchema([]arrow.Field{{Type: &arrow.Int64Type{}}}, nil),
builder: array.NewInt64Builder(pool),
append: func(b array.Builder, vs any) {
for _, t := range vs.([]time.Time) {
b.(*array.Int64Builder).Append(t.UnixMilli())
}
},
compare: func(src any, expected any, convertedRec arrow.Record) int {
srcvs := src.([]time.Time)
for i := 0; i < convertedRec.Column(0).Len(); i++ {
ts := ArrowSnowflakeTimestampToTime(convertedRec.Column(0), types.GetSnowflakeType("timestamp_ntz"), 3, i, nil)
if !srcvs[i].Equal(*ts) {
return i
}
}
return -1
},
},
{
logical: "timestamp_ntz",
physical: "struct with original timestamp",
values: []time.Time{time.Now(), localTime, localTimeFarIntoFuture},
arrowBatchesTimestampOption: ia.UseOriginalTimestamp,
nrows: 3,
rowType: query.ExecResponseRowType{Scale: 9},
sc: arrow.NewSchema([]arrow.Field{{Type: timestampNtzStruct}}, nil),
builder: array.NewStructBuilder(pool, timestampNtzStruct),
append: func(b array.Builder, vs any) {
sb := b.(*array.StructBuilder)
valids = []bool{true, true, true}
sb.AppendValues(valids)
for _, t := range vs.([]time.Time) {
sb.FieldBuilder(0).(*array.Int64Builder).Append(t.Unix())
sb.FieldBuilder(1).(*array.Int32Builder).Append(int32(t.Nanosecond()))
}
},
compare: func(src any, expected any, convertedRec arrow.Record) int {
srcvs := src.([]time.Time)
for i := 0; i < convertedRec.Column(0).Len(); i++ {
ts := ArrowSnowflakeTimestampToTime(convertedRec.Column(0), types.GetSnowflakeType("timestamp_ntz"), 9, i, nil)
if !srcvs[i].Equal(*ts) {
return i
}
}
return -1
},
},
{
logical: "timestamp_ltz",
physical: "int64",
values: []time.Time{time.Now().Truncate(time.Millisecond), localTime.Truncate(time.Millisecond)},
nrows: 2,
rowType: query.ExecResponseRowType{Scale: 3},
sc: arrow.NewSchema([]arrow.Field{{Type: &arrow.Int64Type{}}}, nil),
builder: array.NewInt64Builder(pool),
append: func(b array.Builder, vs any) {
for _, t := range vs.([]time.Time) {
b.(*array.Int64Builder).Append(t.UnixMilli())
}
},
compare: func(src any, expected any, convertedRec arrow.Record) int {
srcvs := src.([]time.Time)
for i, t := range convertedRec.Column(0).(*array.Timestamp).TimestampValues() {
if !srcvs[i].Equal(t.ToTime(arrow.Nanosecond)) {
return i
}
}
return -1
},
},
{
logical: "timestamp_ltz",
physical: "struct",
values: []time.Time{time.Now(), localTime},
nrows: 2,
rowType: query.ExecResponseRowType{Scale: 9},
sc: arrow.NewSchema([]arrow.Field{{Type: timestampNtzStruct}}, nil),
builder: array.NewStructBuilder(pool, timestampNtzStruct),
append: func(b array.Builder, vs any) {
sb := b.(*array.StructBuilder)
valids = []bool{true, true}
sb.AppendValues(valids)
for _, t := range vs.([]time.Time) {
sb.FieldBuilder(0).(*array.Int64Builder).Append(t.Unix())
sb.FieldBuilder(1).(*array.Int32Builder).Append(int32(t.Nanosecond()))
}
},
compare: func(src any, expected any, convertedRec arrow.Record) int {
srcvs := src.([]time.Time)
for i, t := range convertedRec.Column(0).(*array.Timestamp).TimestampValues() {
if !srcvs[i].Equal(t.ToTime(arrow.Nanosecond)) {
return i
}
}
return -1
},
},
{
logical: "timestamp_ltz",
physical: "struct",
values: []time.Time{time.Now().Truncate(time.Microsecond), localTime.Truncate(time.Microsecond)},
arrowBatchesTimestampOption: ia.UseMicrosecondTimestamp,
nrows: 2,
rowType: query.ExecResponseRowType{Scale: 9},
sc: arrow.NewSchema([]arrow.Field{{Type: timestampNtzStruct}}, nil),
builder: array.NewStructBuilder(pool, timestampNtzStruct),
append: func(b array.Builder, vs any) {
sb := b.(*array.StructBuilder)
valids = []bool{true, true}
sb.AppendValues(valids)
for _, t := range vs.([]time.Time) {
sb.FieldBuilder(0).(*array.Int64Builder).Append(t.Unix())
sb.FieldBuilder(1).(*array.Int32Builder).Append(int32(t.Nanosecond()))
}
},
compare: func(src any, expected any, convertedRec arrow.Record) int {
srcvs := src.([]time.Time)
for i, t := range convertedRec.Column(0).(*array.Timestamp).TimestampValues() {
if !srcvs[i].Equal(t.ToTime(arrow.Microsecond)) {
return i
}
}
return -1
},
},
{
logical: "timestamp_ltz",
physical: "struct",
values: []time.Time{time.Now().Truncate(time.Millisecond), localTime.Truncate(time.Millisecond)},
arrowBatchesTimestampOption: ia.UseMillisecondTimestamp,
nrows: 2,
rowType: query.ExecResponseRowType{Scale: 9},
sc: arrow.NewSchema([]arrow.Field{{Type: timestampNtzStruct}}, nil),
builder: array.NewStructBuilder(pool, timestampNtzStruct),
append: func(b array.Builder, vs any) {
sb := b.(*array.StructBuilder)
valids = []bool{true, true}
sb.AppendValues(valids)
for _, t := range vs.([]time.Time) {
sb.FieldBuilder(0).(*array.Int64Builder).Append(t.Unix())
sb.FieldBuilder(1).(*array.Int32Builder).Append(int32(t.Nanosecond()))
}
},
compare: func(src any, expected any, convertedRec arrow.Record) int {
srcvs := src.([]time.Time)
for i, t := range convertedRec.Column(0).(*array.Timestamp).TimestampValues() {
if !srcvs[i].Equal(t.ToTime(arrow.Millisecond)) {
return i
}
}
return -1
},
},
{
logical: "timestamp_ltz",
physical: "struct",
values: []time.Time{time.Now().Truncate(time.Second), localTime.Truncate(time.Second)},
arrowBatchesTimestampOption: ia.UseSecondTimestamp,
nrows: 2,
rowType: query.ExecResponseRowType{Scale: 9},
sc: arrow.NewSchema([]arrow.Field{{Type: timestampNtzStruct}}, nil),
builder: array.NewStructBuilder(pool, timestampNtzStruct),
append: func(b array.Builder, vs any) {
sb := b.(*array.StructBuilder)
valids = []bool{true, true}
sb.AppendValues(valids)
for _, t := range vs.([]time.Time) {
sb.FieldBuilder(0).(*array.Int64Builder).Append(t.Unix())
sb.FieldBuilder(1).(*array.Int32Builder).Append(int32(t.Nanosecond()))
}
},
compare: func(src any, expected any, convertedRec arrow.Record) int {
srcvs := src.([]time.Time)
for i, t := range convertedRec.Column(0).(*array.Timestamp).TimestampValues() {
if !srcvs[i].Equal(t.ToTime(arrow.Second)) {
return i
}
}
return -1
},
},
{
logical: "timestamp_ltz",
physical: "error",
values: []time.Time{localTimeFarIntoFuture},
error: "Cannot convert timestamp",
nrows: 1,
rowType: query.ExecResponseRowType{Scale: 3},
sc: arrow.NewSchema([]arrow.Field{{Type: &arrow.Int64Type{}}}, nil),
builder: array.NewInt64Builder(pool),
append: func(b array.Builder, vs any) {
for _, t := range vs.([]time.Time) {
b.(*array.Int64Builder).Append(t.UnixMilli())
}
},
compare: func(src any, expected any, convertedRec arrow.Record) int { return 0 },
},
{
logical: "timestamp_ltz",
physical: "int64 with original timestamp",
values: []time.Time{time.Now().Truncate(time.Millisecond), localTime.Truncate(time.Millisecond), localTimeFarIntoFuture.Truncate(time.Millisecond)},
arrowBatchesTimestampOption: ia.UseOriginalTimestamp,
nrows: 3,
rowType: query.ExecResponseRowType{Scale: 3},
sc: arrow.NewSchema([]arrow.Field{{Type: &arrow.Int64Type{}}}, nil),
builder: array.NewInt64Builder(pool),
append: func(b array.Builder, vs any) {
for _, t := range vs.([]time.Time) {
b.(*array.Int64Builder).Append(t.UnixMilli())
}
},
compare: func(src any, expected any, convertedRec arrow.Record) int {
srcvs := src.([]time.Time)
for i := 0; i < convertedRec.Column(0).Len(); i++ {
ts := ArrowSnowflakeTimestampToTime(convertedRec.Column(0), types.GetSnowflakeType("timestamp_ltz"), 3, i, localTime.Location())
if !srcvs[i].Equal(*ts) {
return i
}
}
return -1
},
},
{
logical: "timestamp_ltz",
physical: "struct with original timestamp",
values: []time.Time{time.Now(), localTime, localTimeFarIntoFuture},
arrowBatchesTimestampOption: ia.UseOriginalTimestamp,
nrows: 3,
rowType: query.ExecResponseRowType{Scale: 9},
sc: arrow.NewSchema([]arrow.Field{{Type: timestampLtzStruct}}, nil),
builder: array.NewStructBuilder(pool, timestampLtzStruct),
append: func(b array.Builder, vs any) {
sb := b.(*array.StructBuilder)
valids = []bool{true, true, true}
sb.AppendValues(valids)
for _, t := range vs.([]time.Time) {
sb.FieldBuilder(0).(*array.Int64Builder).Append(t.Unix())
sb.FieldBuilder(1).(*array.Int32Builder).Append(int32(t.Nanosecond()))
}
},
compare: func(src any, expected any, convertedRec arrow.Record) int {
srcvs := src.([]time.Time)
for i := 0; i < convertedRec.Column(0).Len(); i++ {
ts := ArrowSnowflakeTimestampToTime(convertedRec.Column(0), types.GetSnowflakeType("timestamp_ltz"), 9, i, localTime.Location())
if !srcvs[i].Equal(*ts) {
return i
}
}
return -1
},
},
{
logical: "timestamp_tz",
physical: "struct2",
values: []time.Time{time.Now().Truncate(time.Millisecond), localTime.Truncate(time.Millisecond)},
nrows: 2,
rowType: query.ExecResponseRowType{Scale: 3},
sc: arrow.NewSchema([]arrow.Field{{Type: timestampTzStructWithoutFraction}}, nil),
builder: array.NewStructBuilder(pool, timestampTzStructWithoutFraction),
append: func(b array.Builder, vs any) {
sb := b.(*array.StructBuilder)
valids = []bool{true, true}
sb.AppendValues(valids)
for _, t := range vs.([]time.Time) {
sb.FieldBuilder(0).(*array.Int64Builder).Append(t.UnixMilli())
sb.FieldBuilder(1).(*array.Int32Builder).Append(int32(0))
}
},
compare: func(src any, expected any, convertedRec arrow.Record) int {
srcvs := src.([]time.Time)
for i, t := range convertedRec.Column(0).(*array.Timestamp).TimestampValues() {
if !srcvs[i].Equal(t.ToTime(arrow.Nanosecond)) {
return i
}
}
return -1
},
},
{
logical: "timestamp_tz",
physical: "struct3",
values: []time.Time{time.Now(), localTime},
nrows: 2,
rowType: query.ExecResponseRowType{Scale: 9},
sc: arrow.NewSchema([]arrow.Field{{Type: timestampTzStructWithFraction}}, nil),
builder: array.NewStructBuilder(pool, timestampTzStructWithFraction),
append: func(b array.Builder, vs any) {
sb := b.(*array.StructBuilder)
valids = []bool{true, true}
sb.AppendValues(valids)
for _, t := range vs.([]time.Time) {
sb.FieldBuilder(0).(*array.Int64Builder).Append(t.Unix())
sb.FieldBuilder(1).(*array.Int32Builder).Append(int32(t.Nanosecond()))
sb.FieldBuilder(2).(*array.Int32Builder).Append(int32(0))
}
},
compare: func(src any, expected any, convertedRec arrow.Record) int {
srcvs := src.([]time.Time)
for i, t := range convertedRec.Column(0).(*array.Timestamp).TimestampValues() {
if !srcvs[i].Equal(t.ToTime(arrow.Nanosecond)) {
return i
}
}
return -1
},
},
{
logical: "timestamp_tz",
physical: "struct3",
values: []time.Time{time.Now().Truncate(time.Microsecond), localTime.Truncate(time.Microsecond)},
arrowBatchesTimestampOption: ia.UseMicrosecondTimestamp,
nrows: 2,
rowType: query.ExecResponseRowType{Scale: 9},
sc: arrow.NewSchema([]arrow.Field{{Type: timestampTzStructWithFraction}}, nil),
builder: array.NewStructBuilder(pool, timestampTzStructWithFraction),
append: func(b array.Builder, vs any) {
sb := b.(*array.StructBuilder)
valids = []bool{true, true}
sb.AppendValues(valids)
for _, t := range vs.([]time.Time) {
sb.FieldBuilder(0).(*array.Int64Builder).Append(t.Unix())
sb.FieldBuilder(1).(*array.Int32Builder).Append(int32(t.Nanosecond()))
sb.FieldBuilder(2).(*array.Int32Builder).Append(int32(0))
}
},
compare: func(src any, expected any, convertedRec arrow.Record) int {
srcvs := src.([]time.Time)
for i, t := range convertedRec.Column(0).(*array.Timestamp).TimestampValues() {
if !srcvs[i].Equal(t.ToTime(arrow.Microsecond)) {
return i
}
}
return -1
},
},
{
logical: "timestamp_tz",
physical: "struct3",
values: []time.Time{time.Now().Truncate(time.Millisecond), localTime.Truncate(time.Millisecond)},
arrowBatchesTimestampOption: ia.UseMillisecondTimestamp,
nrows: 2,
rowType: query.ExecResponseRowType{Scale: 9},
sc: arrow.NewSchema([]arrow.Field{{Type: timestampTzStructWithFraction}}, nil),
builder: array.NewStructBuilder(pool, timestampTzStructWithFraction),
append: func(b array.Builder, vs any) {
sb := b.(*array.StructBuilder)
valids = []bool{true, true}
sb.AppendValues(valids)
for _, t := range vs.([]time.Time) {
sb.FieldBuilder(0).(*array.Int64Builder).Append(t.Unix())
sb.FieldBuilder(1).(*array.Int32Builder).Append(int32(t.Nanosecond()))
sb.FieldBuilder(2).(*array.Int32Builder).Append(int32(0))
}
},
compare: func(src any, expected any, convertedRec arrow.Record) int {
srcvs := src.([]time.Time)
for i, t := range convertedRec.Column(0).(*array.Timestamp).TimestampValues() {
if !srcvs[i].Equal(t.ToTime(arrow.Millisecond)) {
return i
}
}
return -1
},
},
{
logical: "timestamp_tz",
physical: "struct3",
values: []time.Time{time.Now().Truncate(time.Second), localTime.Truncate(time.Second)},
arrowBatchesTimestampOption: ia.UseSecondTimestamp,
nrows: 2,
rowType: query.ExecResponseRowType{Scale: 9},
sc: arrow.NewSchema([]arrow.Field{{Type: timestampTzStructWithFraction}}, nil),
builder: array.NewStructBuilder(pool, timestampTzStructWithFraction),
append: func(b array.Builder, vs any) {
sb := b.(*array.StructBuilder)
valids = []bool{true, true}
sb.AppendValues(valids)
for _, t := range vs.([]time.Time) {
sb.FieldBuilder(0).(*array.Int64Builder).Append(t.Unix())
sb.FieldBuilder(1).(*array.Int32Builder).Append(int32(t.Nanosecond()))
sb.FieldBuilder(2).(*array.Int32Builder).Append(int32(0))
}
},
compare: func(src any, expected any, convertedRec arrow.Record) int {
srcvs := src.([]time.Time)
for i, t := range convertedRec.Column(0).(*array.Timestamp).TimestampValues() {
if !srcvs[i].Equal(t.ToTime(arrow.Second)) {
return i
}
}
return -1
},
},
{
logical: "timestamp_tz",
physical: "struct2 with original timestamp",
values: []time.Time{time.Now().Truncate(time.Millisecond), localTime.Truncate(time.Millisecond), localTimeFarIntoFuture.Truncate(time.Millisecond)},
arrowBatchesTimestampOption: ia.UseOriginalTimestamp,
nrows: 3,
rowType: query.ExecResponseRowType{Scale: 3},
sc: arrow.NewSchema([]arrow.Field{{Type: timestampTzStructWithoutFraction}}, nil),
builder: array.NewStructBuilder(pool, timestampTzStructWithoutFraction),
append: func(b array.Builder, vs any) {
sb := b.(*array.StructBuilder)
valids = []bool{true, true, true}
sb.AppendValues(valids)
for _, t := range vs.([]time.Time) {
sb.FieldBuilder(0).(*array.Int64Builder).Append(t.UnixMilli())
sb.FieldBuilder(1).(*array.Int32Builder).Append(int32(0))
}
},
compare: func(src any, expected any, convertedRec arrow.Record) int {
srcvs := src.([]time.Time)
for i := 0; i < convertedRec.Column(0).Len(); i++ {
ts := ArrowSnowflakeTimestampToTime(convertedRec.Column(0), types.GetSnowflakeType("timestamp_tz"), 3, i, nil)
if !srcvs[i].Equal(*ts) {
return i
}
}
return -1
},
},
{
logical: "timestamp_tz",
physical: "struct3 with original timestamp",
values: []time.Time{time.Now(), localTime, localTimeFarIntoFuture},
arrowBatchesTimestampOption: ia.UseOriginalTimestamp,
nrows: 3,
rowType: query.ExecResponseRowType{Scale: 9},
sc: arrow.NewSchema([]arrow.Field{{Type: timestampTzStructWithFraction}}, nil),
builder: array.NewStructBuilder(pool, timestampTzStructWithFraction),
append: func(b array.Builder, vs any) {
sb := b.(*array.StructBuilder)
valids = []bool{true, true, true}
sb.AppendValues(valids)
for _, t := range vs.([]time.Time) {
sb.FieldBuilder(0).(*array.Int64Builder).Append(t.Unix())
sb.FieldBuilder(1).(*array.Int32Builder).Append(int32(t.Nanosecond()))
sb.FieldBuilder(2).(*array.Int32Builder).Append(int32(0))
}
},
compare: func(src any, expected any, convertedRec arrow.Record) int {
srcvs := src.([]time.Time)
for i := 0; i < convertedRec.Column(0).Len(); i++ {
ts := ArrowSnowflakeTimestampToTime(convertedRec.Column(0), types.GetSnowflakeType("timestamp_tz"), 9, i, nil)
if !srcvs[i].Equal(*ts) {
return i
}
}
return -1
},
},
{
logical: "array",
values: [][]string{{"foo", "bar"}, {"baz", "quz", "quux"}},
nrows: 2,
sc: arrow.NewSchema([]arrow.Field{{Type: &arrow.StringType{}}}, nil),
builder: array.NewStringBuilder(pool),
append: func(b array.Builder, vs any) {
for _, a := range vs.([][]string) {
b.(*array.StringBuilder).Append(fmt.Sprint(a))
}
},
},
{
logical: "object",
values: []testObj{{0, "foo"}, {1, "bar"}},
nrows: 2,
sc: arrow.NewSchema([]arrow.Field{{Type: &arrow.StringType{}}}, nil),
builder: array.NewStringBuilder(pool),
append: func(b array.Builder, vs any) {
for _, o := range vs.([]testObj) {
b.(*array.StringBuilder).Append(fmt.Sprint(o))
}
},
},
} {
testName := tc.logical
if tc.physical != "" {
testName += " " + tc.physical
}
t.Run(testName, func(t *testing.T) {
scope := memory.NewCheckedAllocatorScope(pool)
defer scope.CheckSize(t)
b := tc.builder
defer b.Release()
tc.append(b, tc.values)
arr := b.NewArray()
defer arr.Release()
rawRec := array.NewRecord(tc.sc, []arrow.Array{arr}, int64(tc.nrows))
defer rawRec.Release()
meta := tc.rowType
meta.Type = tc.logical
ctx := context.Background()
switch tc.arrowBatchesTimestampOption {
case ia.UseOriginalTimestamp:
ctx = ia.WithTimestampOption(ctx, ia.UseOriginalTimestamp)
case ia.UseSecondTimestamp:
ctx = ia.WithTimestampOption(ctx, ia.UseSecondTimestamp)
case ia.UseMillisecondTimestamp:
ctx = ia.WithTimestampOption(ctx, ia.UseMillisecondTimestamp)
case ia.UseMicrosecondTimestamp:
ctx = ia.WithTimestampOption(ctx, ia.UseMicrosecondTimestamp)
default:
ctx = ia.WithTimestampOption(ctx, ia.UseNanosecondTimestamp)
}
if tc.enableArrowBatchesUtf8Validation {
ctx = ia.EnableUtf8Validation(ctx)
}
if tc.withHigherPrecision {
ctx = ia.WithHigherPrecision(ctx)
}
transformedRec, err := arrowToRecord(ctx, rawRec, pool, []query.ExecResponseRowType{meta}, localTime.Location())
if err != nil {
if tc.error == "" || !strings.Contains(err.Error(), tc.error) {
t.Fatalf("error: %s", err)
}
} else {
defer transformedRec.Release()
if tc.error != "" {
t.Fatalf("expected error: %s", tc.error)
}
if tc.compare != nil {
idx := tc.compare(tc.values, tc.expected, transformedRec)
if idx != -1 {
t.Fatalf("error: column array value mismatch at index %v", idx)
}
} else {
for i, c := range transformedRec.Columns() {
rawCol := rawRec.Column(i)
if rawCol != c {
t.Fatalf("error: expected column %s, got column %s", rawCol, c)
}
}
}
}
})
}
}
================================================
FILE: arrowbatches/schema.go
================================================
package arrowbatches
import (
"github.com/snowflakedb/gosnowflake/v2/internal/query"
"github.com/snowflakedb/gosnowflake/v2/internal/types"
"time"
ia "github.com/snowflakedb/gosnowflake/v2/internal/arrow"
"github.com/apache/arrow-go/v18/arrow"
)
// recordToSchema builds the post-conversion Arrow schema for a record: each
// field is mapped through recordToSchemaRecursive according to its Snowflake
// row type, the timestamp option, and the higher-precision flag, while the
// original schema-level metadata is preserved.
func recordToSchema(sc *arrow.Schema, rowType []query.ExecResponseRowType, loc *time.Location, timestampOption ia.TimestampOption, withHigherPrecision bool) (*arrow.Schema, error) {
	convertedFields := recordToSchemaRecursive(sc.Fields(), rowType, loc, timestampOption, withHigherPrecision)
	// Carry the original schema metadata over to the converted schema.
	metadata := sc.Metadata()
	return arrow.NewSchema(convertedFields, &metadata), nil
}
// recordToSchemaRecursive converts a slice of Arrow fields to their
// post-conversion counterparts. Fields whose type does not change are reused
// as-is; converted fields keep their name, nullability, and metadata but get
// the new data type. rowType must have one entry per input field.
func recordToSchemaRecursive(inFields []arrow.Field, rowType []query.ExecResponseRowType, loc *time.Location, timestampOption ia.TimestampOption, withHigherPrecision bool) []arrow.Field {
	// Pre-size the output: there is exactly one output field per input field,
	// so avoid repeated append growth.
	outFields := make([]arrow.Field, 0, len(inFields))
	for i, f := range inFields {
		fieldMetadata := rowType[i].ToFieldMetadata()
		converted, t := recordToSchemaSingleField(fieldMetadata, f, withHigherPrecision, timestampOption, loc)
		newField := f
		if converted {
			newField = arrow.Field{
				Name:     f.Name,
				Type:     t,
				Nullable: f.Nullable,
				Metadata: f.Metadata,
			}
		}
		outFields = append(outFields, newField)
	}
	return outFields
}
// recordToSchemaSingleField determines the Arrow data type a single field
// should have after conversion, based on its Snowflake logical type and the
// requested options. It returns (true, newType) when the field must be
// rebuilt with newType, and (false, f.Type) when it can be kept unchanged.
// Structured types (object/array/map) are converted recursively.
func recordToSchemaSingleField(fieldMetadata query.FieldMetadata, f arrow.Field, withHigherPrecision bool, timestampOption ia.TimestampOption, loc *time.Location) (bool, arrow.DataType) {
	t := f.Type
	converted := true
	switch types.GetSnowflakeType(fieldMetadata.Type) {
	case types.FixedType:
		switch f.Type.ID() {
		case arrow.DECIMAL:
			if withHigherPrecision {
				// Keep the decimal representation as-is.
				converted = false
			} else if fieldMetadata.Scale == 0 {
				t = &arrow.Int64Type{}
			} else {
				t = &arrow.Float64Type{}
			}
		default:
			if withHigherPrecision {
				converted = false
			} else if fieldMetadata.Scale != 0 {
				// Scaled integers become float64 unless higher precision was requested.
				t = &arrow.Float64Type{}
			} else {
				converted = false
			}
		}
	case types.TimeType:
		t = &arrow.Time64Type{Unit: arrow.Nanosecond}
	case types.TimestampNtzType, types.TimestampTzType:
		switch timestampOption {
		case ia.UseOriginalTimestamp:
			// Caller wants the raw Snowflake struct representation untouched.
			converted = false
		case ia.UseMicrosecondTimestamp:
			t = &arrow.TimestampType{Unit: arrow.Microsecond}
		case ia.UseMillisecondTimestamp:
			t = &arrow.TimestampType{Unit: arrow.Millisecond}
		case ia.UseSecondTimestamp:
			t = &arrow.TimestampType{Unit: arrow.Second}
		default:
			t = &arrow.TimestampType{Unit: arrow.Nanosecond}
		}
	case types.TimestampLtzType:
		// LTZ timestamps additionally carry the session location as the
		// Arrow time zone.
		switch timestampOption {
		case ia.UseOriginalTimestamp:
			converted = false
		case ia.UseMicrosecondTimestamp:
			t = &arrow.TimestampType{Unit: arrow.Microsecond, TimeZone: loc.String()}
		case ia.UseMillisecondTimestamp:
			t = &arrow.TimestampType{Unit: arrow.Millisecond, TimeZone: loc.String()}
		case ia.UseSecondTimestamp:
			t = &arrow.TimestampType{Unit: arrow.Second, TimeZone: loc.String()}
		default:
			t = &arrow.TimestampType{Unit: arrow.Nanosecond, TimeZone: loc.String()}
		}
	case types.ObjectType:
		converted = false
		if f.Type.ID() == arrow.STRUCT {
			// Rebuild the struct type, converting each member field; the
			// struct is "converted" if any member was.
			var internalFields []arrow.Field
			for idx, internalField := range f.Type.(*arrow.StructType).Fields() {
				internalConverted, convertedDataType := recordToSchemaSingleField(fieldMetadata.Fields[idx], internalField, withHigherPrecision, timestampOption, loc)
				converted = converted || internalConverted
				if internalConverted {
					newInternalField := arrow.Field{
						Name:     internalField.Name,
						Type:     convertedDataType,
						Metadata: internalField.Metadata,
						Nullable: internalField.Nullable,
					}
					internalFields = append(internalFields, newInternalField)
				} else {
					internalFields = append(internalFields, internalField)
				}
			}
			t = arrow.StructOf(internalFields...)
		}
	case types.ArrayType:
		if listType, ok := f.Type.(*arrow.ListType); ok {
			// BUG FIX: the previous code declared a new `converted` with `:=`
			// here, shadowing the outer variable, so the outer result always
			// stayed true even when the element type was unchanged. Assign to
			// the outer variable instead. (When unconverted, t remains f.Type,
			// so the resulting schema is identical either way.)
			var elemType arrow.DataType
			converted, elemType = recordToSchemaSingleField(fieldMetadata.Fields[0], listType.ElemField(), withHigherPrecision, timestampOption, loc)
			if converted {
				t = arrow.ListOf(elemType)
			}
		} else {
			t = f.Type
		}
	case types.MapType:
		// NOTE(review): assumes f.Type is always *arrow.MapType for Snowflake
		// MAP columns — confirm upstream guarantees this, otherwise the type
		// assertion panics.
		convertedKey, keyDataType := recordToSchemaSingleField(fieldMetadata.Fields[0], f.Type.(*arrow.MapType).KeyField(), withHigherPrecision, timestampOption, loc)
		convertedValue, valueDataType := recordToSchemaSingleField(fieldMetadata.Fields[1], f.Type.(*arrow.MapType).ItemField(), withHigherPrecision, timestampOption, loc)
		converted = convertedKey || convertedValue
		if converted {
			t = arrow.MapOf(keyDataType, valueDataType)
		}
	default:
		converted = false
	}
	return converted, t
}
================================================
FILE: assert_test.go
================================================
package gosnowflake
import (
"bytes"
"errors"
"fmt"
"math"
"reflect"
"regexp"
"slices"
"strings"
"testing"
"time"
)
// Assertion helpers used throughout the driver's tests.
//
// Naming convention: the trailing letter selects the failure mode — "...E"
// helpers report via t.Error (the test continues running), while "...F"
// helpers report via t.Fatal (the test stops immediately). Each wrapper
// delegates the actual check to a validate* function that returns an empty
// string on success or a failure message on mismatch.

// assertNilE reports a test error when actual is not nil.
func assertNilE(t *testing.T, actual any, descriptions ...string) {
	t.Helper()
	errorOnNonEmpty(t, validateNil(actual, descriptions...))
}

// assertNilF aborts the test when actual is not nil.
func assertNilF(t *testing.T, actual any, descriptions ...string) {
	t.Helper()
	fatalOnNonEmpty(t, validateNil(actual, descriptions...))
}

// assertNotNilE reports a test error when actual is nil.
func assertNotNilE(t *testing.T, actual any, descriptions ...string) {
	t.Helper()
	errorOnNonEmpty(t, validateNotNil(actual, descriptions...))
}

// assertNotNilF aborts the test when actual is nil.
func assertNotNilF(t *testing.T, actual any, descriptions ...string) {
	t.Helper()
	fatalOnNonEmpty(t, validateNotNil(actual, descriptions...))
}

// assertErrIsF aborts the test unless errors.Is(actual, expected) holds.
func assertErrIsF(t *testing.T, actual, expected error, descriptions ...string) {
	t.Helper()
	fatalOnNonEmpty(t, validateErrIs(actual, expected, descriptions...))
}

// assertErrIsE reports a test error unless errors.Is(actual, expected) holds.
func assertErrIsE(t *testing.T, actual, expected error, descriptions ...string) {
	t.Helper()
	errorOnNonEmpty(t, validateErrIs(actual, expected, descriptions...))
}

// assertErrorsAsF aborts the test unless errors.As(err, target) holds.
func assertErrorsAsF(t *testing.T, err error, target any, descriptions ...string) {
	t.Helper()
	fatalOnNonEmpty(t, validateErrorsAs(err, target, descriptions...))
}

// assertEqualE reports a test error unless actual == expected.
func assertEqualE(t *testing.T, actual any, expected any, descriptions ...string) {
	t.Helper()
	errorOnNonEmpty(t, validateEqual(actual, expected, descriptions...))
}

// assertEqualF aborts the test unless actual == expected.
func assertEqualF(t *testing.T, actual any, expected any, descriptions ...string) {
	t.Helper()
	fatalOnNonEmpty(t, validateEqual(actual, expected, descriptions...))
}

// assertEqualIgnoringWhitespaceE reports a test error unless the two strings
// are equal after stripping all whitespace.
func assertEqualIgnoringWhitespaceE(t *testing.T, actual string, expected string, descriptions ...string) {
	t.Helper()
	errorOnNonEmpty(t, validateEqualIgnoringWhitespace(actual, expected, descriptions...))
}

// assertEqualEpsilonE reports a test error unless the floats differ by less
// than epsilon.
func assertEqualEpsilonE(t *testing.T, actual, expected, epsilon float64, descriptions ...string) {
	t.Helper()
	errorOnNonEmpty(t, validateEqualEpsilon(actual, expected, epsilon, descriptions...))
}

// assertDeepEqualE reports a test error unless reflect.DeepEqual holds.
func assertDeepEqualE(t *testing.T, actual any, expected any, descriptions ...string) {
	t.Helper()
	errorOnNonEmpty(t, validateDeepEqual(actual, expected, descriptions...))
}

// assertNotEqualF aborts the test when actual == expected.
func assertNotEqualF(t *testing.T, actual any, expected any, descriptions ...string) {
	t.Helper()
	fatalOnNonEmpty(t, validateNotEqual(actual, expected, descriptions...))
}

// assertNotEqualE reports a test error when actual == expected.
func assertNotEqualE(t *testing.T, actual any, expected any, descriptions ...string) {
	t.Helper()
	errorOnNonEmpty(t, validateNotEqual(actual, expected, descriptions...))
}

// assertBytesEqualE reports a test error unless the byte slices are equal.
func assertBytesEqualE(t *testing.T, actual []byte, expected []byte, descriptions ...string) {
	t.Helper()
	errorOnNonEmpty(t, validateBytesEqual(actual, expected, descriptions...))
}

// assertTrueF aborts the test unless actual is true.
func assertTrueF(t *testing.T, actual bool, descriptions ...string) {
	t.Helper()
	fatalOnNonEmpty(t, validateEqual(actual, true, descriptions...))
}

// assertTrueE reports a test error unless actual is true.
func assertTrueE(t *testing.T, actual bool, descriptions ...string) {
	t.Helper()
	errorOnNonEmpty(t, validateEqual(actual, true, descriptions...))
}

// assertFalseF aborts the test unless actual is false.
func assertFalseF(t *testing.T, actual bool, descriptions ...string) {
	t.Helper()
	fatalOnNonEmpty(t, validateEqual(actual, false, descriptions...))
}

// assertFalseE reports a test error unless actual is false.
func assertFalseE(t *testing.T, actual bool, descriptions ...string) {
	t.Helper()
	errorOnNonEmpty(t, validateEqual(actual, false, descriptions...))
}

// assertStringContainsE reports a test error unless actual contains the
// expected substring.
func assertStringContainsE(t *testing.T, actual string, expectedToContain string, descriptions ...string) {
	t.Helper()
	errorOnNonEmpty(t, validateStringContains(actual, expectedToContain, descriptions...))
}

// assertStringContainsF aborts the test unless actual contains the expected
// substring.
func assertStringContainsF(t *testing.T, actual string, expectedToContain string, descriptions ...string) {
	t.Helper()
	fatalOnNonEmpty(t, validateStringContains(actual, expectedToContain, descriptions...))
}

// assertEmptyStringE reports a test error unless actual is the empty string.
func assertEmptyStringE(t *testing.T, actual string, descriptions ...string) {
	t.Helper()
	errorOnNonEmpty(t, validateEmptyString(actual, descriptions...))
}

// assertHasPrefixF aborts the test unless actual starts with expectedPrefix.
func assertHasPrefixF(t *testing.T, actual string, expectedPrefix string, descriptions ...string) {
	t.Helper()
	fatalOnNonEmpty(t, validateHasPrefix(actual, expectedPrefix, descriptions...))
}

// assertHasPrefixE reports a test error unless actual starts with
// expectedPrefix.
func assertHasPrefixE(t *testing.T, actual string, expectedPrefix string, descriptions ...string) {
	t.Helper()
	errorOnNonEmpty(t, validateHasPrefix(actual, expectedPrefix, descriptions...))
}

// assertBetweenE reports a test error unless value lies strictly between min
// and max (exclusive bounds).
func assertBetweenE(t *testing.T, value float64, min float64, max float64, descriptions ...string) {
	t.Helper()
	errorOnNonEmpty(t, validateValueBetween(value, min, max, descriptions...))
}

// assertBetweenInclusiveE reports a test error unless value lies between min
// and max (inclusive bounds).
func assertBetweenInclusiveE(t *testing.T, value float64, min float64, max float64, descriptions ...string) {
	t.Helper()
	errorOnNonEmpty(t, validateValueBetweenInclusive(value, min, max, descriptions...))
}

// assertEmptyE reports a test error unless the slice has length zero.
func assertEmptyE[T any](t *testing.T, actual []T, descriptions ...string) {
	t.Helper()
	errorOnNonEmpty(t, validateEmpty(actual, descriptions...))
}
// fatalOnNonEmpty aborts the test via t.Fatal when errMsg is non-empty;
// an empty message means the preceding validation succeeded.
func fatalOnNonEmpty(t *testing.T, errMsg string) {
	if errMsg == "" {
		return
	}
	t.Helper()
	t.Fatal(formatErrorMessage(errMsg))
}

// errorOnNonEmpty records a test failure via t.Error when errMsg is
// non-empty, letting the test continue; an empty message is a no-op.
func errorOnNonEmpty(t *testing.T, errMsg string) {
	if errMsg == "" {
		return
	}
	t.Helper()
	t.Error(formatErrorMessage(errMsg))
}
// formatErrorMessage prefixes the (secret-masked) message with an
// RFC3339Nano timestamp for easier correlation in test logs.
func formatErrorMessage(errMsg string) string {
	timestamp := time.Now().Format(time.RFC3339Nano)
	return fmt.Sprintf("[%s] %s", timestamp, maskSecrets(errMsg))
}
// validateNil returns "" when actual is nil, or a failure message (with
// secrets masked) describing the non-nil value.
func validateNil(actual any, descriptions ...string) string {
	if !isNil(actual) {
		masked := maskSecrets(fmt.Sprintf("%v", actual))
		return fmt.Sprintf("expected \"%s\" to be nil but was not. %s", masked, joinDescriptions(descriptions...))
	}
	return ""
}
// validateNotNil returns "" when actual is non-nil, or a failure message
// otherwise. The previous message read "expected to be not nil but was
// not.", which contradicted itself; it now correctly states the value was
// nil.
func validateNotNil(actual any, descriptions ...string) string {
	if !isNil(actual) {
		return ""
	}
	desc := joinDescriptions(descriptions...)
	return fmt.Sprintf("expected to be not nil but was nil. %s", desc)
}
func validateErrIs(actual, expected error, descriptions ...string) string {
if errors.Is(actual, expected) {
return ""
}
desc := joinDescriptions(descriptions...)
actualStr := "nil"
expectedStr := "nil"
if actual != nil {
actualStr = maskSecrets(actual.Error())
}
if expected != nil {
expectedStr = maskSecrets(expected.Error())
}
return fmt.Sprintf("expected %v to be %v. %s", actualStr, expectedStr, desc)
}
func validateErrorsAs(err error, target any, descriptions ...string) string {
if errors.As(err, target) {
return ""
}
desc := joinDescriptions(descriptions...)
errStr := "nil"
if err != nil {
errStr = maskSecrets(err.Error())
}
targetType := reflect.TypeOf(target)
return fmt.Sprintf("expected error %v to be assignable to %v but was not. %s", errStr, targetType, desc)
}
func validateEqual(actual any, expected any, descriptions ...string) string {
if expected == actual {
return ""
}
desc := joinDescriptions(descriptions...)
return fmt.Sprintf("expected \"%s\" to be equal to \"%s\" but was not. %s",
maskSecrets(fmt.Sprintf("%v", actual)),
maskSecrets(fmt.Sprintf("%v", expected)),
desc)
}
// whitespacePattern matches any run of whitespace characters. Compiled once
// at package init via MustCompile instead of recompiling (and panicking on a
// never-failing error branch) on every call.
var whitespacePattern = regexp.MustCompile(`\s+`)

// removeWhitespaces returns s with every whitespace character removed.
func removeWhitespaces(s string) string {
	return whitespacePattern.ReplaceAllString(s, "")
}
// validateEqualIgnoringWhitespace returns "" when the two strings are equal
// after stripping all whitespace, or a failure message otherwise.
func validateEqualIgnoringWhitespace(actual string, expected string, descriptions ...string) string {
	if removeWhitespaces(actual) == removeWhitespaces(expected) {
		return ""
	}
	return fmt.Sprintf("expected \"%s\" to be equal to \"%s\" but was not. %s",
		maskSecrets(actual),
		maskSecrets(expected),
		joinDescriptions(descriptions...))
}
// validateEqualEpsilon returns "" when |actual - expected| < epsilon, or a
// failure message otherwise.
func validateEqualEpsilon(actual, expected, epsilon float64, descriptions ...string) string {
	diff := math.Abs(actual - expected)
	if diff >= epsilon {
		return fmt.Sprintf("expected \"%f\" to be equal to \"%f\" within epsilon \"%f\" but was not. %s", actual, expected, epsilon, joinDescriptions(descriptions...))
	}
	return ""
}
func validateDeepEqual(actual any, expected any, descriptions ...string) string {
if reflect.DeepEqual(actual, expected) {
return ""
}
desc := joinDescriptions(descriptions...)
return fmt.Sprintf("expected \"%s\" to be equal to \"%s\" but was not. %s",
maskSecrets(fmt.Sprintf("%v", actual)),
maskSecrets(fmt.Sprintf("%v", expected)),
desc)
}
func validateNotEqual(actual any, expected any, descriptions ...string) string {
if expected != actual {
return ""
}
desc := joinDescriptions(descriptions...)
return fmt.Sprintf("expected \"%s\" not to be equal to \"%s\" but they were the same. %s",
maskSecrets(fmt.Sprintf("%v", actual)),
maskSecrets(fmt.Sprintf("%v", expected)),
desc)
}
func validateBytesEqual(actual []byte, expected []byte, descriptions ...string) string {
if bytes.Equal(actual, expected) {
return ""
}
desc := joinDescriptions(descriptions...)
return fmt.Sprintf("expected \"%s\" to be equal to \"%s\" but was not. %s",
maskSecrets(string(actual)),
maskSecrets(string(expected)),
desc)
}
func validateStringContains(actual string, expectedToContain string, descriptions ...string) string {
if strings.Contains(actual, expectedToContain) {
return ""
}
desc := joinDescriptions(descriptions...)
return fmt.Sprintf("expected \"%s\" to contain \"%s\" but did not. %s",
maskSecrets(actual),
maskSecrets(expectedToContain),
desc)
}
func validateEmptyString(actual string, descriptions ...string) string {
if actual == "" {
return ""
}
desc := joinDescriptions(descriptions...)
return fmt.Sprintf("expected \"%s\" to be empty, but was not. %s", maskSecrets(actual), desc)
}
func validateHasPrefix(actual string, expectedPrefix string, descriptions ...string) string {
if strings.HasPrefix(actual, expectedPrefix) {
return ""
}
desc := joinDescriptions(descriptions...)
return fmt.Sprintf("expected \"%s\" to start with \"%s\" but did not. %s",
maskSecrets(actual),
maskSecrets(expectedPrefix),
desc)
}
// validateValueBetween returns "" when value lies strictly between min and
// max (exclusive bounds), or a failure message otherwise.
func validateValueBetween(value float64, min float64, max float64, descriptions ...string) string {
	inRange := value > min && value < max
	if inRange {
		return ""
	}
	return fmt.Sprintf("expected \"%s\" should be between \"%s\" and \"%s\" but did not. %s",
		fmt.Sprintf("%f", value),
		fmt.Sprintf("%f", min),
		fmt.Sprintf("%f", max),
		joinDescriptions(descriptions...))
}
// validateValueBetweenInclusive returns "" when value lies between min and
// max (inclusive bounds), or a failure message otherwise.
func validateValueBetweenInclusive(value float64, min float64, max float64, descriptions ...string) string {
	inRange := value >= min && value <= max
	if inRange {
		return ""
	}
	return fmt.Sprintf("expected \"%s\" should be between \"%s\" and \"%s\" inclusively but did not. %s",
		fmt.Sprintf("%f", value),
		fmt.Sprintf("%f", min),
		fmt.Sprintf("%f", max),
		joinDescriptions(descriptions...))
}
func validateEmpty[T any](value []T, descriptions ...string) string {
if len(value) == 0 {
return ""
}
desc := joinDescriptions(descriptions...)
return fmt.Sprintf("expected \"%v\" to be empty. %s", maskSecrets(fmt.Sprintf("%v", value)), desc)
}
// joinDescriptions concatenates the optional description strings with single
// spaces; no descriptions yields the empty string.
func joinDescriptions(descriptions ...string) string {
	var sb strings.Builder
	for i, d := range descriptions {
		if i > 0 {
			sb.WriteString(" ")
		}
		sb.WriteString(d)
	}
	return sb.String()
}
// isNil reports whether value is nil, including a typed nil (pointer, slice,
// map, interface, or func) wrapped in a non-nil interface. Non-nilable kinds
// (ints, strings, structs, ...) always return false.
func isNil(value any) bool {
	if value == nil {
		return true
	}
	nilableKinds := []reflect.Kind{reflect.Pointer, reflect.Slice, reflect.Map, reflect.Interface, reflect.Func}
	rv := reflect.ValueOf(value)
	if !slices.Contains(nilableKinds, rv.Kind()) {
		return false
	}
	return rv.IsNil()
}
================================================
FILE: async.go
================================================
package gosnowflake
import (
"context"
"fmt"
"net/url"
"strconv"
"time"
)
// processAsync attaches placeholder result/rows objects to the response and
// starts a background goroutine that retrieves the real results when the
// server finishes the query. The placeholders carry an error channel that
// getAsync completes.
func (sr *snowflakeRestful) processAsync(
	ctx context.Context,
	respd *execResponse,
	headers map[string]string,
	timeout time.Duration,
	cfg *Config) (*execResponse, error) {
	// Placeholder objects handed back to the caller while the query runs.
	rows := new(snowflakeRows)
	res := new(snowflakeResult)
	switch getResultType(ctx) {
	case execResultType:
		// Exec path: caller receives a driver.Result placeholder.
		res.queryID = respd.Data.QueryID
		res.status = QueryStatusInProgress
		res.errChannel = make(chan error)
		respd.Data.AsyncResult = res
	case queryResultType:
		// Query path: caller receives a driver.Rows placeholder.
		rows.queryID = respd.Data.QueryID
		rows.status = QueryStatusInProgress
		rows.errChannel = make(chan error)
		rows.ctx = ctx
		respd.Data.AsyncRows = rows
	default:
		// No async placeholder needed for this result type.
		return respd, nil
	}
	// Fetch the asynchronous results in the background.
	resultURL := sr.getFullURL(respd.Data.GetResultURL, nil)
	go GoroutineWrapper(
		ctx,
		func() {
			if err := sr.getAsync(ctx, headers, resultURL, timeout, res, rows, cfg); err != nil {
				logger.WithContext(ctx).Errorf("error while calling getAsync. %v", err)
			}
		},
	)
	return respd, nil
}
func (sr *snowflakeRestful) getAsync(
ctx context.Context,
headers map[string]string,
URL *url.URL,
timeout time.Duration,
res *snowflakeResult,
rows *snowflakeRows,
cfg *Config) error {
resType := getResultType(ctx)
var errChannel chan error
sfError := &SnowflakeError{
Number: ErrAsync,
}
if resType == execResultType {
errChannel = res.errChannel
sfError.QueryID = res.queryID
} else {
errChannel = rows.errChannel
sfError.QueryID = rows.queryID
}
defer close(errChannel)
token, _, _ := sr.TokenAccessor.GetTokens()
headers[headerAuthorizationKey] = fmt.Sprintf(headerSnowflakeToken, token)
respd, err := getQueryResultWithRetriesForAsyncMode(ctx, sr, URL, headers, timeout)
if err != nil {
logger.WithContext(ctx).Errorf("error: %v", err)
sfError.Message = err.Error()
errChannel <- sfError
return err
}
sc := &snowflakeConn{rest: sr, cfg: cfg, currentTimeProvider: defaultTimeProvider}
if respd.Success {
if resType == execResultType {
res.insertID = -1
if isDml(respd.Data.StatementTypeID) {
res.affectedRows, err = updateRows(respd.Data)
if err != nil {
return err
}
} else if isMultiStmt(&respd.Data) {
r, err := sc.handleMultiExec(ctx, respd.Data)
if err != nil {
res.errChannel <- err
return err
}
res.affectedRows, err = r.RowsAffected()
if err != nil {
res.errChannel <- err
return err
}
}
res.queryID = respd.Data.QueryID
res.errChannel <- nil // mark exec status complete
} else {
rows.sc = sc
rows.queryID = respd.Data.QueryID
if isMultiStmt(&respd.Data) {
gitextract_osgz45y5/ ├── .cursor/ │ └── rules/ │ ├── overall-guidelines.mdc │ └── testing.mdc ├── .github/ │ ├── CODEOWNERS │ ├── ISSUE_TEMPLATE/ │ │ ├── BUG_REPORT.md │ │ └── FEATURE_REQUEST.md │ ├── ISSUE_TEMPLATE.md │ ├── PULL_REQUEST_TEMPLATE.md │ ├── repo_meta.yaml │ ├── secret_scanning.yml │ └── workflows/ │ ├── build-test.yml │ ├── changelog.yml │ ├── cla_bot.yml │ ├── jira_close.yml │ ├── jira_comment.yml │ ├── jira_issue.yml │ ├── parameters/ │ │ └── public/ │ │ ├── rsa_key_golang_aws.p8.gpg │ │ ├── rsa_key_golang_azure.p8.gpg │ │ └── rsa_key_golang_gcp.p8.gpg │ ├── parameters_aws_auth_tests.json.gpg │ ├── parameters_aws_golang.json.gpg │ ├── parameters_azure_golang.json.gpg │ ├── parameters_gcp_golang.json.gpg │ ├── rsa-2048-private-key.p8.gpg │ ├── rsa_keys/ │ │ ├── rsa_key.p8.gpg │ │ └── rsa_key_invalid.p8.gpg │ └── semgrep.yml ├── .gitignore ├── .golangci.yml ├── .pre-commit-config.yaml ├── .windsurf/ │ └── rules/ │ └── go.md ├── CHANGELOG.md ├── CONTRIBUTING.md ├── Jenkinsfile ├── LICENSE ├── Makefile ├── README.md ├── SECURITY.md ├── aaa_test.go ├── arrow_chunk.go ├── arrow_stream.go ├── arrow_test.go ├── arrowbatches/ │ ├── batches.go │ ├── batches_test.go │ ├── context.go │ ├── converter.go │ ├── converter_test.go │ └── schema.go ├── assert_test.go ├── async.go ├── async_test.go ├── auth.go ├── auth_generic_test_methods_test.go ├── auth_oauth.go ├── auth_oauth_test.go ├── auth_test.go ├── auth_wif.go ├── auth_wif_test.go ├── auth_with_external_browser_test.go ├── auth_with_keypair_test.go ├── auth_with_mfa_test.go ├── auth_with_oauth_okta_authorization_code_test.go ├── auth_with_oauth_okta_client_credentials_test.go ├── auth_with_oauth_snowflake_authorization_code_test.go ├── auth_with_oauth_snowflake_authorization_code_wildcards_test.go ├── auth_with_oauth_test.go ├── auth_with_okta_test.go ├── auth_with_pat_test.go ├── authexternalbrowser.go ├── authexternalbrowser_test.go ├── authokta.go ├── authokta_test.go ├── azure_storage_client.go ├── 
azure_storage_client_test.go ├── bind_uploader.go ├── bindings_test.go ├── chunk.go ├── chunk_downloader.go ├── chunk_downloader_test.go ├── chunk_test.go ├── ci/ │ ├── _init.sh │ ├── build.bat │ ├── build.sh │ ├── container/ │ │ ├── test_authentication.sh │ │ └── test_component.sh │ ├── docker/ │ │ └── rockylinux9/ │ │ └── Dockerfile │ ├── gofix.sh │ ├── image/ │ │ ├── Dockerfile │ │ ├── build.sh │ │ ├── scripts/ │ │ │ └── entrypoint.sh │ │ └── update.sh │ ├── scripts/ │ │ ├── .gitignore │ │ ├── README.md │ │ ├── ca.crt │ │ ├── ca.der │ │ ├── ca.key │ │ ├── ca.srl │ │ ├── execute_tests.sh │ │ ├── hang_webserver.py │ │ ├── login_internal_docker.sh │ │ ├── run_wiremock.sh │ │ ├── setup_connection_parameters.sh │ │ ├── setup_gpg.sh │ │ ├── wiremock-ecdsa-pub.key │ │ ├── wiremock-ecdsa.crt │ │ ├── wiremock-ecdsa.csr │ │ ├── wiremock-ecdsa.key │ │ ├── wiremock-ecdsa.p12 │ │ ├── wiremock.crt │ │ ├── wiremock.csr │ │ ├── wiremock.key │ │ ├── wiremock.p12 │ │ └── wiremock.v3.ext │ ├── test.bat │ ├── test.sh │ ├── test_authentication.sh │ ├── test_revocation.sh │ ├── test_rockylinux9.sh │ ├── test_rockylinux9_docker.sh │ ├── test_wif.sh │ └── wif/ │ └── parameters/ │ ├── parameters_wif.json.gpg │ ├── rsa_wif_aws_azure.gpg │ └── rsa_wif_gcp.gpg ├── client.go ├── client_configuration.go ├── client_configuration_test.go ├── client_test.go ├── cmd/ │ ├── arrow/ │ │ ├── .gitignore │ │ ├── Makefile │ │ └── transform_batches_to_rows/ │ │ ├── Makefile │ │ └── transform_batches_to_rows.go │ ├── logger/ │ │ ├── Makefile │ │ └── logger.go │ ├── mfa/ │ │ ├── Makefile │ │ └── mfa.go │ ├── programmatic_access_token/ │ │ ├── .gitignore │ │ ├── Makefile │ │ └── pat.go │ ├── tomlfileconnection/ │ │ ├── .gitignore │ │ └── Makefile │ └── variant/ │ ├── Makefile │ └── insertvariantobject.go ├── codecov.yml ├── connection.go ├── connection_configuration_test.go ├── connection_test.go ├── connection_util.go ├── connectivity_diagnosis.go ├── connectivity_diagnosis_test.go ├── connector.go ├── 
connector_test.go ├── converter.go ├── converter_test.go ├── crl.go ├── crl_test.go ├── ctx_test.go ├── datatype.go ├── datatype_test.go ├── datetime.go ├── datetime_test.go ├── doc.go ├── driver.go ├── driver_ocsp_test.go ├── driver_test.go ├── dsn.go ├── easy_logging.go ├── easy_logging_test.go ├── encrypt_util.go ├── encrypt_util_test.go ├── errors.go ├── errors_test.go ├── file_compression_type.go ├── file_transfer_agent.go ├── file_transfer_agent_test.go ├── file_util.go ├── file_util_test.go ├── function_wrapper_test.go ├── function_wrappers.go ├── gcs_storage_client.go ├── gcs_storage_client_test.go ├── go.mod ├── go.sum ├── gosnowflake.mak ├── heartbeat.go ├── heartbeat_test.go ├── htap.go ├── htap_test.go ├── internal/ │ ├── arrow/ │ │ └── arrow.go │ ├── compilation/ │ │ ├── cgo_disabled.go │ │ ├── cgo_enabled.go │ │ ├── linking_mode.go │ │ ├── minicore_disabled.go │ │ └── minicore_enabled.go │ ├── config/ │ │ ├── assert_test.go │ │ ├── auth_type.go │ │ ├── config.go │ │ ├── config_bool.go │ │ ├── connection_configuration.go │ │ ├── connection_configuration_test.go │ │ ├── crl_mode.go │ │ ├── dsn.go │ │ ├── dsn_test.go │ │ ├── ocsp_mode.go │ │ ├── priv_key.go │ │ ├── tls_config.go │ │ ├── tls_config_test.go │ │ └── token_accessor.go │ ├── errors/ │ │ └── errors.go │ ├── logger/ │ │ ├── accessor.go │ │ ├── accessor_test.go │ │ ├── context.go │ │ ├── easy_logging_support.go │ │ ├── interfaces.go │ │ ├── level_filtering.go │ │ ├── optional_interfaces.go │ │ ├── proxy.go │ │ ├── secret_detector.go │ │ ├── secret_detector_test.go │ │ ├── secret_masking.go │ │ ├── secret_masking_test.go │ │ ├── slog_handler.go │ │ ├── slog_logger.go │ │ └── source_location_test.go │ ├── os/ │ │ ├── libc_info.go │ │ ├── libc_info_linux.go │ │ ├── libc_info_notlinux.go │ │ ├── libc_info_test.go │ │ ├── os_details.go │ │ ├── os_details_linux.go │ │ ├── os_details_notlinux.go │ │ ├── os_details_test.go │ │ └── test_data/ │ │ └── sample_os_release │ ├── query/ │ │ ├── 
response_types.go │ │ └── transform.go │ └── types/ │ └── types.go ├── local_storage_client.go ├── local_storage_client_test.go ├── location.go ├── location_test.go ├── locker.go ├── log.go ├── log_client_test.go ├── log_test.go ├── minicore.go ├── minicore_disabled_test.go ├── minicore_posix.go ├── minicore_provider_darwin_amd64.go ├── minicore_provider_darwin_arm64.go ├── minicore_provider_linux_amd64.go ├── minicore_provider_linux_arm64.go ├── minicore_provider_windows_amd64.go ├── minicore_provider_windows_arm64.go ├── minicore_test.go ├── minicore_windows.go ├── monitoring.go ├── multistatement.go ├── multistatement_test.go ├── ocsp.go ├── ocsp_test.go ├── old_driver_test.go ├── os_specific_posix.go ├── os_specific_windows.go ├── parameters.json.local ├── parameters.json.tmpl ├── permissions_test.go ├── platform_detection.go ├── platform_detection_test.go ├── prepared_statement_test.go ├── priv_key_test.go ├── put_get_test.go ├── put_get_user_stage_test.go ├── put_get_with_aws_test.go ├── query.go ├── restful.go ├── restful_test.go ├── result.go ├── retry.go ├── retry_test.go ├── rows.go ├── rows_test.go ├── s3_storage_client.go ├── s3_storage_client_test.go ├── secret_detector.go ├── secret_detector_test.go ├── secure_storage_manager.go ├── secure_storage_manager_linux.go ├── secure_storage_manager_notlinux.go ├── secure_storage_manager_test.go ├── sflog/ │ ├── interface.go │ ├── levels.go │ └── slog.go ├── sqlstate.go ├── statement.go ├── statement_test.go ├── storage_client.go ├── storage_client_test.go ├── storage_file_util_test.go ├── structured_type.go ├── structured_type_arrow_batches_test.go ├── structured_type_read_test.go ├── structured_type_write_test.go ├── telemetry.go ├── telemetry_test.go ├── test_data/ │ ├── .gitignore │ ├── connections.toml │ ├── multistatements.sql │ ├── multistatements_drop.sql │ ├── orders_100.csv │ ├── orders_101.csv │ ├── put_get_1.txt │ ├── snowflake/ │ │ └── session/ │ │ └── token │ ├── userdata1.parquet │ ├── 
userdata1_orc │ └── wiremock/ │ └── mappings/ │ ├── auth/ │ │ ├── external_browser/ │ │ │ ├── parallel_login_first_fails_then_successful_flow.json │ │ │ ├── parallel_login_successful_flow.json │ │ │ └── successful_flow.json │ │ ├── mfa/ │ │ │ ├── parallel_login_first_fails_then_successful_flow.json │ │ │ └── parallel_login_successful_flow.json │ │ ├── oauth2/ │ │ │ ├── authorization_code/ │ │ │ │ ├── error_from_idp.json │ │ │ │ ├── invalid_code.json │ │ │ │ ├── successful_flow.json │ │ │ │ ├── successful_flow_with_offline_access.json │ │ │ │ └── successful_flow_with_single_use_refresh_token.json │ │ │ ├── client_credentials/ │ │ │ │ ├── invalid_client.json │ │ │ │ └── successful_flow.json │ │ │ ├── login_request.json │ │ │ ├── login_request_with_expired_access_token.json │ │ │ └── refresh_token/ │ │ │ ├── invalid_refresh_token.json │ │ │ ├── successful_flow.json │ │ │ └── successful_flow_without_new_refresh_token.json │ │ ├── password/ │ │ │ ├── invalid_host.json │ │ │ ├── invalid_password.json │ │ │ ├── invalid_user.json │ │ │ ├── successful_flow.json │ │ │ └── successful_flow_with_telemetry.json │ │ ├── pat/ │ │ │ ├── invalid_token.json │ │ │ ├── reading_fresh_token.json │ │ │ └── successful_flow.json │ │ └── wif/ │ │ ├── azure/ │ │ │ ├── http_error.json │ │ │ ├── missing_issuer_claim.json │ │ │ ├── missing_sub_claim.json │ │ │ ├── non_json_response.json │ │ │ ├── successful_flow_azure_functions.json │ │ │ ├── successful_flow_azure_functions_custom_entra_resource.json │ │ │ ├── successful_flow_azure_functions_no_client_id.json │ │ │ ├── successful_flow_azure_functions_v2_issuer.json │ │ │ ├── successful_flow_basic.json │ │ │ ├── successful_flow_v2_issuer.json │ │ │ └── unparsable_token.json │ │ └── gcp/ │ │ ├── http_error.json │ │ ├── missing_issuer_claim.json │ │ ├── missing_sub_claim.json │ │ ├── successful_flow.json │ │ ├── successful_impersionation_flow.json │ │ └── unparsable_token.json │ ├── close_session.json │ ├── hang.json │ ├── minicore/ │ │ └── auth/ │ 
│ ├── disabled_flow.json │ │ ├── successful_flow.json │ │ └── successful_flow_linux.json │ ├── ocsp/ │ │ ├── auth_failure.json │ │ ├── malformed.json │ │ └── unauthorized.json │ ├── platform_detection/ │ │ ├── aws_ec2_instance_success.json │ │ ├── aws_identity_success.json │ │ ├── azure_managed_identity_success.json │ │ ├── azure_vm_success.json │ │ ├── gce_identity_success.json │ │ ├── gce_vm_success.json │ │ └── timeout_response.json │ ├── query/ │ │ ├── long_running_query.json │ │ ├── query_by_id_timeout.json │ │ ├── query_execution.json │ │ ├── query_monitoring.json │ │ ├── query_monitoring_error.json │ │ ├── query_monitoring_malformed.json │ │ └── query_monitoring_running.json │ ├── retry/ │ │ └── redirection_retry_workflow.json │ ├── select1.json │ └── telemetry/ │ ├── custom_telemetry.json │ └── telemetry.json ├── test_utils_test.go ├── tls_config.go ├── tls_config_test.go ├── transaction.go ├── transaction_test.go ├── transport.go ├── transport_test.go ├── url_util.go ├── util.go ├── util_test.go ├── uuid.go ├── value_awaiter.go ├── version.go └── wiremock_test.go
Showing preview only (325K chars total). Download the full file or copy to clipboard to get everything.
SYMBOL INDEX (3380 symbols across 214 files)
FILE: aaa_test.go
function TestShowServerVersion (line 7) | func TestShowServerVersion(t *testing.T) {
FILE: arrow_chunk.go
type arrowResultChunk (line 15) | type arrowResultChunk struct
method decodeArrowChunk (line 22) | func (arc *arrowResultChunk) decodeArrowChunk(ctx context.Context, row...
method decodeArrowBatchRaw (line 59) | func (arc *arrowResultChunk) decodeArrowBatchRaw() (*[]arrow.Record, e...
function buildFirstArrowChunk (line 73) | func buildFirstArrowChunk(rowsetBase64 string, loc *time.Location, alloc...
FILE: arrow_stream.go
type ArrowStreamLoader (line 28) | type ArrowStreamLoader interface
type ArrowStreamBatch (line 40) | type ArrowStreamBatch struct
method NumRows (line 50) | func (asb *ArrowStreamBatch) NumRows() int64 { return asb.numrows }
method GetStream (line 55) | func (asb *ArrowStreamBatch) GetStream(ctx context.Context) (io.ReadCl...
method downloadChunkStreamHelper (line 79) | func (asb *ArrowStreamBatch) downloadChunkStreamHelper(ctx context.Con...
type streamWrapReader (line 65) | type streamWrapReader struct
method Close (line 70) | func (w *streamWrapReader) Close() error {
type snowflakeArrowStreamChunkDownloader (line 133) | type snowflakeArrowStreamChunkDownloader struct
method Location (line 144) | func (scd *snowflakeArrowStreamChunkDownloader) Location() *time.Locat...
method TotalRows (line 151) | func (scd *snowflakeArrowStreamChunkDownloader) TotalRows() int64 { re...
method RowTypes (line 153) | func (scd *snowflakeArrowStreamChunkDownloader) RowTypes() []query.Exe...
method JSONData (line 157) | func (scd *snowflakeArrowStreamChunkDownloader) JSONData() [][]*string {
method maybeFirstBatch (line 161) | func (scd *snowflakeArrowStreamChunkDownloader) maybeFirstBatch() ([]b...
method GetBatches (line 182) | func (scd *snowflakeArrowStreamChunkDownloader) GetBatches() (out []Ar...
method NextResultSet (line 219) | func (scd *snowflakeArrowStreamChunkDownloader) NextResultSet(ctx cont...
method hasNextResultSet (line 254) | func (scd *snowflakeArrowStreamChunkDownloader) hasNextResultSet() bool {
FILE: arrow_test.go
function TestArrowBatchDataProvider (line 19) | func TestArrowBatchDataProvider(t *testing.T) {
function TestArrowBigInt (line 60) | func TestArrowBigInt(t *testing.T) {
function TestArrowBigFloat (line 98) | func TestArrowBigFloat(t *testing.T) {
function TestArrowIntPrecision (line 138) | func TestArrowIntPrecision(t *testing.T) {
function TestArrowFloatPrecision (line 230) | func TestArrowFloatPrecision(t *testing.T) {
function TestArrowTimePrecision (line 360) | func TestArrowTimePrecision(t *testing.T) {
function TestArrowVariousTypes (line 449) | func TestArrowVariousTypes(t *testing.T) {
function TestArrowMemoryCleanedUp (line 556) | func TestArrowMemoryCleanedUp(t *testing.T) {
FILE: arrowbatches/batches.go
type ArrowBatch (line 18) | type ArrowBatch struct
method WithContext (line 26) | func (rb *ArrowBatch) WithContext(ctx context.Context) *ArrowBatch {
method Fetch (line 33) | func (rb *ArrowBatch) Fetch() (*[]arrow.Record, error) {
method GetRowCount (line 76) | func (rb *ArrowBatch) GetRowCount() int {
method GetLocation (line 81) | func (rb *ArrowBatch) GetLocation() *time.Location {
method GetRowTypes (line 86) | func (rb *ArrowBatch) GetRowTypes() []query.ExecResponseRowType {
method ArrowSnowflakeTimestampToTime (line 91) | func (rb *ArrowBatch) ArrowSnowflakeTimestampToTime(rec arrow.Record, ...
method GetAllocator (line 133) | func (rb *ArrowBatch) GetAllocator() memory.Allocator {
function GetArrowBatches (line 99) | func GetArrowBatches(rows sf.SnowflakeRows) ([]*ArrowBatch, error) {
function countArrowBatchRows (line 125) | func countArrowBatchRows(recs *[]arrow.Record) (cnt int) {
FILE: arrowbatches/batches_test.go
type testConn (line 30) | type testConn struct
method close (line 140) | func (tc *testConn) close() {
method queryRows (line 147) | func (tc *testConn) queryRows(ctx context.Context, t *testing.T, query...
function repoRoot (line 37) | func repoRoot(t *testing.T) string {
function readPrivateKey (line 61) | func readPrivateKey(t *testing.T, path string) *rsa.PrivateKey {
function testConfig (line 85) | func testConfig(t *testing.T) *sf.Config {
function openTestConn (line 121) | func openTestConn(ctx context.Context, t *testing.T) *testConn {
function queryRawRows (line 172) | func queryRawRows(ctx context.Context, t *testing.T, query string) (sf.S...
function TestGetArrowBatches (line 182) | func TestGetArrowBatches(t *testing.T) {
function TestGetArrowBatchesHighPrecision (line 215) | func TestGetArrowBatchesHighPrecision(t *testing.T) {
function TestGetArrowBatchesLargeResultSet (line 247) | func TestGetArrowBatchesLargeResultSet(t *testing.T) {
function TestGetArrowBatchesWithTimestampOption (line 305) | func TestGetArrowBatchesWithTimestampOption(t *testing.T) {
function TestGetArrowBatchesJSONResponseError (line 341) | func TestGetArrowBatchesJSONResponseError(t *testing.T) {
function TestTimestampConversionDistantDates (line 406) | func TestTimestampConversionDistantDates(t *testing.T) {
function TestTimestampConversionWithOriginalTimestamp (line 502) | func TestTimestampConversionWithOriginalTimestamp(t *testing.T) {
FILE: arrowbatches/context.go
constant UseNanosecondTimestamp (line 11) | UseNanosecondTimestamp = ia.UseNanosecondTimestamp
constant UseMicrosecondTimestamp (line 12) | UseMicrosecondTimestamp = ia.UseMicrosecondTimestamp
constant UseMillisecondTimestamp (line 13) | UseMillisecondTimestamp = ia.UseMillisecondTimestamp
constant UseSecondTimestamp (line 14) | UseSecondTimestamp = ia.UseSecondTimestamp
constant UseOriginalTimestamp (line 15) | UseOriginalTimestamp = ia.UseOriginalTimestamp
function WithArrowBatches (line 19) | func WithArrowBatches(ctx context.Context) context.Context {
function WithTimestampOption (line 25) | func WithTimestampOption(ctx context.Context, option ia.TimestampOption)...
function WithUtf8Validation (line 31) | func WithUtf8Validation(ctx context.Context) context.Context {
FILE: arrowbatches/converter.go
function arrowToRecord (line 26) | func arrowToRecord(ctx context.Context, record arrow.Record, pool memory...
function arrowToRecordSingleColumn (line 53) | func arrowToRecordSingleColumn(ctx context.Context, field arrow.Field, c...
function arrowStringRecordToColumn (line 219) | func arrowStringRecordToColumn(
function intToBigFloat (line 247) | func intToBigFloat(val int64, scale int64) *big.Float {
function ArrowSnowflakeTimestampToTime (line 254) | func ArrowSnowflakeTimestampToTime(
function extractEpoch (line 312) | func extractEpoch(value int64, scale int) int64 {
function extractFraction (line 316) | func extractFraction(value int64, scale int) int64 {
FILE: arrowbatches/converter_test.go
function stringIntToDecimal (line 23) | func stringIntToDecimal(src string) (decimal128.Num, bool) {
function decimalToBigInt (line 33) | func decimalToBigInt(num decimal128.Num) *big.Int {
function TestArrowToRecord (line 39) | func TestArrowToRecord(t *testing.T) {
FILE: arrowbatches/schema.go
function recordToSchema (line 13) | func recordToSchema(sc *arrow.Schema, rowType []query.ExecResponseRowTyp...
function recordToSchemaRecursive (line 19) | func recordToSchemaRecursive(inFields []arrow.Field, rowType []query.Exe...
function recordToSchemaSingleField (line 39) | func recordToSchemaSingleField(fieldMetadata query.FieldMetadata, f arro...
FILE: assert_test.go
function assertNilE (line 16) | func assertNilE(t *testing.T, actual any, descriptions ...string) {
function assertNilF (line 21) | func assertNilF(t *testing.T, actual any, descriptions ...string) {
function assertNotNilE (line 26) | func assertNotNilE(t *testing.T, actual any, descriptions ...string) {
function assertNotNilF (line 31) | func assertNotNilF(t *testing.T, actual any, descriptions ...string) {
function assertErrIsF (line 36) | func assertErrIsF(t *testing.T, actual, expected error, descriptions ......
function assertErrIsE (line 41) | func assertErrIsE(t *testing.T, actual, expected error, descriptions ......
function assertErrorsAsF (line 46) | func assertErrorsAsF(t *testing.T, err error, target any, descriptions ....
function assertEqualE (line 51) | func assertEqualE(t *testing.T, actual any, expected any, descriptions ....
function assertEqualF (line 56) | func assertEqualF(t *testing.T, actual any, expected any, descriptions ....
function assertEqualIgnoringWhitespaceE (line 61) | func assertEqualIgnoringWhitespaceE(t *testing.T, actual string, expecte...
function assertEqualEpsilonE (line 66) | func assertEqualEpsilonE(t *testing.T, actual, expected, epsilon float64...
function assertDeepEqualE (line 71) | func assertDeepEqualE(t *testing.T, actual any, expected any, descriptio...
function assertNotEqualF (line 76) | func assertNotEqualF(t *testing.T, actual any, expected any, description...
function assertNotEqualE (line 81) | func assertNotEqualE(t *testing.T, actual any, expected any, description...
function assertBytesEqualE (line 86) | func assertBytesEqualE(t *testing.T, actual []byte, expected []byte, des...
function assertTrueF (line 91) | func assertTrueF(t *testing.T, actual bool, descriptions ...string) {
function assertTrueE (line 96) | func assertTrueE(t *testing.T, actual bool, descriptions ...string) {
function assertFalseF (line 101) | func assertFalseF(t *testing.T, actual bool, descriptions ...string) {
function assertFalseE (line 106) | func assertFalseE(t *testing.T, actual bool, descriptions ...string) {
function assertStringContainsE (line 111) | func assertStringContainsE(t *testing.T, actual string, expectedToContai...
function assertStringContainsF (line 116) | func assertStringContainsF(t *testing.T, actual string, expectedToContai...
function assertEmptyStringE (line 121) | func assertEmptyStringE(t *testing.T, actual string, descriptions ...str...
function assertHasPrefixF (line 126) | func assertHasPrefixF(t *testing.T, actual string, expectedPrefix string...
function assertHasPrefixE (line 131) | func assertHasPrefixE(t *testing.T, actual string, expectedPrefix string...
function assertBetweenE (line 136) | func assertBetweenE(t *testing.T, value float64, min float64, max float6...
function assertBetweenInclusiveE (line 141) | func assertBetweenInclusiveE(t *testing.T, value float64, min float64, m...
function assertEmptyE (line 146) | func assertEmptyE[T any](t *testing.T, actual []T, descriptions ...strin...
function fatalOnNonEmpty (line 151) | func fatalOnNonEmpty(t *testing.T, errMsg string) {
function errorOnNonEmpty (line 158) | func errorOnNonEmpty(t *testing.T, errMsg string) {
function formatErrorMessage (line 165) | func formatErrorMessage(errMsg string) string {
function validateNil (line 169) | func validateNil(actual any, descriptions ...string) string {
function validateNotNil (line 177) | func validateNotNil(actual any, descriptions ...string) string {
function validateErrIs (line 185) | func validateErrIs(actual, expected error, descriptions ...string) string {
function validateErrorsAs (line 201) | func validateErrorsAs(err error, target any, descriptions ...string) str...
function validateEqual (line 214) | func validateEqual(actual any, expected any, descriptions ...string) str...
function removeWhitespaces (line 225) | func removeWhitespaces(s string) string {
function validateEqualIgnoringWhitespace (line 233) | func validateEqualIgnoringWhitespace(actual string, expected string, des...
function validateEqualEpsilon (line 244) | func validateEqualEpsilon(actual, expected, epsilon float64, description...
function validateDeepEqual (line 251) | func validateDeepEqual(actual any, expected any, descriptions ...string)...
function validateNotEqual (line 262) | func validateNotEqual(actual any, expected any, descriptions ...string) ...
function validateBytesEqual (line 273) | func validateBytesEqual(actual []byte, expected []byte, descriptions ......
function validateStringContains (line 284) | func validateStringContains(actual string, expectedToContain string, des...
function validateEmptyString (line 295) | func validateEmptyString(actual string, descriptions ...string) string {
function validateHasPrefix (line 303) | func validateHasPrefix(actual string, expectedPrefix string, description...
function validateValueBetween (line 314) | func validateValueBetween(value float64, min float64, max float64, descr...
function validateValueBetweenInclusive (line 326) | func validateValueBetweenInclusive(value float64, min float64, max float...
function validateEmpty (line 338) | func validateEmpty[T any](value []T, descriptions ...string) string {
function joinDescriptions (line 346) | func joinDescriptions(descriptions ...string) string {
function isNil (line 350) | func isNil(value any) bool {
FILE: async.go
method processAsync (line 11) | func (sr *snowflakeRestful) processAsync(
method getAsync (line 49) | func (sr *snowflakeRestful) getAsync(
function getQueryResultWithRetriesForAsyncMode (line 141) | func getQueryResultWithRetriesForAsyncMode(
FILE: async_test.go
function TestAsyncMode (line 10) | func TestAsyncMode(t *testing.T) {
function TestAsyncModePing (line 46) | func TestAsyncModePing(t *testing.T) {
function TestAsyncModeMultiStatement (line 62) | func TestAsyncModeMultiStatement(t *testing.T) {
function TestAsyncModeCancel (line 89) | func TestAsyncModeCancel(t *testing.T) {
function TestAsyncQueryFail (line 100) | func TestAsyncQueryFail(t *testing.T) {
function TestMultipleAsyncQueries (line 119) | func TestMultipleAsyncQueries(t *testing.T) {
function retrieveRows (line 153) | func retrieveRows(rows *sql.Rows, ch chan string) {
function TestLongRunningAsyncQuery (line 170) | func TestLongRunningAsyncQuery(t *testing.T) {
function TestLongRunningAsyncQueryFetchResultByID (line 201) | func TestLongRunningAsyncQueryFetchResultByID(t *testing.T) {
FILE: auth.go
constant clientType (line 30) | clientType = "Go"
constant clientStoreTemporaryCredential (line 34) | clientStoreTemporaryCredential = "CLIENT_STORE_TEMPORARY_CREDENTIAL"
constant clientRequestMfaToken (line 35) | clientRequestMfaToken = "CLIENT_REQUEST_MFA_TOKEN"
constant idTokenAuthenticator (line 36) | idTokenAuthenticator = "ID_TOKEN"
constant AuthTypeSnowflake (line 44) | AuthTypeSnowflake = sfconfig.AuthTypeSnowflake
constant AuthTypeOAuth (line 46) | AuthTypeOAuth = sfconfig.AuthTypeOAuth
constant AuthTypeExternalBrowser (line 48) | AuthTypeExternalBrowser = sfconfig.AuthTypeExternalBrowser
constant AuthTypeOkta (line 50) | AuthTypeOkta = sfconfig.AuthTypeOkta
constant AuthTypeJwt (line 52) | AuthTypeJwt = sfconfig.AuthTypeJwt
constant AuthTypeTokenAccessor (line 54) | AuthTypeTokenAccessor = sfconfig.AuthTypeTokenAccessor
constant AuthTypeUsernamePasswordMFA (line 56) | AuthTypeUsernamePasswordMFA = sfconfig.AuthTypeUsernamePasswordMFA
constant AuthTypePat (line 58) | AuthTypePat = sfconfig.AuthTypePat
constant AuthTypeOAuthAuthorizationCode (line 60) | AuthTypeOAuthAuthorizationCode = sfconfig.AuthTypeOAuthAuthorizationCode
constant AuthTypeOAuthClientCredentials (line 62) | AuthTypeOAuthClientCredentials = sfconfig.AuthTypeOAuthClientCredentials
constant AuthTypeWorkloadIdentityFederation (line 64) | AuthTypeWorkloadIdentityFederation = sfconfig.AuthTypeWorkloadIdentityFe...
function isOauthNativeFlow (line 67) | func isOauthNativeFlow(authType AuthType) bool {
type authRequestClientEnvironment (line 86) | type authRequestClientEnvironment struct
type authRequestData (line 107) | type authRequestData struct
type authRequest (line 125) | type authRequest struct
type nameValueParameter (line 129) | type nameValueParameter struct
type authResponseSessionInfo (line 134) | type authResponseSessionInfo struct
type authResponseMain (line 141) | type authResponseMain struct
type authResponse (line 165) | type authResponse struct
function postAuth (line 172) | func postAuth(
function getHeaders (line 239) | func getHeaders() map[string]string {
function authenticate (line 250) | func authenticate(
function newAuthRequestClientEnvironment (line 369) | func newAuthRequestClientEnvironment() authRequestClientEnvironment {
function createRequestBody (line 419) | func createRequestBody(sc *snowflakeConn, sessionParameters map[string]any,
type oauthLockKey (line 551) | type oauthLockKey struct
method lockID (line 573) | func (o *oauthLockKey) lockID() string {
function newOAuthAuthorizationCodeLockKey (line 557) | func newOAuthAuthorizationCodeLockKey(tokenRequestURL, user string) *oau...
function newRefreshTokenLockKey (line 565) | func newRefreshTokenLockKey(tokenRequestURL, user string) *oauthLockKey {
function authenticateByAuthorizationCode (line 577) | func authenticateByAuthorizationCode(sc *snowflakeConn) (string, error) {
function prepareJWTToken (line 608) | func prepareJWTToken(config *Config) (string, error) {
type tokenLockKey (line 642) | type tokenLockKey struct
method lockID (line 664) | func (m *tokenLockKey) lockID() string {
function newMfaTokenLockKey (line 648) | func newMfaTokenLockKey(snowflakeHost, user string) *tokenLockKey {
function newIDTokenLockKey (line 656) | func newIDTokenLockKey(snowflakeHost, user string) *tokenLockKey {
function authenticateWithConfig (line 668) | func authenticateWithConfig(sc *snowflakeConn) error {
function doRefreshTokenWithLock (line 779) | func doRefreshTokenWithLock(sc *snowflakeConn) {
function chooseLockerForAuth (line 797) | func chooseLockerForAuth(cfg *Config) locker {
function isEligibleForParallelLogin (line 807) | func isEligibleForParallelLogin(cfg *Config, cacheEnabled ConfigBool) bo...
FILE: auth_generic_test_methods_test.go
function getAuthTestConfigFromEnv (line 9) | func getAuthTestConfigFromEnv() (*Config, error) {
function getAuthTestsConfig (line 22) | func getAuthTestsConfig(t *testing.T, authMethod AuthType) (*Config, err...
function isTestRunningInDockerContainer (line 31) | func isTestRunningInDockerContainer() bool {
FILE: auth_oauth.go
constant oauthSuccessHTML (line 25) | oauthSuccessHTML = `<!DOCTYPE html><html><head><meta charset="UTF-8"/>
constant localApplicationClientCredentials (line 30) | localApplicationClientCredentials = "LOCAL_APPLICATION"
type oauthClient (line 37) | type oauthClient struct
method authenticateByOAuthAuthorizationCode (line 93) | func (oauthClient *oauthClient) authenticateByOAuthAuthorizationCode()...
method doAuthenticateByOAuthAuthorizationCode (line 133) | func (oauthClient *oauthClient) doAuthenticateByOAuthAuthorizationCode...
method setupListener (line 187) | func (oauthClient *oauthClient) setupListener() (*net.TCPListener, int...
method exchangeAccessToken (line 197) | func (oauthClient *oauthClient) exchangeAccessToken(codeReq *http.Requ...
method buildAuthorizationCodeConfig (line 228) | func (oauthClient *oauthClient) buildAuthorizationCodeConfig(callbackP...
method eligibleForDefaultClientCredentials (line 247) | func (oauthClient *oauthClient) eligibleForDefaultClientCredentials() ...
method isSnowflakeAsIDP (line 251) | func (oauthClient *oauthClient) isSnowflakeAsIDP() bool {
method authorizationURL (line 256) | func (oauthClient *oauthClient) authorizationURL() string {
method defaultAuthorizationURL (line 260) | func (oauthClient *oauthClient) defaultAuthorizationURL() string {
method tokenURL (line 264) | func (oauthClient *oauthClient) tokenURL() string {
method defaultTokenURL (line 268) | func (oauthClient *oauthClient) defaultTokenURL() string {
method buildRedirectURI (line 272) | func (oauthClient *oauthClient) buildRedirectURI(port int) string {
method buildScopes (line 279) | func (oauthClient *oauthClient) buildScopes() []string {
method authenticateByOAuthClientCredentials (line 354) | func (oauthClient *oauthClient) authenticateByOAuthClientCredentials()...
method buildClientCredentialsConfig (line 375) | func (oauthClient *oauthClient) buildClientCredentialsConfig() (*clien...
method refreshToken (line 387) | func (oauthClient *oauthClient) refreshToken() error {
method accessTokenSpec (line 442) | func (oauthClient *oauthClient) accessTokenSpec() *secureTokenSpec {
method refreshTokenSpec (line 446) | func (oauthClient *oauthClient) refreshTokenSpec() *secureTokenSpec {
method logIfHTTPInUse (line 450) | func (oauthClient *oauthClient) logIfHTTPInUse(u string) {
function newOauthClient (line 48) | func newOauthClient(ctx context.Context, cfg *Config, sc *snowflakeConn)...
type oauthBrowserResult (line 87) | type oauthBrowserResult struct
function handleOAuthSocket (line 290) | func handleOAuthSocket(tcpListener *net.TCPListener, successChan chan []...
type authorizationCodeProvider (line 333) | type authorizationCodeProvider interface
type browserBasedAuthorizationCodeProvider (line 339) | type browserBasedAuthorizationCodeProvider struct
method run (line 342) | func (provider *browserBasedAuthorizationCodeProvider) run(authorizati...
method createState (line 346) | func (provider *browserBasedAuthorizationCodeProvider) createState() s...
method createCodeVerifier (line 350) | func (provider *browserBasedAuthorizationCodeProvider) createCodeVerif...
type tokenExchangeResponseBody (line 437) | type tokenExchangeResponseBody struct
FILE: auth_oauth_test.go
function TestUnitOAuthAuthorizationCode (line 17) | func TestUnitOAuthAuthorizationCode(t *testing.T) {
function TestUnitOAuthClientCredentials (line 167) | func TestUnitOAuthClientCredentials(t *testing.T) {
function TestAuthorizationCodeFlow (line 260) | func TestAuthorizationCodeFlow(t *testing.T) {
function TestClientCredentialsFlow (line 529) | func TestClientCredentialsFlow(t *testing.T) {
function TestEligibleForDefaultClientCredentials (line 637) | func TestEligibleForDefaultClientCredentials(t *testing.T) {
type nonInteractiveAuthorizationCodeProvider (line 720) | type nonInteractiveAuthorizationCodeProvider struct
method run (line 729) | func (provider *nonInteractiveAuthorizationCodeProvider) run(authoriza...
method createState (line 752) | func (provider *nonInteractiveAuthorizationCodeProvider) createState()...
method createCodeVerifier (line 759) | func (provider *nonInteractiveAuthorizationCodeProvider) createCodeVer...
method assertResponseBodyContains (line 763) | func (provider *nonInteractiveAuthorizationCodeProvider) assertRespons...
FILE: auth_test.go
function TestUnitPostAuth (line 24) | func TestUnitPostAuth(t *testing.T) {
function postAuthFailServiceIssue (line 59) | func postAuthFailServiceIssue(_ context.Context, _ *snowflakeRestful, _ ...
function postAuthFailWrongAccount (line 65) | func postAuthFailWrongAccount(_ context.Context, _ *snowflakeRestful, _ ...
function postAuthFailUnknown (line 71) | func postAuthFailUnknown(_ context.Context, _ *snowflakeRestful, _ *http...
function postAuthSuccessWithErrorCode (line 77) | func postAuthSuccessWithErrorCode(_ context.Context, _ *snowflakeRestful...
function postAuthSuccessWithInvalidErrorCode (line 85) | func postAuthSuccessWithInvalidErrorCode(_ context.Context, _ *snowflake...
function postAuthSuccess (line 93) | func postAuthSuccess(_ context.Context, _ *snowflakeRestful, _ *http.Cli...
function postAuthCheckSAMLResponse (line 106) | func postAuthCheckSAMLResponse(_ context.Context, _ *snowflakeRestful, _...
function postAuthCheckOAuth (line 132) | func postAuthCheckOAuth(
function postAuthCheckPasscode (line 166) | func postAuthCheckPasscode(_ context.Context, _ *snowflakeRestful, _ *ht...
function postAuthCheckPasscodeInPassword (line 187) | func postAuthCheckPasscodeInPassword(_ context.Context, _ *snowflakeRest...
function postAuthCheckUsernamePasswordMfa (line 208) | func postAuthCheckUsernamePasswordMfa(_ context.Context, _ *snowflakeRes...
function postAuthCheckUsernamePasswordMfaToken (line 231) | func postAuthCheckUsernamePasswordMfaToken(_ context.Context, _ *snowfla...
function postAuthCheckUsernamePasswordMfaFailed (line 254) | func postAuthCheckUsernamePasswordMfaFailed(_ context.Context, _ *snowfl...
function postAuthCheckExternalBrowser (line 272) | func postAuthCheckExternalBrowser(_ context.Context, _ *snowflakeRestful...
function postAuthCheckExternalBrowserToken (line 295) | func postAuthCheckExternalBrowserToken(_ context.Context, _ *snowflakeRe...
function postAuthCheckExternalBrowserFailed (line 318) | func postAuthCheckExternalBrowserFailed(_ context.Context, _ *snowflakeR...
type restfulTestWrapper (line 336) | type restfulTestWrapper struct
method postAuthOktaWithNewToken (line 340) | func (rtw restfulTestWrapper) postAuthOktaWithNewToken(_ context.Conte...
function getDefaultSnowflakeConn (line 386) | func getDefaultSnowflakeConn() *snowflakeConn {
function TestUnitAuthenticateWithTokenAccessor (line 409) | func TestUnitAuthenticateWithTokenAccessor(t *testing.T) {
function TestUnitAuthenticate (line 454) | func TestUnitAuthenticate(t *testing.T) {
function TestUnitAuthenticateSaml (line 538) | func TestUnitAuthenticateSaml(t *testing.T) {
function TestUnitAuthenticateOAuth (line 562) | func TestUnitAuthenticateOAuth(t *testing.T) {
function TestUnitAuthenticatePasscode (line 578) | func TestUnitAuthenticatePasscode(t *testing.T) {
function TestUnitAuthenticateJWT (line 602) | func TestUnitAuthenticateJWT(t *testing.T) {
function TestUnitAuthenticateUsernamePasswordMfa (line 673) | func TestUnitAuthenticateUsernamePasswordMfa(t *testing.T) {
function TestUnitAuthenticateWithConfigMFA (line 702) | func TestUnitAuthenticateWithConfigMFA(t *testing.T) {
function TestMfaParallelLogin (line 725) | func TestMfaParallelLogin(t *testing.T) {
function TestUnitAuthenticateWithConfigOkta (line 776) | func TestUnitAuthenticateWithConfigOkta(t *testing.T) {
function TestUnitAuthenticateWithExternalBrowserParallel (line 806) | func TestUnitAuthenticateWithExternalBrowserParallel(t *testing.T) {
function TestUnitAuthenticateWithConfigExternalBrowserWithFailedSAMLResponse (line 910) | func TestUnitAuthenticateWithConfigExternalBrowserWithFailedSAMLResponse...
function TestUnitAuthenticateExternalBrowser (line 926) | func TestUnitAuthenticateExternalBrowser(t *testing.T) {
function TestUsernamePasswordMfaCaching (line 957) | func TestUsernamePasswordMfaCaching(t *testing.T) {
function TestUsernamePasswordMfaCachingWithPasscode (line 984) | func TestUsernamePasswordMfaCachingWithPasscode(t *testing.T) {
function TestUsernamePasswordMfaCachingWithPasscodeInPassword (line 1012) | func TestUsernamePasswordMfaCachingWithPasscodeInPassword(t *testing.T) {
function TestDisableUsernamePasswordMfaCaching (line 1038) | func TestDisableUsernamePasswordMfaCaching(t *testing.T) {
function TestExternalBrowserCaching (line 1066) | func TestExternalBrowserCaching(t *testing.T) {
function TestDisableExternalBrowserCaching (line 1093) | func TestDisableExternalBrowserCaching(t *testing.T) {
function TestOktaRetryWithNewToken (line 1117) | func TestOktaRetryWithNewToken(t *testing.T) {
function TestContextPropagatedToAuthWhenUsingOpen (line 1150) | func TestContextPropagatedToAuthWhenUsingOpen(t *testing.T) {
function TestContextPropagatedToAuthWhenUsingOpenDB (line 1162) | func TestContextPropagatedToAuthWhenUsingOpenDB(t *testing.T) {
function TestPatSuccessfulFlow (line 1176) | func TestPatSuccessfulFlow(t *testing.T) {
function TestPatTokenRotation (line 1194) | func TestPatTokenRotation(t *testing.T) {
function TestPatInvalidToken (line 1215) | func TestPatInvalidToken(t *testing.T) {
function TestWithOauthAuthorizationCodeFlowManual (line 1232) | func TestWithOauthAuthorizationCodeFlowManual(t *testing.T) {
function TestWithOAuthClientCredentialsFlowManual (line 1272) | func TestWithOAuthClientCredentialsFlowManual(t *testing.T) {
FILE: auth_wif.go
constant awsWif (line 28) | awsWif wifProviderType = "AWS"
constant gcpWif (line 29) | gcpWif wifProviderType = "GCP"
constant azureWif (line 30) | azureWif wifProviderType = "AZURE"
constant oidcWif (line 31) | oidcWif wifProviderType = "OIDC"
constant gcpMetadataFlavorHeaderName (line 33) | gcpMetadataFlavorHeaderName = "Metadata-Flavor"
constant gcpMetadataFlavor (line 34) | gcpMetadataFlavor = "Google"
constant defaultMetadataServiceBase (line 35) | defaultMetadataServiceBase = "http://169.254.169.254"
constant defaultGcpIamCredentialsBase (line 36) | defaultGcpIamCredentialsBase = "https://iamcredentials.googleapis.com"
constant snowflakeAudience (line 37) | snowflakeAudience = "snowflakecomputing.com"
type wifProviderType (line 40) | type wifProviderType
type wifAttestation (line 42) | type wifAttestation struct
type wifAttestationCreator (line 48) | type wifAttestationCreator interface
type wifAttestationProvider (line 52) | type wifAttestationProvider struct
method getAttestation (line 87) | func (p *wifAttestationProvider) getAttestation(identityProvider strin...
function createWifAttestationProvider (line 61) | func createWifAttestationProvider(ctx context.Context, cfg *Config, tele...
type awsAttestastationMetadataProviderFactory (line 102) | type awsAttestastationMetadataProviderFactory
type awsIdentityAttestationCreator (line 104) | type awsIdentityAttestationCreator struct
method createAttestation (line 191) | func (c *awsIdentityAttestationCreator) createAttestation() (*wifAttes...
method createStsRequest (line 256) | func (c *awsIdentityAttestationCreator) createStsRequest(hostname stri...
method signRequestWithSigV4 (line 268) | func (c *awsIdentityAttestationCreator) signRequestWithSigV4(ctx conte...
method createBase64EncodedRequestCredential (line 275) | func (c *awsIdentityAttestationCreator) createBase64EncodedRequestCred...
type gcpIdentityAttestationCreator (line 110) | type gcpIdentityAttestationCreator struct
method createAttestation (line 295) | func (c *gcpIdentityAttestationCreator) createAttestation() (*wifAttes...
method createGcpIdentityTokenFromMetadataService (line 303) | func (c *gcpIdentityAttestationCreator) createGcpIdentityTokenFromMeta...
method createTokenRequest (line 323) | func (c *gcpIdentityAttestationCreator) createTokenRequest() (*http.Re...
method createGcpIdentityViaImpersonation (line 334) | func (c *gcpIdentityAttestationCreator) createGcpIdentityViaImpersonat...
method fetchServiceToken (line 375) | func (c *gcpIdentityAttestationCreator) fetchServiceToken(client *http...
method fetchImpersonatedToken (line 411) | func (c *gcpIdentityAttestationCreator) fetchImpersonatedToken(targetS...
type oidcIdentityAttestationCreator (line 117) | type oidcIdentityAttestationCreator struct
method createAttestation (line 521) | func (c *oidcIdentityAttestationCreator) createAttestation() (*wifAtte...
type awsAttestationMetadataProvider (line 121) | type awsAttestationMetadataProvider interface
type defaultAwsAttestationMetadataProvider (line 127) | type defaultAwsAttestationMetadataProvider struct
method awsCredentials (line 146) | func (s *defaultAwsAttestationMetadataProvider) awsCredentials() (aws....
method awsCredentialsViaRoleChaining (line 150) | func (s *defaultAwsAttestationMetadataProvider) awsCredentialsViaRoleC...
method assumeRole (line 163) | func (s *defaultAwsAttestationMetadataProvider) assumeRole(creds aws.C...
method awsRegion (line 187) | func (s *defaultAwsAttestationMetadataProvider) awsRegion() string {
function createDefaultAwsAttestationMetadataProvider (line 133) | func createDefaultAwsAttestationMetadataProvider(ctx context.Context, cf...
function stsHostname (line 246) | func stsHostname(region string) string {
function fetchTokenFromMetadataService (line 460) | func fetchTokenFromMetadataService(req *http.Request, cfg *Config, telem...
function extractSubIssWithoutVerifyingSignature (line 485) | func extractSubIssWithoutVerifyingSignature(token string) (subject strin...
function extractClaimsMap (line 511) | func extractClaimsMap(token string) (map[string]any, error) {
type azureAttestationMetadataProvider (line 545) | type azureAttestationMetadataProvider interface
type defaultAzureAttestationMetadataProvider (line 551) | type defaultAzureAttestationMetadataProvider struct
method identityEndpoint (line 553) | func (p *defaultAzureAttestationMetadataProvider) identityEndpoint() s...
method identityHeader (line 557) | func (p *defaultAzureAttestationMetadataProvider) identityHeader() str...
method clientID (line 561) | func (p *defaultAzureAttestationMetadataProvider) clientID() string {
type azureIdentityAttestationCreator (line 565) | type azureIdentityAttestationCreator struct
method createAttestation (line 574) | func (a *azureIdentityAttestationCreator) createAttestation() (*wifAtt...
method azureFunctionsIdentityRequest (line 650) | func (a *azureIdentityAttestationCreator) azureFunctionsIdentityReques...
method azureVMIdentityRequest (line 666) | func (a *azureIdentityAttestationCreator) azureVMIdentityRequest() (*h...
function determineEntraResource (line 629) | func determineEntraResource(config *Config) string {
function extractTokenFromJSON (line 637) | func extractTokenFromJSON(tokenJSON string) (string, error) {
FILE: auth_wif_test.go
type mockWifAttestationCreator (line 18) | type mockWifAttestationCreator struct
method createAttestation (line 23) | func (m *mockWifAttestationCreator) createAttestation() (*wifAttestati...
function TestGetAttestation (line 32) | func TestGetAttestation(t *testing.T) {
function TestAwsIdentityAttestationCreator (line 151) | func TestAwsIdentityAttestationCreator(t *testing.T) {
type mockAwsAttestationMetadataProvider (line 354) | type mockAwsAttestationMetadataProvider struct
method awsCredentials (line 368) | func (m *mockAwsAttestationMetadataProvider) awsCredentials() (aws.Cre...
method awsCredentialsViaRoleChaining (line 372) | func (m *mockAwsAttestationMetadataProvider) awsCredentialsViaRoleChai...
method awsRegion (line 382) | func (m *mockAwsAttestationMetadataProvider) awsRegion() string {
function TestGcpIdentityAttestationCreator (line 386) | func TestGcpIdentityAttestationCreator(t *testing.T) {
function TestOidcIdentityAttestationCreator (line 463) | func TestOidcIdentityAttestationCreator(t *testing.T) {
function TestAzureIdentityAttestationCreator (line 554) | func TestAzureIdentityAttestationCreator(t *testing.T) {
type mockAzureAttestationMetadataProvider (line 715) | type mockAzureAttestationMetadataProvider struct
method identityEndpoint (line 721) | func (m *mockAzureAttestationMetadataProvider) identityEndpoint() stri...
method identityHeader (line 725) | func (m *mockAzureAttestationMetadataProvider) identityHeader() string {
method clientID (line 729) | func (m *mockAzureAttestationMetadataProvider) clientID() string {
function azureFunctionsMetadataProvider (line 733) | func azureFunctionsMetadataProvider() *mockAzureAttestationMetadataProvi...
function azureVMMetadataProvider (line 741) | func azureVMMetadataProvider() *mockAzureAttestationMetadataProvider {
function TestWorkloadIdentityAuthOnCloudVM (line 753) | func TestWorkloadIdentityAuthOnCloudVM(t *testing.T) {
FILE: auth_with_external_browser_test.go
function TestExternalBrowserSuccessful (line 14) | func TestExternalBrowserSuccessful(t *testing.T) {
function TestExternalBrowserFailed (line 30) | func TestExternalBrowserFailed(t *testing.T) {
function TestExternalBrowserTimeout (line 48) | func TestExternalBrowserTimeout(t *testing.T) {
function TestExternalBrowserMismatchUser (line 66) | func TestExternalBrowserMismatchUser(t *testing.T) {
function TestClientStoreCredentials (line 87) | func TestClientStoreCredentials(t *testing.T) {
type ExternalBrowserProcessResult (line 130) | type ExternalBrowserProcessResult struct
function cleanupBrowserProcesses (line 146) | func cleanupBrowserProcesses(t *testing.T) {
function provideExternalBrowserCredentials (line 154) | func provideExternalBrowserCredentials(t *testing.T, ExternalBrowserProc...
function verifyConnectionToSnowflakeAuthTests (line 163) | func verifyConnectionToSnowflakeAuthTests(t *testing.T, cfg *Config) (er...
function setupExternalBrowserTest (line 182) | func setupExternalBrowserTest(t *testing.T) *Config {
FILE: auth_with_keypair_test.go
function TestKeypairSuccessful (line 11) | func TestKeypairSuccessful(t *testing.T) {
function TestKeypairInvalidKey (line 19) | func TestKeypairInvalidKey(t *testing.T) {
function setupKeyPairTest (line 28) | func setupKeyPairTest(t *testing.T) *Config {
function loadRsaPrivateKeyForKeyPair (line 36) | func loadRsaPrivateKeyForKeyPair(t *testing.T, envName string) *rsa.Priv...
FILE: auth_with_mfa_test.go
function TestMfaSuccessful (line 12) | func TestMfaSuccessful(t *testing.T) {
function setupMfaTest (line 34) | func setupMfaTest(t *testing.T) *Config {
function getTOPTcodes (line 48) | func getTOPTcodes(t *testing.T) []string {
function verifyConnectionToSnowflakeUsingTotpCodes (line 59) | func verifyConnectionToSnowflakeUsingTotpCodes(t *testing.T, cfg *Config...
FILE: auth_with_oauth_okta_authorization_code_test.go
function TestOauthOktaAuthorizationCodeSuccessful (line 10) | func TestOauthOktaAuthorizationCodeSuccessful(t *testing.T) {
function TestOauthOktaAuthorizationCodeMismatchedUsername (line 26) | func TestOauthOktaAuthorizationCodeMismatchedUsername(t *testing.T) {
function TestOauthOktaAuthorizationCodeOktaTimeout (line 47) | func TestOauthOktaAuthorizationCodeOktaTimeout(t *testing.T) {
function TestOauthOktaAuthorizationCodeUsingTokenCache (line 55) | func TestOauthOktaAuthorizationCodeUsingTokenCache(t *testing.T) {
function setupOauthOktaAuthorizationCodeTest (line 78) | func setupOauthOktaAuthorizationCodeTest(t *testing.T) *Config {
FILE: auth_with_oauth_okta_client_credentials_test.go
function TestOauthOktaClientCredentialsSuccessful (line 9) | func TestOauthOktaClientCredentialsSuccessful(t *testing.T) {
function TestOauthOktaClientCredentialsMismatchedUsername (line 15) | func TestOauthOktaClientCredentialsMismatchedUsername(t *testing.T) {
function TestOauthOktaClientCredentialsUnauthorized (line 25) | func TestOauthOktaClientCredentialsUnauthorized(t *testing.T) {
function setupOauthOktaClientCredentialsTest (line 33) | func setupOauthOktaClientCredentialsTest(t *testing.T) *Config {
FILE: auth_with_oauth_snowflake_authorization_code_test.go
function TestOauthSnowflakeAuthorizationCodeSuccessful (line 10) | func TestOauthSnowflakeAuthorizationCodeSuccessful(t *testing.T) {
function TestOauthSnowflakeAuthorizationCodeMismatchedUsername (line 29) | func TestOauthSnowflakeAuthorizationCodeMismatchedUsername(t *testing.T) {
function TestOauthSnowflakeAuthorizationCodeTimeout (line 51) | func TestOauthSnowflakeAuthorizationCodeTimeout(t *testing.T) {
function TestOauthSnowflakeAuthorizationCodeUsingTokenCache (line 59) | func TestOauthSnowflakeAuthorizationCodeUsingTokenCache(t *testing.T) {
function TestOauthSnowflakeAuthorizationCodeWithoutTokenCache (line 85) | func TestOauthSnowflakeAuthorizationCodeWithoutTokenCache(t *testing.T) {
function setupOauthSnowflakeAuthorizationCodeTest (line 114) | func setupOauthSnowflakeAuthorizationCodeTest(t *testing.T) *Config {
function getOauthSnowflakeAuthorizationCodeTestCredentials (line 141) | func getOauthSnowflakeAuthorizationCodeTestCredentials() (*Config, error) {
FILE: auth_with_oauth_snowflake_authorization_code_wildcards_test.go
function TestOauthSnowflakeAuthorizationCodeWildcardsSuccessful (line 10) | func TestOauthSnowflakeAuthorizationCodeWildcardsSuccessful(t *testing.T) {
function TestOauthSnowflakeAuthorizationCodeWildcardsMismatchedUsername (line 29) | func TestOauthSnowflakeAuthorizationCodeWildcardsMismatchedUsername(t *t...
function TestOauthSnowflakeAuthorizationWildcardsCodeTimeout (line 51) | func TestOauthSnowflakeAuthorizationWildcardsCodeTimeout(t *testing.T) {
function TestOauthSnowflakeAuthorizationCodeWildcardsWithoutTokenCache (line 59) | func TestOauthSnowflakeAuthorizationCodeWildcardsWithoutTokenCache(t *te...
function setupOauthSnowflakeAuthorizationCodeWildcardsTest (line 88) | func setupOauthSnowflakeAuthorizationCodeWildcardsTest(t *testing.T) *Co...
FILE: auth_with_oauth_test.go
function TestOauthSuccessful (line 12) | func TestOauthSuccessful(t *testing.T) {
function TestOauthInvalidToken (line 21) | func TestOauthInvalidToken(t *testing.T) {
function TestOauthMismatchedUser (line 32) | func TestOauthMismatchedUser(t *testing.T) {
function setupOauthTest (line 46) | func setupOauthTest(t *testing.T) *Config {
function getOauthTestToken (line 54) | func getOauthTestToken(t *testing.T, cfg *Config) (string, error) {
function formData (line 91) | func formData(cfg *Config) url.Values {
type OAuthTokenResponse (line 102) | type OAuthTokenResponse struct
FILE: auth_with_okta_test.go
function TestOktaSuccessful (line 9) | func TestOktaSuccessful(t *testing.T) {
function TestOktaWrongCredentials (line 15) | func TestOktaWrongCredentials(t *testing.T) {
function TestOktaWrongAuthenticator (line 25) | func TestOktaWrongAuthenticator(t *testing.T) {
function setupOktaTest (line 38) | func setupOktaTest(t *testing.T) *Config {
FILE: auth_with_pat_test.go
type PatToken (line 12) | type PatToken struct
function TestEndToEndPatSuccessful (line 17) | func TestEndToEndPatSuccessful(t *testing.T) {
function TestEndToEndPatMismatchedUser (line 26) | func TestEndToEndPatMismatchedUser(t *testing.T) {
function TestEndToEndPatInvalidToken (line 38) | func TestEndToEndPatInvalidToken(t *testing.T) {
function setupEndToEndPatTest (line 47) | func setupEndToEndPatTest(t *testing.T) *Config {
function getEndToEndPatSetupCommandVariables (line 56) | func getEndToEndPatSetupCommandVariables() (*Config, error) {
function createEndToEndPatToken (line 63) | func createEndToEndPatToken(t *testing.T) *PatToken {
function removeEndToEndPatToken (line 83) | func removeEndToEndPatToken(t *testing.T, patTokenName string) {
function connectUsingOktaConnectionAndExecuteCustomCommand (line 99) | func connectUsingOktaConnectionAndExecuteCustomCommand(t *testing.T, cfg...
FILE: authexternalbrowser.go
constant samlSuccessHTML (line 24) | samlSuccessHTML = `<!DOCTYPE html><html><head><meta charset="UTF-8"/>
constant bufSize (line 31) | bufSize = 8192
function buildResponse (line 36) | func buildResponse(body string) (bytes.Buffer, error) {
function createLocalTCPListener (line 56) | func createLocalTCPListener(port int) (*net.TCPListener, error) {
function openBrowser (line 86) | func openBrowser(browserURL string) error {
function getIdpURLProofKey (line 106) | func getIdpURLProofKey(
function getLoginURL (line 164) | func getLoginURL(sr *snowflakeRestful, user string, callbackPort int) (s...
function generateProofKey (line 176) | func generateProofKey() string {
function getTokenFromResponse (line 192) | func getTokenFromResponse(response string) (string, error) {
type authenticateByExternalBrowserResult (line 209) | type authenticateByExternalBrowserResult struct
function authenticateByExternalBrowser (line 215) | func authenticateByExternalBrowser(ctx context.Context, sr *snowflakeRes...
function doAuthenticateByExternalBrowser (line 241) | func doAuthenticateByExternalBrowser(ctx context.Context, sr *snowflakeR...
type samlResponseProvider (line 344) | type samlResponseProvider interface
type externalBrowserSamlResponseProvider (line 348) | type externalBrowserSamlResponseProvider struct
method run (line 351) | func (e externalBrowserSamlResponseProvider) run(url string) error {
FILE: authexternalbrowser_test.go
function TestGetTokenFromResponseFail (line 15) | func TestGetTokenFromResponseFail(t *testing.T) {
function TestGetTokenFromResponse (line 32) | func TestGetTokenFromResponse(t *testing.T) {
function TestBuildResponse (line 54) | func TestBuildResponse(t *testing.T) {
function postAuthExternalBrowserError (line 64) | func postAuthExternalBrowserError(_ context.Context, _ *snowflakeRestful...
function postAuthExternalBrowserErrorDelayed (line 68) | func postAuthExternalBrowserErrorDelayed(_ context.Context, _ *snowflake...
function postAuthExternalBrowserFail (line 73) | func postAuthExternalBrowserFail(_ context.Context, _ *snowflakeRestful,...
function postAuthExternalBrowserFailWithCode (line 80) | func postAuthExternalBrowserFailWithCode(_ context.Context, _ *snowflake...
function TestUnitAuthenticateByExternalBrowser (line 88) | func TestUnitAuthenticateByExternalBrowser(t *testing.T) {
function TestAuthenticationTimeout (line 124) | func TestAuthenticationTimeout(t *testing.T) {
function Test_createLocalTCPListener (line 141) | func Test_createLocalTCPListener(t *testing.T) {
function TestUnitGetLoginURL (line 154) | func TestUnitGetLoginURL(t *testing.T) {
type nonInteractiveSamlResponseProvider (line 180) | type nonInteractiveSamlResponseProvider struct
method run (line 184) | func (provider *nonInteractiveSamlResponseProvider) run(url string) er...
FILE: authokta.go
type authOKTARequest (line 17) | type authOKTARequest struct
type authOKTAResponse (line 22) | type authOKTAResponse struct
function authenticateBySAML (line 53) | func authenticateBySAML(
function postBackURL (line 172) | func postBackURL(htmlData []byte) (url *url.URL, err error) {
function isPrefixEqual (line 190) | func isPrefixEqual(u1 *url.URL, u2 *url.URL) bool {
function postAuthSAML (line 204) | func postAuthSAML(
function postAuthOKTA (line 266) | func postAuthOKTA(
function getSSO (line 312) | func getSSO(
FILE: authokta_test.go
function TestUnitPostBackURL (line 13) | func TestUnitPostBackURL(t *testing.T) {
function TestUnitIsPrefixEqual (line 39) | func TestUnitIsPrefixEqual(t *testing.T) {
function getTestError (line 70) | func getTestError(_ context.Context, _ *snowflakeRestful, _ *url.URL, _ ...
function getTestAppBadGatewayError (line 77) | func getTestAppBadGatewayError(_ context.Context, _ *snowflakeRestful, _...
function getTestHTMLSuccess (line 84) | func getTestHTMLSuccess(_ context.Context, _ *snowflakeRestful, _ *url.U...
function TestUnitPostAuthSAML (line 91) | func TestUnitPostAuthSAML(t *testing.T) {
function TestUnitPostAuthOKTA (line 113) | func TestUnitPostAuthOKTA(t *testing.T) {
function TestUnitGetSSO (line 135) | func TestUnitGetSSO(t *testing.T) {
function postAuthSAMLError (line 161) | func postAuthSAMLError(_ context.Context, _ *snowflakeRestful, _ map[str...
function postAuthSAMLAuthFail (line 165) | func postAuthSAMLAuthFail(_ context.Context, _ *snowflakeRestful, _ map[...
function postAuthSAMLAuthFailWithCode (line 172) | func postAuthSAMLAuthFailWithCode(_ context.Context, _ *snowflakeRestful...
function postAuthSAMLAuthSuccessButInvalidURL (line 180) | func postAuthSAMLAuthSuccessButInvalidURL(_ context.Context, _ *snowflak...
function postAuthSAMLAuthSuccessButInvalidTokenURL (line 191) | func postAuthSAMLAuthSuccessButInvalidTokenURL(_ context.Context, _ *sno...
function postAuthSAMLAuthSuccessButInvalidSSOURL (line 202) | func postAuthSAMLAuthSuccessButInvalidSSOURL(_ context.Context, _ *snowf...
function postAuthSAMLAuthSuccess (line 213) | func postAuthSAMLAuthSuccess(_ context.Context, _ *snowflakeRestful, _ m...
function postAuthOKTAError (line 224) | func postAuthOKTAError(_ context.Context, _ *snowflakeRestful, _ map[str...
function postAuthOKTASuccess (line 228) | func postAuthOKTASuccess(_ context.Context, _ *snowflakeRestful, _ map[s...
function getSSOError (line 232) | func getSSOError(_ context.Context, _ *snowflakeRestful, _ *url.Values, ...
function getSSOSuccessButInvalidURL (line 236) | func getSSOSuccessButInvalidURL(_ context.Context, _ *snowflakeRestful, ...
function getSSOSuccess (line 240) | func getSSOSuccess(_ context.Context, _ *snowflakeRestful, _ *url.Values...
function getSSOSuccessButWrongPrefixURL (line 244) | func getSSOSuccessButWrongPrefixURL(_ context.Context, _ *snowflakeRestf...
function TestUnitAuthenticateBySAML (line 248) | func TestUnitAuthenticateBySAML(t *testing.T) {
function TestDisableSamlURLCheck (line 327) | func TestDisableSamlURLCheck(t *testing.T) {
FILE: azure_storage_client.go
type snowflakeAzureClient (line 26) | type snowflakeAzureClient struct
method createClient (line 44) | func (util *snowflakeAzureClient) createClient(info *execResponseStage...
method getFileHeader (line 72) | func (util *snowflakeAzureClient) getFileHeader(ctx context.Context, m...
method uploadFile (line 152) | func (util *snowflakeAzureClient) uploadFile(
method nativeDownloadFile (line 281) | func (util *snowflakeAzureClient) nativeDownloadFile(
method extractContainerNameAndPath (line 353) | func (util *snowflakeAzureClient) extractContainerNameAndPath(location...
method detectAzureTokenExpireError (line 371) | func (util *snowflakeAzureClient) detectAzureTokenExpireError(resp *ht...
type azureLocation (line 31) | type azureLocation struct
type azureAPI (line 36) | type azureAPI interface
function computeMD5ForFile (line 387) | func computeMD5ForFile(f *os.File) ([]byte, error) {
function createContainerClient (line 398) | func createContainerClient(clientURL string, cfg *Config, telemetry *sno...
FILE: azure_storage_client_test.go
function TestExtractContainerNameAndPath (line 20) | func TestExtractContainerNameAndPath(t *testing.T) {
function TestUnitDetectAzureTokenExpireError (line 45) | func TestUnitDetectAzureTokenExpireError(t *testing.T) {
type azureObjectAPIMock (line 110) | type azureObjectAPIMock struct
method UploadStream (line 118) | func (c *azureObjectAPIMock) UploadStream(ctx context.Context, body io...
method UploadFile (line 122) | func (c *azureObjectAPIMock) UploadFile(ctx context.Context, file *os....
method GetProperties (line 126) | func (c *azureObjectAPIMock) GetProperties(ctx context.Context, o *blo...
method DownloadFile (line 130) | func (c *azureObjectAPIMock) DownloadFile(ctx context.Context, file *o...
method DownloadStream (line 134) | func (c *azureObjectAPIMock) DownloadStream(ctx context.Context, o *bl...
function TestUploadFileWithAzureUploadFailedError (line 138) | func TestUploadFileWithAzureUploadFailedError(t *testing.T) {
function TestUploadStreamWithAzureUploadFailedError (line 200) | func TestUploadStreamWithAzureUploadFailedError(t *testing.T) {
function TestUploadFileWithAzureUploadTokenExpired (line 254) | func TestUploadFileWithAzureUploadTokenExpired(t *testing.T) {
function TestUploadFileWithAzureUploadNeedsRetry (line 331) | func TestUploadFileWithAzureUploadNeedsRetry(t *testing.T) {
function TestDownloadOneFileToAzureFailed (line 408) | func TestDownloadOneFileToAzureFailed(t *testing.T) {
function TestGetFileHeaderErrorStatus (line 456) | func TestGetFileHeaderErrorStatus(t *testing.T) {
function TestUploadFileToAzureClientCastFail (line 555) | func TestUploadFileToAzureClientCastFail(t *testing.T) {
function TestUploadFileToAzureSetsBlobContentMD5 (line 603) | func TestUploadFileToAzureSetsBlobContentMD5(t *testing.T) {
function TestUploadStreamToAzureSetsBlobContentMD5 (line 671) | func TestUploadStreamToAzureSetsBlobContentMD5(t *testing.T) {
function TestAzureGetHeaderClientCastFail (line 726) | func TestAzureGetHeaderClientCastFail(t *testing.T) {
FILE: bind_uploader.go
constant bindStageName (line 19) | bindStageName = "SYSTEM$BIND"
constant createTemporaryStageStmt (line 20) | createTemporaryStageStmt = "CREATE OR REPLACE TEMPORARY STAGE " + bindSt...
constant inputStreamBufferSize (line 24) | inputStreamBufferSize = 1024 * 1024 * 10
type bindUploader (line 27) | type bindUploader struct
method upload (line 47) | func (bu *bindUploader) upload(bindings []driver.NamedValue) (*execRes...
method uploadStreamInternal (line 78) | func (bu *bindUploader) uploadStreamInternal(
method createStageIfNeeded (line 105) | func (bu *bindUploader) createStageIfNeeded() error {
method buildRowsAsBytes (line 132) | func (bu *bindUploader) buildRowsAsBytes(columns []driver.NamedValue) ...
method createCSVRecord (line 187) | func (bu *bindUploader) createCSVRecord(data []any) []byte {
type bindingSchema (line 35) | type bindingSchema struct
type bindingValue (line 41) | type bindingValue struct
method processBindings (line 205) | func (sc *snowflakeConn) processBindings(
function getBindValues (line 238) | func getBindValues(bindings []driver.NamedValue, params *syncParams) (ma...
function bindingName (line 290) | func bindingName(nv driver.NamedValue, idx int) string {
function arrayBindValueCount (line 297) | func arrayBindValueCount(bindValues []driver.NamedValue) (int, error) {
function isArrayBind (line 309) | func isArrayBind(bindings []driver.NamedValue) bool {
function supportedArrayBind (line 321) | func supportedArrayBind(nv *driver.NamedValue) bool {
function supportedDecfloatBind (line 353) | func supportedDecfloatBind(nv *driver.NamedValue) bool {
function supportedNullBind (line 367) | func supportedNullBind(nv *driver.NamedValue) bool {
function supportedStructuredObjectWriterBind (line 376) | func supportedStructuredObjectWriterBind(nv *driver.NamedValue) bool {
function supportedStructuredArrayBind (line 384) | func supportedStructuredArrayBind(nv *driver.NamedValue) bool {
function supportedStructuredMapBind (line 389) | func supportedStructuredMapBind(nv *driver.NamedValue) bool {
FILE: bindings_test.go
constant createTableSQL (line 22) | createTableSQL = `create or replace table test_prep_statement(c1 INTEGER,
constant deleteTableSQL (line 25) | deleteTableSQL = "drop table if exists TEST_PREP_STATEMENT"
constant insertSQL (line 26) | insertSQL = "insert into TEST_PREP_STATEMENT values(?, ?, ?, ?, ?, ...
constant selectAllSQL (line 27) | selectAllSQL = "select * from TEST_PREP_STATEMENT ORDER BY 1"
constant createTableSQLBulkArray (line 29) | createTableSQLBulkArray = `create or replace table test_bulk_array(c1 IN...
constant deleteTableSQLBulkArray (line 31) | deleteTableSQLBulkArray = "drop table if exists test_bulk_array"
constant insertSQLBulkArray (line 32) | insertSQLBulkArray = "insert into test_bulk_array values(?, ?, ?, ?...
constant selectAllSQLBulkArray (line 33) | selectAllSQLBulkArray = "select * from test_bulk_array ORDER BY 1"
constant createTableSQLBulkArrayDateTimeTimestamp (line 35) | createTableSQLBulkArrayDateTimeTimestamp = `create or replace table test...
constant deleteTableSQLBulkArrayDateTimeTimestamp (line 37) | deleteTableSQLBulkArrayDateTimeTimestamp = "drop table if exists test_bu...
constant insertSQLBulkArrayDateTimeTimestamp (line 38) | insertSQLBulkArrayDateTimeTimestamp = "insert into test_bulk_array_...
constant selectAllSQLBulkArrayDateTimeTimestamp (line 39) | selectAllSQLBulkArrayDateTimeTimestamp = "select * from test_bulk_arra...
constant enableFeatureMaxLOBSize (line 41) | enableFeatureMaxLOBSize = "ALTER SESSION SET FEATURE_INCREASED_MAX_...
constant unsetFeatureMaxLOBSize (line 42) | unsetFeatureMaxLOBSize = "ALTER SESSION UNSET FEATURE_INCREASED_MA...
constant enableLargeVarcharAndBinary (line 43) | enableLargeVarcharAndBinary = "ALTER SESSION SET ENABLE_LARGE_VARCHAR_A...
constant disableLargeVarcharAndBinary (line 44) | disableLargeVarcharAndBinary = "ALTER SESSION SET ENABLE_LARGE_VARCHAR_A...
constant unsetLargeVarcharAndBinary (line 45) | unsetLargeVarcharAndBinary = "ALTER SESSION UNSET ENABLE_LARGE_VARCHAR...
constant smallSize (line 47) | smallSize = 16 * 1024 * 1024
constant largeSize (line 48) | largeSize = 64 * 1024 * 1024
constant lobRandomRange (line 50) | lobRandomRange = 100000
function TestBindingFloat64 (line 53) | func TestBindingFloat64(t *testing.T) {
function TestBindingUint64 (line 83) | func TestBindingUint64(t *testing.T) {
function TestBindingDateTimeTimestamp (line 96) | func TestBindingDateTimeTimestamp(t *testing.T) {
function TestBindingBinary (line 167) | func TestBindingBinary(t *testing.T) {
function TestBindingTimestampTZ (line 189) | func TestBindingTimestampTZ(t *testing.T) {
function TestBindingTimePtrInStruct (line 222) | func TestBindingTimePtrInStruct(t *testing.T) {
function TestBindingTimeInStruct (line 271) | func TestBindingTimeInStruct(t *testing.T) {
function TestBindingInterface (line 319) | func TestBindingInterface(t *testing.T) {
function TestBindingInterfaceString (line 351) | func TestBindingInterfaceString(t *testing.T) {
function TestBulkArrayBindingUUID (line 383) | func TestBulkArrayBindingUUID(t *testing.T) {
function TestBulkArrayBindingInterfaceNil (line 453) | func TestBulkArrayBindingInterfaceNil(t *testing.T) {
function TestBulkArrayBindingInterface (line 520) | func TestBulkArrayBindingInterface(t *testing.T) {
function TestBulkArrayBindingInterfaceDateTimeTimestamp (line 617) | func TestBulkArrayBindingInterfaceDateTimeTimestamp(t *testing.T) {
function TestBindingArray (line 717) | func TestBindingArray(t *testing.T) {
function TestBindingBulkArray (line 723) | func TestBindingBulkArray(t *testing.T) {
function testBindingArray (line 730) | func testBindingArray(t *testing.T, bulk bool) {
function TestBulkArrayBinding (line 819) | func TestBulkArrayBinding(t *testing.T) {
function TestSupportedDecfloatBind (line 875) | func TestSupportedDecfloatBind(t *testing.T) {
function TestBindingsWithSameValue (line 939) | func TestBindingsWithSameValue(t *testing.T) {
function TestBulkArrayBindingTimeWithPrecision (line 1063) | func TestBulkArrayBindingTimeWithPrecision(t *testing.T) {
function TestBulkArrayMultiPartBinding (line 1104) | func TestBulkArrayMultiPartBinding(t *testing.T) {
function TestBulkArrayMultiPartBindingInt (line 1151) | func TestBulkArrayMultiPartBindingInt(t *testing.T) {
function TestBulkArrayMultiPartBindingWithNull (line 1188) | func TestBulkArrayMultiPartBindingWithNull(t *testing.T) {
function TestFunctionParameters (line 1252) | func TestFunctionParameters(t *testing.T) {
function TestVariousBindingModes (line 1330) | func TestVariousBindingModes(t *testing.T) {
function skipMaxLobSizeTestOnGithubActions (line 1416) | func skipMaxLobSizeTestOnGithubActions(t *testing.T) {
function TestLOBRetrievalWithArrow (line 1422) | func TestLOBRetrievalWithArrow(t *testing.T) {
function TestLOBRetrievalWithJSON (line 1426) | func TestLOBRetrievalWithJSON(t *testing.T) {
function testLOBRetrieval (line 1430) | func testLOBRetrieval(t *testing.T, useArrowFormat bool) {
function TestMaxLobSize (line 1460) | func TestMaxLobSize(t *testing.T) {
function TestInsertLobDataWithLiteralArrow (line 1483) | func TestInsertLobDataWithLiteralArrow(t *testing.T) {
function TestInsertLobDataWithLiteralJSON (line 1489) | func TestInsertLobDataWithLiteralJSON(t *testing.T) {
function TestInsertLobDataWithBindingsArrow (line 1495) | func TestInsertLobDataWithBindingsArrow(t *testing.T) {
function TestInsertLobDataWithBindingsJSON (line 1501) | func TestInsertLobDataWithBindingsJSON(t *testing.T) {
function testInsertLOBData (line 1507) | func testInsertLOBData(t *testing.T, useArrowFormat bool, isLiteral bool) {
function fastStringGeneration (line 1600) | func fastStringGeneration(size int) string {
function getRandomDate (line 1628) | func getRandomDate() time.Time {
function getRandomBool (line 1632) | func getRandomBool() bool {
FILE: chunk.go
constant defaultChunkBufferSize (line 14) | defaultChunkBufferSize int64 = 8 << 10
constant defaultStringBufferSize (line 15) | defaultStringBufferSize int64 = 512
type largeChunkDecoder (line 18) | type largeChunkDecoder struct
method mkError (line 53) | func (lcd *largeChunkDecoder) mkError(s string) error {
method decode (line 57) | func (lcd *largeChunkDecoder) decode() ([][]*string, error) {
method decodeRow (line 88) | func (lcd *largeChunkDecoder) decodeRow() ([]*string, error) {
method decodeCell (line 119) | func (lcd *largeChunkDecoder) decodeCell() (*string, error) {
method decodeString (line 139) | func (lcd *largeChunkDecoder) decodeString() (string, error) {
method decodeEscaped (line 163) | func (lcd *largeChunkDecoder) decodeEscaped() error {
method readRune (line 204) | func (lcd *largeChunkDecoder) readRune() rune {
method getu4WithPrefix (line 212) | func (lcd *largeChunkDecoder) getu4WithPrefix() (rune, int) {
method getu4 (line 230) | func (lcd *largeChunkDecoder) getu4() rune {
method nextByteNonWhitespace (line 249) | func (lcd *largeChunkDecoder) nextByteNonWhitespace() byte {
method rewind (line 261) | func (lcd *largeChunkDecoder) rewind(n int) {
method nextByte (line 266) | func (lcd *largeChunkDecoder) nextByte() byte {
method ensureBytes (line 286) | func (lcd *largeChunkDecoder) ensureBytes(n int) {
method fillBuffer (line 301) | func (lcd *largeChunkDecoder) fillBuffer(b []byte) int {
function decodeLargeChunk (line 33) | func decodeLargeChunk(r io.Reader, rowCount int, cellCount int) ([][]*st...
FILE: chunk_downloader.go
type chunkDownloader (line 30) | type chunkDownloader interface
type snowflakeChunkDownloader (line 43) | type snowflakeChunkDownloader struct
method totalUncompressedSize (line 74) | func (scd *snowflakeChunkDownloader) totalUncompressedSize() (acc int6...
method start (line 81) | func (scd *snowflakeChunkDownloader) start() error {
method schedule (line 149) | func (scd *snowflakeChunkDownloader) schedule() {
method checkErrorRetry (line 171) | func (scd *snowflakeChunkDownloader) checkErrorRetry() error {
method next (line 200) | func (scd *snowflakeChunkDownloader) next() (chunkRowType, error) {
method reset (line 247) | func (scd *snowflakeChunkDownloader) reset() {
method getChunkMetas (line 251) | func (scd *snowflakeChunkDownloader) getChunkMetas() []query.ExecRespo...
method getQueryResultFormat (line 255) | func (scd *snowflakeChunkDownloader) getQueryResultFormat() resultForm...
method setNextChunkDownloader (line 259) | func (scd *snowflakeChunkDownloader) setNextChunkDownloader(nextDownlo...
method getNextChunkDownloader (line 263) | func (scd *snowflakeChunkDownloader) getNextChunkDownloader() chunkDow...
method getRowType (line 267) | func (scd *snowflakeChunkDownloader) getRowType() []query.ExecResponse...
method getRawArrowBatches (line 278) | func (scd *snowflakeChunkDownloader) getRawArrowBatches() []*rawArrowB...
method releaseRawArrowBatches (line 288) | func (scd *snowflakeChunkDownloader) releaseRawArrowBatches() {
method getConfigParams (line 304) | func (scd *snowflakeChunkDownloader) getConfigParams() (*syncParams, e...
method startArrowBatches (line 326) | func (scd *snowflakeChunkDownloader) startArrowBatches() error {
type rawArrowBatchData (line 272) | type rawArrowBatchData struct
function getChunk (line 311) | func getChunk(
type largeResultSetReader (line 362) | type largeResultSetReader struct
method Read (line 367) | func (r *largeResultSetReader) Read(p []byte) (n int, err error) {
function downloadChunk (line 394) | func downloadChunk(ctx context.Context, scd *snowflakeChunkDownloader, i...
function downloadChunkHelper (line 410) | func downloadChunkHelper(ctx context.Context, scd *snowflakeChunkDownloa...
function decodeChunk (line 452) | func decodeChunk(ctx context.Context, scd *snowflakeChunkDownloader, idx...
function populateJSONRowSet (line 543) | func populateJSONRowSet(dst []chunkRowType, src [][]*string) {
function countRawArrowBatchRows (line 550) | func countRawArrowBatchRows(recs *[]arrow.Record) (cnt int) {
function getAllocator (line 560) | func getAllocator(ctx context.Context) memory.Allocator {
function usesArrowBatches (line 568) | func usesArrowBatches(ctx context.Context) bool {
FILE: chunk_downloader_test.go
function TestChunkDownloaderDoesNotStartWhenArrowParsingCausesError (line 11) | func TestChunkDownloaderDoesNotStartWhenArrowParsingCausesError(t *testi...
function TestWithArrowBatchesWhenQueryReturnsNoRowsWhenUsingNativeGoSQLInterface (line 33) | func TestWithArrowBatchesWhenQueryReturnsNoRowsWhenUsingNativeGoSQLInter...
function TestWithArrowBatchesWhenQueryReturnsRowsAndReadingRows (line 46) | func TestWithArrowBatchesWhenQueryReturnsRowsAndReadingRows(t *testing.T) {
function TestWithArrowBatchesWhenQueryReturnsNoRowsAndReadingRows (line 54) | func TestWithArrowBatchesWhenQueryReturnsNoRowsAndReadingRows(t *testing...
function TestWithArrowBatchesWhenQueryReturnsNoRowsAndReadingArrowBatchData (line 62) | func TestWithArrowBatchesWhenQueryReturnsNoRowsAndReadingArrowBatchData(...
FILE: chunk_test.go
function TestBadChunkData (line 22) | func TestBadChunkData(t *testing.T) {
function TestValidChunkData (line 49) | func TestValidChunkData(t *testing.T) {
function TestSmallBufferChunkData (line 95) | func TestSmallBufferChunkData(t *testing.T) {
function TestEnsureBytes (line 116) | func TestEnsureBytes(t *testing.T) {
function testDecodeOk (line 136) | func testDecodeOk(t *testing.T, s string) {
function testDecodeErr (line 172) | func testDecodeErr(t *testing.T, s string) {
function TestEnableArrowBatches (line 178) | func TestEnableArrowBatches(t *testing.T) {
function TestWithArrowBatchesAsync (line 254) | func TestWithArrowBatchesAsync(t *testing.T) {
function TestWithArrowBatchesButReturningJSON (line 330) | func TestWithArrowBatchesButReturningJSON(t *testing.T) {
function TestWithArrowBatchesButReturningJSONAsync (line 334) | func TestWithArrowBatchesButReturningJSONAsync(t *testing.T) {
function testWithArrowBatchesButReturningJSON (line 338) | func testWithArrowBatchesButReturningJSON(t *testing.T, async bool) {
function TestWithArrowBatchesMultistatement (line 366) | func TestWithArrowBatchesMultistatement(t *testing.T) {
function TestWithArrowBatchesMultistatementAsync (line 370) | func TestWithArrowBatchesMultistatementAsync(t *testing.T) {
function testWithArrowBatchesMultistatement (line 374) | func testWithArrowBatchesMultistatement(t *testing.T, async bool) {
function TestWithArrowBatchesMultistatementWithJSONResponse (line 408) | func TestWithArrowBatchesMultistatementWithJSONResponse(t *testing.T) {
function TestWithArrowBatchesMultistatementWithLargeResultSet (line 433) | func TestWithArrowBatchesMultistatementWithLargeResultSet(t *testing.T) {
function TestQueryArrowStream (line 471) | func TestQueryArrowStream(t *testing.T) {
function TestQueryArrowStreamDescribeOnly (line 490) | func TestQueryArrowStreamDescribeOnly(t *testing.T) {
function TestRetainChunkWOHighPrecision (line 508) | func TestRetainChunkWOHighPrecision(t *testing.T) {
function TestQueryArrowStreamMultiStatement (line 541) | func TestQueryArrowStreamMultiStatement(t *testing.T) {
function TestQueryArrowStreamMultiStatementForJSONData (line 552) | func TestQueryArrowStreamMultiStatementForJSONData(t *testing.T) {
FILE: ci/scripts/hang_webserver.py
class HTTPRequestHandler (line 9) | class HTTPRequestHandler(BaseHTTPRequestHandler):
method do_POST (line 12) | def do_POST(self):
method ocspMocks (line 25) | def ocspMocks(self):
method authMocks (line 45) | def authMocks(self):
method __respond (line 65) | def __respond(self, http_code, content_type='application/json', body=N...
class ThreadedHTTPServer (line 76) | class ThreadedHTTPServer(ThreadingMixIn, HTTPServer):
method shutdown (line 79) | def shutdown(self):
class SimpleHttpServer (line 83) | class SimpleHttpServer():
method __init__ (line 84) | def __init__(self, ip, port):
method start (line 87) | def start(self):
method waitForThread (line 92) | def waitForThread(self):
method stop (line 95) | def stop(self):
FILE: client.go
type InternalClient (line 12) | type InternalClient interface
type httpClient (line 17) | type httpClient struct
method Get (line 21) | func (cli *httpClient) Get(
method Post (line 29) | func (cli *httpClient) Post(
FILE: client_configuration.go
constant levelOff (line 15) | levelOff string = "OFF"
constant levelError (line 16) | levelError string = "ERROR"
constant levelWarn (line 17) | levelWarn string = "WARN"
constant levelInfo (line 18) | levelInfo string = "INFO"
constant levelDebug (line 19) | levelDebug string = "DEBUG"
constant levelTrace (line 20) | levelTrace string = "TRACE"
constant defaultConfigName (line 24) | defaultConfigName = "sf_client_config.json"
constant clientConfEnvName (line 25) | clientConfEnvName = "SF_CLIENT_CONFIG_FILE"
function getClientConfig (line 28) | func getClientConfig(filePathFromConnectionString string) (*ClientConfig...
function findClientConfigFilePath (line 41) | func findClientConfigFilePath(filePathFromConnectionString string, confi...
function searchForConfigFile (line 54) | func searchForConfigFile(directories []string) (string, error) {
function existsFile (line 71) | func existsFile(filePath string) (bool, error) {
function clientConfigPredefinedDirs (line 82) | func clientConfigPredefinedDirs() []string {
type ClientConfig (line 103) | type ClientConfig struct
type ClientConfigCommonProps (line 108) | type ClientConfigCommonProps struct
function parseClientConfiguration (line 113) | func parseClientConfiguration(filePath string) (*ClientConfig, error) {
function getUnknownValues (line 141) | func getUnknownValues(fileContents []byte) map[string]any {
function parsingClientConfigError (line 160) | func parsingClientConfigError(err error) error {
function validateClientConfiguration (line 164) | func validateClientConfiguration(clientConfig *ClientConfig) error {
function validateLogLevel (line 174) | func validateLogLevel(clientConfig ClientConfig) error {
function toLogLevel (line 185) | func toLogLevel(logLevelString string) (string, error) {
FILE: client_configuration_test.go
function TestFindConfigFileFromConnectionParameters (line 12) | func TestFindConfigFileFromConnectionParameters(t *testing.T) {
function TestFindConfigFileFromEnvVariable (line 26) | func TestFindConfigFileFromEnvVariable(t *testing.T) {
function TestFindConfigFileFromFirstPredefinedDir (line 39) | func TestFindConfigFileFromFirstPredefinedDir(t *testing.T) {
function TestFindConfigFileFromSubsequentDirectoryIfNotFoundInPreviousOne (line 50) | func TestFindConfigFileFromSubsequentDirectoryIfNotFoundInPreviousOne(t ...
function TestNotFindConfigFileWhenNotDefined (line 61) | func TestNotFindConfigFileWhenNotDefined(t *testing.T) {
function TestCreatePredefinedDirs (line 72) | func TestCreatePredefinedDirs(t *testing.T) {
function TestGetClientConfig (line 86) | func TestGetClientConfig(t *testing.T) {
function TestNoResultForGetClientConfigWhenNoFileFound (line 101) | func TestNoResultForGetClientConfigWhenNoFileFound(t *testing.T) {
function TestParseConfiguration (line 108) | func TestParseConfiguration(t *testing.T) {
function TestParseAllLogLevels (line 154) | func TestParseAllLogLevels(t *testing.T) {
function TestParseConfigurationFails (line 174) | func TestParseConfigurationFails(t *testing.T) {
function TestUnknownValues (line 232) | func TestUnknownValues(t *testing.T) {
function TestConfigFileOpenSymlinkFail (line 302) | func TestConfigFileOpenSymlinkFail(t *testing.T) {
function createFile (line 317) | func createFile(t *testing.T, fileName string, fileContents string, dire...
function createTestDirectories (line 324) | func createTestDirectories(t *testing.T) struct {
function predefinedTestDirs (line 347) | func predefinedTestDirs(dirs struct {
function createClientConfigContent (line 355) | func createClientConfigContent(logLevel string, logPath string) string {
FILE: client_test.go
type DummyTransport (line 10) | type DummyTransport struct
method RoundTrip (line 15) | func (t *DummyTransport) RoundTrip(r *http.Request) (*http.Response, e...
function TestInternalClient (line 28) | func TestInternalClient(t *testing.T) {
FILE: cmd/arrow/transform_batches_to_rows/transform_batches_to_rows.go
function main (line 16) | func main() {
FILE: cmd/logger/logger.go
function main (line 10) | func main() {
FILE: cmd/mfa/mfa.go
function main (line 12) | func main() {
FILE: cmd/programmatic_access_token/pat.go
function main (line 13) | func main() {
FILE: cmd/variant/insertvariantobject.go
function main (line 16) | func main() {
FILE: connection.go
constant httpHeaderContentType (line 25) | httpHeaderContentType = "Content-Type"
constant httpHeaderAccept (line 26) | httpHeaderAccept = "accept"
constant httpHeaderUserAgent (line 27) | httpHeaderUserAgent = "User-Agent"
constant httpHeaderServiceName (line 28) | httpHeaderServiceName = "X-Snowflake-Service"
constant httpHeaderContentLength (line 29) | httpHeaderContentLength = "Content-Length"
constant httpHeaderHost (line 30) | httpHeaderHost = "Host"
constant httpHeaderValueOctetStream (line 31) | httpHeaderValueOctetStream = "application/octet-stream"
constant httpHeaderContentEncoding (line 32) | httpHeaderContentEncoding = "Content-Encoding"
constant httpClientAppID (line 33) | httpClientAppID = "CLIENT_APP_ID"
constant httpClientAppVersion (line 34) | httpClientAppVersion = "CLIENT_APP_VERSION"
constant statementTypeIDSelect (line 38) | statementTypeIDSelect = int64(0x1000)
constant statementTypeIDDml (line 39) | statementTypeIDDml = int64(0x3000)
constant statementTypeIDMultiTableInsert (line 40) | statementTypeIDMultiTableInsert = statementTypeIDDml + int64(0x500)
constant statementTypeIDMultistatement (line 41) | statementTypeIDMultistatement = int64(0xA000)
constant sessionClientSessionKeepAlive (line 45) | sessionClientSessionKeepAlive = "client_session_keep_a...
constant sessionClientSessionKeepAliveHeartbeatFrequency (line 46) | sessionClientSessionKeepAliveHeartbeatFrequency = "client_session_keep_a...
constant sessionClientValidateDefaultParameters (line 47) | sessionClientValidateDefaultParameters = "CLIENT_VALIDATE_DEFAU...
constant sessionArrayBindStageThreshold (line 48) | sessionArrayBindStageThreshold = "client_stage_array_bi...
constant serviceName (line 49) | serviceName = "service_name"
type resultType (line 52) | type resultType
constant snowflakeResultType (line 55) | snowflakeResultType ContextKey = "snowflakeResultType"
constant execResultType (line 56) | execResultType resultType = "exec"
constant queryResultType (line 57) | queryResultType resultType = "query"
type execKey (line 60) | type execKey
constant executionType (line 63) | executionType execKey = "executionType"
constant executionTypeStatement (line 64) | executionTypeStatement string = "statement"
type snowflakeConn (line 70) | type snowflakeConn struct
method exec (line 89) | func (sc *snowflakeConn) exec(
method Begin (line 251) | func (sc *snowflakeConn) Begin() (driver.Tx, error) {
method BeginTx (line 255) | func (sc *snowflakeConn) BeginTx(
method cleanup (line 286) | func (sc *snowflakeConn) cleanup() {
method Close (line 294) | func (sc *snowflakeConn) Close() (err error) {
method PrepareContext (line 315) | func (sc *snowflakeConn) PrepareContext(
method ExecContext (line 330) | func (sc *snowflakeConn) ExecContext(
method QueryContext (line 396) | func (sc *snowflakeConn) QueryContext(
method queryContextInternal (line 419) | func (sc *snowflakeConn) queryContextInternal(
method Prepare (line 475) | func (sc *snowflakeConn) Prepare(query string) (driver.Stmt, error) {
method Exec (line 479) | func (sc *snowflakeConn) Exec(
method Query (line 486) | func (sc *snowflakeConn) Query(
method Ping (line 493) | func (sc *snowflakeConn) Ping(ctx context.Context) error {
method CheckNamedValue (line 509) | func (sc *snowflakeConn) CheckNamedValue(nv *driver.NamedValue) error {
method GetQueryStatus (line 516) | func (sc *snowflakeConn) GetQueryStatus(
method AddTelemetryData (line 535) | func (sc *snowflakeConn) AddTelemetryData(_ context.Context, eventDate...
method QueryArrowStream (line 545) | func (sc *snowflakeConn) QueryArrowStream(ctx context.Context, query s...
function extractQueryContext (line 224) | func extractQueryContext(data *execResponse) (queryContext, error) {
function buildQueryContext (line 230) | func buildQueryContext(qcc *queryContextCache) (requestQueryContext, err...
function buildSnowflakeConn (line 598) | func buildSnowflakeConn(ctx context.Context, config Config) (*snowflakeC...
FILE: connection_configuration_test.go
function TestTomlConnection (line 12) | func TestTomlConnection(t *testing.T) {
FILE: connection_test.go
constant serviceNameStub (line 27) | serviceNameStub = "SV"
constant serviceNameAppend (line 28) | serviceNameAppend = "a"
function TestInvalidConnection (line 31) | func TestInvalidConnection(t *testing.T) {
function postQueryMock (line 53) | func postQueryMock(_ context.Context, _ *snowflakeRestful, _ *url.Values,
function TestExecWithEmptyRequestID (line 74) | func TestExecWithEmptyRequestID(t *testing.T) {
function TestGetQueryResultUsesTokenFromTokenAccessor (line 106) | func TestGetQueryResultUsesTokenFromTokenAccessor(t *testing.T) {
function TestGetQueryResultTokenExpiry (line 145) | func TestGetQueryResultTokenExpiry(t *testing.T) {
function TestGetQueryResultTokenNotSet (line 196) | func TestGetQueryResultTokenNotSet(t *testing.T) {
function TestCheckNamedValue (line 245) | func TestCheckNamedValue(t *testing.T) {
function TestExecWithSpecificRequestID (line 311) | func TestExecWithSpecificRequestID(t *testing.T) {
function TestExecContextPropagationIntegrationTest (line 344) | func TestExecContextPropagationIntegrationTest(t *testing.T) {
function TestServiceName (line 400) | func TestServiceName(t *testing.T) {
function closeSessionMock (line 433) | func closeSessionMock(_ context.Context, _ *snowflakeRestful, _ time.Dur...
function TestCloseIgnoreSessionGone (line 440) | func TestCloseIgnoreSessionGone(t *testing.T) {
function TestClientSessionPersist (line 455) | func TestClientSessionPersist(t *testing.T) {
function TestFetchResultByQueryID (line 474) | func TestFetchResultByQueryID(t *testing.T) {
function TestFetchRunningQueryByID (line 516) | func TestFetchRunningQueryByID(t *testing.T) {
function TestFetchErrorQueryByID (line 558) | func TestFetchErrorQueryByID(t *testing.T) {
function TestFetchMalformedJsonQueryByID (line 595) | func TestFetchMalformedJsonQueryByID(t *testing.T) {
function TestIsPrivateLink (line 631) | func TestIsPrivateLink(t *testing.T) {
function TestBuildPrivatelinkConn (line 653) | func TestBuildPrivatelinkConn(t *testing.T) {
function TestOcspAddressesSetup (line 664) | func TestOcspAddressesSetup(t *testing.T) {
function TestGetQueryStatus (line 712) | func TestGetQueryStatus(t *testing.T) {
function TestAddTelemetryDataViaSnowflakeConnection (line 741) | func TestAddTelemetryDataViaSnowflakeConnection(t *testing.T) {
function TestConfigureTelemetry (line 759) | func TestConfigureTelemetry(t *testing.T) {
function TestGetInvalidQueryStatus (line 784) | func TestGetInvalidQueryStatus(t *testing.T) {
function TestExecWithServerSideError (line 795) | func TestExecWithServerSideError(t *testing.T) {
function TestConcurrentReadOnParams (line 831) | func TestConcurrentReadOnParams(t *testing.T) {
function postQueryTest (line 885) | func postQueryTest(_ context.Context, _ *snowflakeRestful, _ *url.Values...
function postQueryFail (line 889) | func postQueryFail(_ context.Context, _ *snowflakeRestful, _ *url.Values...
function TestErrorReportingOnConcurrentFails (line 902) | func TestErrorReportingOnConcurrentFails(t *testing.T) {
function executeQueryAndConfirmMessage (line 916) | func executeQueryAndConfirmMessage(db *sql.DB, query string, expectedErr...
function TestQueryArrowStreamError (line 926) | func TestQueryArrowStreamError(t *testing.T) {
function TestExecContextError (line 953) | func TestExecContextError(t *testing.T) {
function TestQueryContextError (line 975) | func TestQueryContextError(t *testing.T) {
function TestPrepareQuery (line 1000) | func TestPrepareQuery(t *testing.T) {
function TestBeginCreatesTransaction (line 1010) | func TestBeginCreatesTransaction(t *testing.T) {
type EmptyTransporter (line 1019) | type EmptyTransporter struct
method RoundTrip (line 1021) | func (t EmptyTransporter) RoundTrip(*http.Request) (*http.Response, er...
function castToTransport (line 1027) | func castToTransport(rt http.RoundTripper) *http.Transport {
function TestGetTransport (line 1034) | func TestGetTransport(t *testing.T) {
function TestGetCRLTransport (line 1094) | func TestGetCRLTransport(t *testing.T) {
FILE: connection_util.go
method isClientSessionKeepAliveEnabled (line 17) | func (sc *snowflakeConn) isClientSessionKeepAliveEnabled() bool {
method getClientSessionKeepAliveHeartbeatFrequency (line 25) | func (sc *snowflakeConn) getClientSessionKeepAliveHeartbeatFrequency() (...
method startHeartBeat (line 41) | func (sc *snowflakeConn) startHeartBeat() {
method stopHeartBeat (line 56) | func (sc *snowflakeConn) stopHeartBeat() {
method getArrayBindStageThreshold (line 66) | func (sc *snowflakeConn) getArrayBindStageThreshold() int {
method connectionTelemetry (line 78) | func (sc *snowflakeConn) connectionTelemetry(cfg *Config) {
method processFileTransfer (line 100) | func (sc *snowflakeConn) processFileTransfer(
function getFileStream (line 150) | func getFileStream(ctx context.Context) (io.Reader, error) {
function isFileGetStream (line 162) | func isFileGetStream(ctx context.Context) bool {
function getFileTransferOptions (line 167) | func getFileTransferOptions(ctx context.Context) *SnowflakeFileTransferO...
function writeFileStream (line 179) | func writeFileStream(ctx context.Context, streamBuf *bytes.Buffer) error {
method populateSessionParameters (line 192) | func (sc *snowflakeConn) populateSessionParameters(parameters []nameValu...
method configureTelemetry (line 220) | func (sc *snowflakeConn) configureTelemetry() {
function isAsyncMode (line 231) | func isAsyncMode(ctx context.Context) bool {
function isDescribeOnly (line 235) | func isDescribeOnly(ctx context.Context) bool {
function isInternal (line 239) | func isInternal(ctx context.Context) bool {
function isLogQueryTextEnabled (line 243) | func isLogQueryTextEnabled(ctx context.Context) bool {
function isLogQueryParametersEnabled (line 247) | func isLogQueryParametersEnabled(ctx context.Context) bool {
function isBooleanContextEnabled (line 251) | func isBooleanContextEnabled(ctx context.Context, key ContextKey) bool {
function setResultType (line 260) | func setResultType(ctx context.Context, resType resultType) context.Cont...
function getResultType (line 264) | func getResultType(ctx context.Context) resultType {
function isDml (line 269) | func isDml(v int64) bool {
function isDql (line 273) | func isDql(data *execResponseData) bool {
function updateRows (line 277) | func updateRows(data execResponseData) (int64, error) {
function isMultiStmt (line 295) | func isMultiStmt(data *execResponseData) bool {
function getResumeQueryID (line 300) | func getResumeQueryID(ctx context.Context) (string, error) {
function populateChunkDownloader (line 322) | func populateChunkDownloader(
function checkIsPrivateLink (line 355) | func checkIsPrivateLink(host string) bool {
function isStatementContext (line 359) | func isStatementContext(ctx context.Context) bool {
FILE: connectivity_diagnosis.go
type connectivityDiagnoser (line 22) | type connectivityDiagnoser struct
method openAndReadAllowlistJSON (line 122) | func (cd *connectivityDiagnoser) openAndReadAllowlistJSON(filePath str...
method resolveHostname (line 139) | func (cd *connectivityDiagnoser) resolveHostname(hostname string) {
method isAcceptableStatusCode (line 153) | func (cd *connectivityDiagnoser) isAcceptableStatusCode(statusCode int...
method fetchCRL (line 157) | func (cd *connectivityDiagnoser) fetchCRL(uri string) error {
method doHTTP (line 204) | func (cd *connectivityDiagnoser) doHTTP(request *http.Request) error {
method doHTTPSGetCerts (line 233) | func (cd *connectivityDiagnoser) doHTTPSGetCerts(request *http.Request...
method createRequest (line 297) | func (cd *connectivityDiagnoser) createRequest(uri string) (*http.Requ...
method checkProxy (line 306) | func (cd *connectivityDiagnoser) checkProxy(req *http.Request) {
method performConnectivityCheck (line 325) | func (cd *connectivityDiagnoser) performConnectivityCheck(entryType, h...
function newConnectivityDiagnoser (line 26) | func newConnectivityDiagnoser(cfg *Config) *connectivityDiagnoser {
type allowlistEntry (line 32) | type allowlistEntry struct
type allowlist (line 38) | type allowlist struct
function createDiagnosticClient (line 51) | func createDiagnosticClient(cfg *Config) *http.Client {
function createDiagnosticDialContext (line 67) | func createDiagnosticDialContext() func(ctx context.Context, network, ad...
function createDiagnosticTransport (line 100) | func createDiagnosticTransport(cfg *Config) *http.Transport {
function performDiagnosis (line 362) | func performDiagnosis(cfg *Config, downloadCRLs bool) {
FILE: connectivity_diagnosis_test.go
function setupTestLogger (line 25) | func setupTestLogger() (buffer *bytes.Buffer, cleanup func()) {
function TestSetupTestLogger (line 40) | func TestSetupTestLogger(t *testing.T) {
type tcDiagnosticClient (line 69) | type tcDiagnosticClient struct
type tcOpenAllowlistJSON (line 75) | type tcOpenAllowlistJSON struct
type tcAcceptableStatusCode (line 82) | type tcAcceptableStatusCode struct
type tcFetchCRL (line 87) | type tcFetchCRL struct
type tcCreateRequest (line 94) | type tcCreateRequest struct
type tcDoHTTP (line 100) | type tcDoHTTP struct
type tcDoHTTPSGetCerts (line 108) | type tcDoHTTPSGetCerts struct
type tcResolveHostname (line 116) | type tcResolveHostname struct
type tcPerformConnectivityCheck (line 121) | type tcPerformConnectivityCheck struct
function TestCreateDiagnosticClient (line 130) | func TestCreateDiagnosticClient(t *testing.T) {
function TestCreateDiagnosticDialContext (line 159) | func TestCreateDiagnosticDialContext(t *testing.T) {
function TestOpenAndReadAllowlistJSON (line 179) | func TestOpenAndReadAllowlistJSON(t *testing.T) {
function TestIsAcceptableStatusCode (line 237) | func TestIsAcceptableStatusCode(t *testing.T) {
function TestFetchCRL (line 257) | func TestFetchCRL(t *testing.T) {
function TestCreateRequest (line 327) | func TestCreateRequest(t *testing.T) {
function TestDoHTTP (line 363) | func TestDoHTTP(t *testing.T) {
function TestDoHTTPSGetCerts (line 464) | func TestDoHTTPSGetCerts(t *testing.T) {
function TestCheckProxy (line 530) | func TestCheckProxy(t *testing.T) {
function TestResolveHostname (line 600) | func TestResolveHostname(t *testing.T) {
function TestPerformConnectivityCheck (line 636) | func TestPerformConnectivityCheck(t *testing.T) {
function TestPerformDiagnosis (line 685) | func TestPerformDiagnosis(t *testing.T) {
FILE: connector.go
type InternalSnowflakeDriver (line 11) | type InternalSnowflakeDriver interface
type Connector (line 17) | type Connector struct
method Connect (line 28) | func (t Connector) Connect(ctx context.Context) (driver.Conn, error) {
method Driver (line 38) | func (t Connector) Driver() driver.Driver {
function NewConnector (line 23) | func NewConnector(driver InternalSnowflakeDriver, config Config) driver....
FILE: connector_test.go
type noopTestDriver (line 15) | type noopTestDriver struct
method Open (line 20) | func (d *noopTestDriver) Open(_ string) (driver.Conn, error) {
method OpenWithConfig (line 24) | func (d *noopTestDriver) OpenWithConfig(_ context.Context, config Conf...
function TestConnector (line 29) | func TestConnector(t *testing.T) {
function TestConnectorWithMissingConfig (line 58) | func TestConnectorWithMissingConfig(t *testing.T) {
function TestConnectorCancelContext (line 78) | func TestConnectorCancelContext(t *testing.T) {
FILE: converter.go
constant format (line 29) | format = "2006-01-02 15:04:05.999999999"
constant numberDefaultPrecision (line 30) | numberDefaultPrecision = 38
constant jsonFormatStr (line 31) | jsonFormatStr = "json"
constant numberMaxPrecisionInBits (line 33) | numberMaxPrecisionInBits = 127
constant decfloatPrintingPrec (line 36) | decfloatPrintingPrec = 40
type timezoneType (line 38) | type timezoneType
constant TimestampNTZType (line 45) | TimestampNTZType timezoneType = iota
constant TimestampLTZType (line 47) | TimestampLTZType
constant TimestampTZType (line 49) | TimestampTZType
constant DateType (line 51) | DateType
constant TimeType (line 53) | TimeType
type interfaceArrayBinding (line 56) | type interfaceArrayBinding struct
function isInterfaceArrayBinding (line 62) | func isInterfaceArrayBinding(t any) bool {
function isJSONFormatType (line 73) | func isJSONFormatType(tsmode types.SnowflakeType) bool {
function goTypeToSnowflake (line 78) | func goTypeToSnowflake(v driver.Value, tsmode types.SnowflakeType) types...
function snowflakeTypeToGo (line 134) | func snowflakeTypeToGo(ctx context.Context, dbtype types.SnowflakeType, ...
function snowflakeTypeToGoForMaps (line 225) | func snowflakeTypeToGoForMaps[K comparable](ctx context.Context, valueMe...
function valueToString (line 254) | func valueToString(v driver.Value, tsmode types.SnowflakeType, params *s...
function isUUIDImplementer (line 309) | func isUUIDImplementer(v reflect.Value) bool {
function arrayToString (line 335) | func arrayToString(v driver.Value, tsmode types.SnowflakeType, params *s...
function mapToString (line 462) | func mapToString(v driver.Value, tsmode types.SnowflakeType, params *syn...
function toNullableInt64 (line 640) | func toNullableInt64(val any) (int64, bool) {
function toNullableTime (line 655) | func toNullableTime(val any) (time.Time, bool, error) {
function stringOrIntToString (line 665) | func stringOrIntToString(v reflect.Value) string {
function goTypeToFieldMetadata (line 672) | func goTypeToFieldMetadata(typ reflect.Type, tsmode types.SnowflakeType,...
function isSliceOfSlices (line 800) | func isSliceOfSlices(v any) bool {
function isArrayOfStructs (line 805) | func isArrayOfStructs(v any) bool {
function structValueToString (line 809) | func structValueToString(v driver.Value, tsmode types.SnowflakeType, par...
function timeTypeValueToString (line 907) | func timeTypeValueToString(tm time.Time, tsmode types.SnowflakeType) (bi...
function extractTimestamp (line 929) | func extractTimestamp(srcValue *string) (sec int64, nsec int64, err erro...
function stringToValue (line 961) | func stringToValue(ctx context.Context, dest *driver.Value, srcColumnMet...
function jsonToMap (line 1130) | func jsonToMap(ctx context.Context, keyMetadata, valueMetadata query.Fie...
function jsonToMapWithKeyType (line 1176) | func jsonToMapWithKeyType[K comparable](ctx context.Context, valueMetada...
function buildArrayFromMap (line 1295) | func buildArrayFromMap[K comparable, V any](ctx context.Context, valueMe...
function buildStructuredTypeFromMap (line 1311) | func buildStructuredTypeFromMap(values map[string]any, fieldMetadata []q...
function ifNotNullOrDefault (line 1319) | func ifNotNullOrDefault[T any](t any, def T) T {
function buildMapValues (line 1326) | func buildMapValues[K comparable, Vnullable any, VnotNullable any](mapVa...
function buildStructuredArray (line 1349) | func buildStructuredArray(ctx context.Context, fieldMetadata query.Field...
function buildStructuredArrayRecursive (line 1414) | func buildStructuredArrayRecursive[T any](ctx context.Context, fieldMeta...
function copyArrayAndConvert (line 1426) | func copyArrayAndConvert[T any](input []any, convertFunc func(input any)...
function buildStructuredTypeRecursive (line 1437) | func buildStructuredTypeRecursive(ctx context.Context, m map[string]any,...
function intToBigFloat (line 1463) | func intToBigFloat(val int64, scale int64) *big.Float {
function decimalToBigInt (line 1469) | func decimalToBigInt(num decimal128.Num) *big.Int {
function decimalToBigFloat (line 1475) | func decimalToBigFloat(num decimal128.Num, scale int64) *big.Float {
function arrowSnowflakeTimestampToTime (line 1481) | func arrowSnowflakeTimestampToTime(
function extractEpoch (line 1539) | func extractEpoch(value int64, scale int) int64 {
function extractFraction (line 1543) | func extractFraction(value int64, scale int) int64 {
function arrowToValues (line 1549) | func arrowToValues(
function arrowToValue (line 1573) | func arrowToValue(ctx context.Context, rowIdx int, srcColumnMeta query.F...
function buildMapFromNativeArrow (line 1699) | func buildMapFromNativeArrow(ctx context.Context, rowIdx int, keyMetadat...
function buildListFromNativeArrow (line 1726) | func buildListFromNativeArrow(ctx context.Context, rowIdx int, fieldMeta...
function buildArrowListRecursive (line 2046) | func buildArrowListRecursive[T any](ctx context.Context, rowIdx int, fie...
function mapStructuredArrayNativeArrowRows (line 2060) | func mapStructuredArrayNativeArrowRows[T any](offsets []int32, rowIdx in...
function extractInt64 (line 2072) | func extractInt64(values arrow.Array, j int) (int64, error) {
function buildStructuredMapFromArrow (line 2088) | func buildStructuredMapFromArrow[K comparable](ctx context.Context, rowI...
function buildListFromNativeArrowMap (line 2262) | func buildListFromNativeArrowMap[K comparable, V any](ctx context.Contex...
function buildTimeFromNativeArrowArray (line 2272) | func buildTimeFromNativeArrowArray[K comparable](mapNullValuesEnabled bo...
function mapStructuredMapNativeArrowFixedValue (line 2289) | func mapStructuredMapNativeArrowFixedValue[V any](valueMetadata query.Fi...
function extractNumberFromArrow (line 2297) | func extractNumberFromArrow(values *arrow.Array, j int, higherPrecision ...
function mapStructuredMapNativeArrowRows (line 2313) | func mapStructuredMapNativeArrowRows[K comparable, V any](m map[K]V, off...
function arrowToStructuredType (line 2326) | func arrowToStructuredType(ctx context.Context, structs *array.Struct, f...
function arrowStringToValue (line 2393) | func arrowStringToValue(srcValue *array.String, rowIdx int) snowflakeVal...
function arrowDecimal128ToValue (line 2400) | func arrowDecimal128ToValue(srcValue *array.Decimal128, rowIdx int, high...
function arrowInt64ToValue (line 2418) | func arrowInt64ToValue(srcValue *array.Int64, rowIdx int, higherPrecisio...
function arrowInt32ToValue (line 2426) | func arrowInt32ToValue(srcValue *array.Int32, rowIdx int, higherPrecisio...
function arrowInt16ToValue (line 2434) | func arrowInt16ToValue(srcValue *array.Int16, rowIdx int, higherPrecisio...
function arrowInt8ToValue (line 2442) | func arrowInt8ToValue(srcValue *array.Int8, rowIdx int, higherPrecision ...
function arrowIntToValue (line 2450) | func arrowIntToValue(srcColumnMeta query.FieldMetadata, higherPrecision ...
function arrowRealToValue (line 2467) | func arrowRealToValue(srcValue *array.Float64, rowIdx int) snowflakeValue {
function arrowDecFloatToValue (line 2474) | func arrowDecFloatToValue(ctx context.Context, srcValue *array.Struct, r...
function parseTwosComplementBigEndian (line 2514) | func parseTwosComplementBigEndian(b []byte) (*big.Int, error) {
function arrowBoolToValue (line 2535) | func arrowBoolToValue(srcValue *array.Boolean, rowIdx int) snowflakeValue {
function arrowBinaryToValue (line 2542) | func arrowBinaryToValue(srcValue *array.Binary, rowIdx int) snowflakeVal...
function arrowDateToValue (line 2549) | func arrowDateToValue(srcValue *array.Date32, rowID int) snowflakeValue {
function arrowTimeToValue (line 2556) | func arrowTimeToValue(srcValue arrow.Array, rowIdx int, scale int) snowf...
type intArray (line 2571) | type intArray
type int32Array (line 2572) | type int32Array
type int64Array (line 2573) | type int64Array
type float64Array (line 2574) | type float64Array
type float32Array (line 2575) | type float32Array
type decfloatArray (line 2576) | type decfloatArray
type boolArray (line 2577) | type boolArray
type stringArray (line 2578) | type stringArray
type byteArray (line 2579) | type byteArray
type timestampNtzArray (line 2580) | type timestampNtzArray
type timestampLtzArray (line 2581) | type timestampLtzArray
type timestampTzArray (line 2582) | type timestampTzArray
type dateArray (line 2583) | type dateArray
type timeArray (line 2584) | type timeArray
function Array (line 2589) | func Array(a any, typ ...any) (any, error) {
function snowflakeArrayToString (line 2695) | func snowflakeArrayToString(nv *driver.NamedValue, stream bool) (types.S...
function interfaceSliceToString (line 2842) | func interfaceSliceToString(interfaceSlice reflect.Value, stream bool, t...
function higherPrecisionEnabled (line 2960) | func higherPrecisionEnabled(ctx context.Context) bool {
function decfloatMappingEnabled (line 2964) | func decfloatMappingEnabled(ctx context.Context) bool {
type TypedNullTime (line 2975) | type TypedNullTime struct
function convertTzTypeToSnowflakeType (line 2980) | func convertTzTypeToSnowflakeType(tzType timezoneType) types.SnowflakeTy...
function getTimestampBindValue (line 2996) | func getTimestampBindValue(x time.Time, stream bool, t types.SnowflakeTy...
function convertTimeToTimeStamp (line 3003) | func convertTimeToTimeStamp(x time.Time, t types.SnowflakeType) (string,...
function decoderWithNumbersAsStrings (line 3019) | func decoderWithNumbersAsStrings(srcValue *string) *json.Decoder {
FILE: converter_test.go
function stringIntToDecimal (line 25) | func stringIntToDecimal(src string) (decimal128.Num, bool) {
function stringFloatToDecimal (line 35) | func stringFloatToDecimal(src string, scale int64) (decimal128.Num, bool) {
function stringFloatToInt (line 51) | func stringFloatToInt(src string, scale int64) (int64, bool) {
type testValueToStringStructuredObject (line 66) | type testValueToStringStructuredObject struct
method Write (line 72) | func (o *testValueToStringStructuredObject) Write(sowc StructuredObjec...
function TestValueToString (line 85) | func TestValueToString(t *testing.T) {
function TestExtractTimestamp (line 218) | func TestExtractTimestamp(t *testing.T) {
function TestStringToValue (line 236) | func TestStringToValue(t *testing.T) {
type tcArrayToString (line 290) | type tcArrayToString struct
function TestArrayToString (line 296) | func TestArrayToString(t *testing.T) {
function TestArrowToValues (line 322) | func TestArrowToValues(t *testing.T) {
function TestTimestampLTZLocation (line 871) | func TestTimestampLTZLocation(t *testing.T) {
function TestSmallTimestampBinding (line 900) | func TestSmallTimestampBinding(t *testing.T) {
function TestTimestampConversionWithoutArrowBatches (line 936) | func TestTimestampConversionWithoutArrowBatches(t *testing.T) {
function TestTimeTypeValueToString (line 988) | func TestTimeTypeValueToString(t *testing.T) {
function TestIsArrayOfStructs (line 1022) | func TestIsArrayOfStructs(t *testing.T) {
function TestSqlNull (line 1043) | func TestSqlNull(t *testing.T) {
function TestNumbersScanType (line 1058) | func TestNumbersScanType(t *testing.T) {
function mustArray (line 1194) | func mustArray(v any, typ ...any) driver.Value {
FILE: crl.go
constant snowflakeCrlCacheValidityTimeEnv (line 22) | snowflakeCrlCacheValidityTimeEnv = "SNOWFLAKE_CRL_CACHE_VALIDITY_TIME"
type distributionPointName (line 26) | type distributionPointName struct
type issuingDistributionPoint (line 30) | type issuingDistributionPoint struct
type crlValidator (line 34) | type crlValidator struct
method verifyPeerCertificates (line 163) | func (cv *crlValidator) verifyPeerCertificates(rawCerts [][]byte, veri...
method validateChains (line 193) | func (cv *crlValidator) validateChains(chains [][]*x509.Certificate) [...
method validateCertificate (line 244) | func (cv *crlValidator) validateCertificate(cert *x509.Certificate, pa...
method validateCrlAgainstCrlURL (line 259) | func (cv *crlValidator) validateCrlAgainstCrlURL(cert *x509.Certificat...
method validateCrl (line 317) | func (cv *crlValidator) validateCrl(crl *x509.RevocationList, parent *...
method getFromCache (line 334) | func (cv *crlValidator) getFromCache(crlURL string) (*x509.RevocationL...
method updateCache (line 392) | func (cv *crlValidator) updateCache(crlURL string, crl *x509.Revocatio...
method downloadCrl (line 429) | func (cv *crlValidator) downloadCrl(crlURL string) (*x509.RevocationLi...
method crlURLToPath (line 483) | func (cv *crlValidator) crlURLToPath(crlURL string) string {
method verifyAgainstIdpExtension (line 488) | func (cv *crlValidator) verifyAgainstIdpExtension(crl *x509.Revocation...
method getOrCreateMutex (line 508) | func (cv *crlValidator) getOrCreateMutex(crlURL string) *sync.Mutex {
type crlCacheCleanerType (line 44) | type crlCacheCleanerType struct
method startPeriodicCacheCleanup (line 535) | func (ccc *crlCacheCleanerType) startPeriodicCacheCleanup() {
method stopPeriodicCacheCleanup (line 561) | func (ccc *crlCacheCleanerType) stopPeriodicCacheCleanup() {
method cleanupInMemoryCache (line 575) | func (ccc *crlCacheCleanerType) cleanupInMemoryCache() {
method cleanupOnDiskCache (line 590) | func (ccc *crlCacheCleanerType) cleanupOnDiskCache() {
type crlInMemoryCacheValueType (line 53) | type crlInMemoryCacheValueType struct
function newCrlValidator (line 67) | func newCrlValidator(certRevocationCheckMode CertRevocationCheckMode, al...
function initCrlCacheCleaner (line 81) | func initCrlCacheCleaner() {
constant CertRevocationCheckDisabled (line 132) | CertRevocationCheckDisabled = sfconfig.CertRevocationCheckDisabled
constant CertRevocationCheckAdvisory (line 135) | CertRevocationCheckAdvisory = sfconfig.CertRevocationCheckAdvisory
constant CertRevocationCheckEnabled (line 137) | CertRevocationCheckEnabled = sfconfig.CertRevocationCheckEnabled
type crlValidationResult (line 140) | type crlValidationResult
constant crlRevoked (line 143) | crlRevoked crlValidationResult = iota
constant crlUnrevoked (line 144) | crlUnrevoked
constant crlError (line 145) | crlError
type certValidationResult (line 148) | type certValidationResult
constant certRevoked (line 151) | certRevoked certValidationResult = iota
constant certUnrevoked (line 152) | certUnrevoked
constant certError (line 153) | certError
constant defaultCrlHTTPClientTimeout (line 157) | defaultCrlHTTPClientTimeout = 10 * time.Second
constant defaultCrlCacheValidityTime (line 158) | defaultCrlCacheValidityTime = 24 * time.Hour
constant defaultCrlOnDiskCacheRemovalDelay (line 159) | defaultCrlOnDiskCacheRemovalDelay = 7 * time.Hour
constant defaultCrlDownloadMaxSize (line 160) | defaultCrlDownloadMaxSize = 20 * 1024 * 1024
function isShortLivedCertificate (line 519) | func isShortLivedCertificate(cert *x509.Certificate) bool {
function defaultCrlOnDiskCacheDir (line 622) | func defaultCrlOnDiskCacheDir() (string, error) {
FILE: crl_test.go
type allowCertificatesWithoutCrlURLType (line 27) | type allowCertificatesWithoutCrlURLType
type inMemoryCacheDisabledType (line 28) | type inMemoryCacheDisabledType
type onDiskCacheDisabledType (line 29) | type onDiskCacheDisabledType
type downloadMaxSizeType (line 30) | type downloadMaxSizeType
type notAfterType (line 32) | type notAfterType
type crlEndpointType (line 33) | type crlEndpointType
type revokedCert (line 35) | type revokedCert
type thisUpdateType (line 37) | type thisUpdateType
type nextUpdateType (line 38) | type nextUpdateType
function newTestCrlValidator (line 40) | func newTestCrlValidator(t *testing.T, checkMode CertRevocationCheckMode...
function TestCrlCheckModeDisabledNoHttpCall (line 70) | func TestCrlCheckModeDisabledNoHttpCall(t *testing.T) {
function TestCrlModes (line 80) | func TestCrlModes(t *testing.T) {
function cleanupCrlCache (line 805) | func cleanupCrlCache(t *testing.T) {
function TestRealCrlWithIdpExtension (line 817) | func TestRealCrlWithIdpExtension(t *testing.T) {
function TestParallelRequestToTheSameCrl (line 830) | func TestParallelRequestToTheSameCrl(t *testing.T) {
function TestIsShortLivedCertificate (line 859) | func TestIsShortLivedCertificate(t *testing.T) {
type malformedCrlRoundTripper (line 938) | type malformedCrlRoundTripper struct
method RoundTrip (line 941) | func (m *malformedCrlRoundTripper) RoundTrip(req *http.Request) (*http...
function createCa (line 949) | func createCa(t *testing.T, issuerCert *x509.Certificate, issuerPrivateK...
function createLeafCert (line 968) | func createLeafCert(t *testing.T, issuerCert *x509.Certificate, issuerPr...
function createCert (line 996) | func createCert(t *testing.T, template, issuerCert *x509.Certificate, is...
function calculateKeyID (line 1014) | func calculateKeyID(t *testing.T, pubKey any) []byte {
function createCrl (line 1021) | func createCrl(t *testing.T, issuerCert *x509.Certificate, issuerPrivate...
type crlEndpointDef (line 1057) | type crlEndpointDef struct
function newCrlEndpointDef (line 1062) | func newCrlEndpointDef(endpoint string, crl *x509.RevocationList) *crlEn...
function createCrlServer (line 1069) | func createCrlServer(t *testing.T) (*http.Server, int) {
function registerCrlEndpoints (line 1085) | func registerCrlEndpoints(t *testing.T, server *http.Server, endpointDef...
function fullCrlURL (line 1095) | func fullCrlURL(port int, endpoint string) string {
function closeServer (line 1099) | func closeServer(t *testing.T, server *http.Server) {
function TestCrlE2E (line 1104) | func TestCrlE2E(t *testing.T) {
FILE: ctx_test.go
function TestCtxVal (line 11) | func TestCtxVal(t *testing.T) {
function TestLogCtx (line 41) | func TestLogCtx(t *testing.T) {
FILE: datatype.go
function dataTypeMode (line 50) | func dataTypeMode(v driver.Value) (tsmode types.SnowflakeType, err error) {
type SnowflakeParameter (line 89) | type SnowflakeParameter struct
function populateSnowflakeParameter (line 111) | func populateSnowflakeParameter(colname string, p *SnowflakeParameter) a...
function ScanSnowflakeParameter (line 156) | func ScanSnowflakeParameter(rows *sql.Rows) (*SnowflakeParameter, error) {
FILE: datatype_test.go
function TestDataTypeMode (line 11) | func TestDataTypeMode(t *testing.T) {
function TestPopulateSnowflakeParameter (line 52) | func TestPopulateSnowflakeParameter(t *testing.T) {
FILE: datetime.go
type formatReplacement (line 14) | type formatReplacement struct
function timeToString (line 36) | func timeToString(t time.Time, dateTimeType string, sp *syncParams) (str...
function snowflakeFormatToGoFormat (line 48) | func snowflakeFormatToGoFormat(sfFormat string) (string, error) {
function dateTimeOutputFormatByType (line 75) | func dateTimeOutputFormatByType(dateTimeType string, sp *syncParams) (st...
function dateTimeInputFormatByType (line 104) | func dateTimeInputFormatByType(dateTimeType string, sp *syncParams) (str...
FILE: datetime_test.go
function TestSnowflakeFormatToGoFormatUnitTest (line 8) | func TestSnowflakeFormatToGoFormatUnitTest(t *testing.T) {
function TestIncorrectSecondsFraction (line 61) | func TestIncorrectSecondsFraction(t *testing.T) {
function TestSnowflakeFormatToGoFormatIntegrationTest (line 66) | func TestSnowflakeFormatToGoFormatIntegrationTest(t *testing.T) {
FILE: driver.go
type SnowflakeDriver (line 15) | type SnowflakeDriver struct
method Open (line 18) | func (d SnowflakeDriver) Open(dsn string) (driver.Conn, error) {
method OpenConnector (line 35) | func (d SnowflakeDriver) OpenConnector(dsn string) (driver.Connector, ...
method OpenWithConfig (line 50) | func (d SnowflakeDriver) OpenWithConfig(ctx context.Context, config Co...
function runningOnGithubAction (line 101) | func runningOnGithubAction() bool {
function skipRegistration (line 111) | func skipRegistration() bool {
function init (line 115) | func init() {
FILE: driver_ocsp_test.go
function setenv (line 18) | func setenv(k, v string) {
function unsetenv (line 25) | func unsetenv(k string) {
function deleteOCSPCacheFile (line 33) | func deleteOCSPCacheFile() {
function deleteOCSPCacheAll (line 38) | func deleteOCSPCacheAll() {
function cleanup (line 44) | func cleanup() {
function TestOCSPFailOpen (line 53) | func TestOCSPFailOpen(t *testing.T) {
function isFailToConnectOrAuthErr (line 91) | func isFailToConnectOrAuthErr(driverErr *SnowflakeError) bool {
function TestOCSPFailOpenWithoutFileCache (line 95) | func TestOCSPFailOpenWithoutFileCache(t *testing.T) {
function TestOCSPFailOpenRevokedStatus (line 135) | func TestOCSPFailOpenRevokedStatus(t *testing.T) {
function TestOCSPFailClosedRevokedStatus (line 182) | func TestOCSPFailClosedRevokedStatus(t *testing.T) {
function TestOCSPFailOpenCacheServerTimeout (line 229) | func TestOCSPFailOpenCacheServerTimeout(t *testing.T) {
function TestOCSPFailClosedCacheServerTimeout (line 275) | func TestOCSPFailClosedCacheServerTimeout(t *testing.T) {
function TestOCSPFailOpenResponderTimeout (line 337) | func TestOCSPFailOpenResponderTimeout(t *testing.T) {
function TestOCSPFailClosedResponderTimeout (line 384) | func TestOCSPFailClosedResponderTimeout(t *testing.T) {
function TestOCSPFailOpenResponder404 (line 438) | func TestOCSPFailOpenResponder404(t *testing.T) {
function TestOCSPFailClosedResponder404 (line 479) | func TestOCSPFailClosedResponder404(t *testing.T) {
function TestExpiredCertificate (line 520) | func TestExpiredCertificate(t *testing.T) {
function TestOCSPFailOpenNoOCSPURL (line 605) | func TestOCSPFailOpenNoOCSPURL(t *testing.T) {
function TestOCSPFailClosedNoOCSPURL (line 646) | func TestOCSPFailClosedNoOCSPURL(t *testing.T) {
function TestOCSPUnexpectedResponses (line 693) | func TestOCSPUnexpectedResponses(t *testing.T) {
function TestConnectionToMultipleConfigurations (line 758) | func TestConnectionToMultipleConfigurations(t *testing.T) {
FILE: driver_test.go
constant selectNumberSQL (line 50) | selectNumberSQL = "SELECT %s::NUMBER(%v, %v) AS C"
constant selectVariousTypes (line 51) | selectVariousTypes = "SELECT 1.0::NUMBER(30,2) as C1, 2::NUMBER(18,0)...
constant selectRandomGenerator (line 52) | selectRandomGenerator = "SELECT SEQ8(), RANDSTR(1000, RANDOM()) FROM TAB...
constant PSTLocation (line 53) | PSTLocation = "America/Los_Angeles"
function init (line 61) | func init() {
function createDSN (line 93) | func createDSN(timezone string) {
function setup (line 152) | func setup() (string, error) {
function teardown (line 180) | func teardown() error {
function TestMain (line 193) | func TestMain(m *testing.M) {
type DBTest (line 210) | type DBTest struct
method mustQueryT (line 215) | func (dbt *DBTest) mustQueryT(t *testing.T, query string, args ...any)...
method mustQuery (line 248) | func (dbt *DBTest) mustQuery(query string, args ...any) (rows *RowsExt...
method mustQueryContext (line 253) | func (dbt *DBTest) mustQueryContext(ctx context.Context, query string,...
method mustQueryContextT (line 258) | func (dbt *DBTest) mustQueryContextT(ctx context.Context, t *testing.T...
method query (line 291) | func (dbt *DBTest) query(query string, args ...any) (*sql.Rows, error) {
method mustQueryAssertCount (line 295) | func (dbt *DBTest) mustQueryAssertCount(query string, expected int, ar...
method prepare (line 307) | func (dbt *DBTest) prepare(query string) (*sql.Stmt, error) {
method fail (line 311) | func (dbt *DBTest) fail(method, query string, err error) {
method mustExec (line 318) | func (dbt *DBTest) mustExec(query string, args ...any) (res sql.Result) {
method mustExecT (line 322) | func (dbt *DBTest) mustExecT(t *testing.T, query string, args ...any) ...
method mustExecContext (line 326) | func (dbt *DBTest) mustExecContext(ctx context.Context, query string, ...
method mustExecContextT (line 334) | func (dbt *DBTest) mustExecContextT(ctx context.Context, t *testing.T,...
method exec (line 342) | func (dbt *DBTest) exec(query string, args ...any) (sql.Result, error) {
method mustDecimalSize (line 346) | func (dbt *DBTest) mustDecimalSize(ct *sql.ColumnType) (pr int64, sc i...
method mustFailDecimalSize (line 355) | func (dbt *DBTest) mustFailDecimalSize(ct *sql.ColumnType) {
method mustLength (line 362) | func (dbt *DBTest) mustLength(ct *sql.ColumnType) (cLen int64) {
method mustFailLength (line 371) | func (dbt *DBTest) mustFailLength(ct *sql.ColumnType) {
method mustNullable (line 378) | func (dbt *DBTest) mustNullable(ct *sql.ColumnType) (canNull bool) {
method mustPrepare (line 387) | func (dbt *DBTest) mustPrepare(query string) (stmt *sql.Stmt) {
method forceJSON (line 395) | func (dbt *DBTest) forceJSON() {
method forceArrow (line 399) | func (dbt *DBTest) forceArrow() {
method forceNativeArrow (line 405) | func (dbt *DBTest) forceNativeArrow() { // structured types
method enableStructuredTypes (line 411) | func (dbt *DBTest) enableStructuredTypes() {
method enableStructuredTypesBinding (line 426) | func (dbt *DBTest) enableStructuredTypesBinding() {
type SCTest (line 438) | type SCTest struct
method fail (line 443) | func (sct *SCTest) fail(method, query string, err error) {
method mustExec (line 450) | func (sct *SCTest) mustExec(query string, args []driver.Value) driver....
method mustQuery (line 457) | func (sct *SCTest) mustQuery(query string, args []driver.Value) driver...
method mustQueryContext (line 465) | func (sct *SCTest) mustQueryContext(ctx context.Context, query string,...
type testConfig (line 473) | type testConfig struct
function runDBTest (line 477) | func runDBTest(t *testing.T, test func(dbt *DBTest)) {
function runDBTestWithConfig (line 481) | func runDBTestWithConfig(t *testing.T, testCfg *testConfig, test func(db...
function runSnowflakeConnTest (line 491) | func runSnowflakeConnTest(t *testing.T, test func(sct *SCTest)) {
function runSnowflakeConnTestWithConfig (line 495) | func runSnowflakeConnTestWithConfig(t *testing.T, testCfg *testConfig, t...
function getDbHandlerFromConfig (line 514) | func getDbHandlerFromConfig(t *testing.T, cfg *Config) *sql.DB {
function runningOnAWS (line 524) | func runningOnAWS() bool {
function runningOnGCP (line 528) | func runningOnGCP() bool {
function runningOnLinux (line 532) | func runningOnLinux() bool {
function TestKnownUserInvalidPasswordParameters (line 536) | func TestKnownUserInvalidPasswordParameters(t *testing.T) {
function TestCommentOnlyQuery (line 557) | func TestCommentOnlyQuery(t *testing.T) {
function TestEmptyQuery (line 574) | func TestEmptyQuery(t *testing.T) {
function TestEmptyQueryWithRequestID (line 590) | func TestEmptyQueryWithRequestID(t *testing.T) {
function TestRequestIDFromTwoDifferentSessions (line 602) | func TestRequestIDFromTwoDifferentSessions(t *testing.T) {
function TestCRUD (line 699) | func TestCRUD(t *testing.T) {
function TestInt (line 797) | func TestInt(t *testing.T) {
function testInt (line 801) | func testInt(t *testing.T, json bool) {
function TestFloat32 (line 835) | func TestFloat32(t *testing.T) {
function testFloat32 (line 839) | func testFloat32(t *testing.T, json bool) {
function TestFloat64 (line 873) | func TestFloat64(t *testing.T) {
function testFloat64 (line 877) | func testFloat64(t *testing.T, json bool) {
function TestDecfloat (line 908) | func TestDecfloat(t *testing.T) {
function TestString (line 1070) | func TestString(t *testing.T) {
function testString (line 1074) | func testString(t *testing.T, json bool) {
type testUUID (line 1130) | type testUUID struct
method Scan (line 1148) | func (uuid *testUUID) Scan(src any) error {
method Value (line 1187) | func (uuid testUUID) Value() (driver.Value, error) {
function newTestUUID (line 1134) | func newTestUUID() testUUID {
function parseTestUUID (line 1138) | func parseTestUUID(str string) testUUID {
function TestUUID (line 1191) | func TestUUID(t *testing.T) {
function testUUIDWithFormat (line 1200) | func testUUIDWithFormat(t *testing.T, json, arrow bool) {
type tcDateTimeTimestamp (line 1240) | type tcDateTimeTimestamp struct
type timeTest (line 1246) | type timeTest struct
method genQuery (line 1251) | func (tt timeTest) genQuery() string {
method run (line 1255) | func (tt timeTest) run(t *testing.T, dbt *DBTest, dbtype, tlayout stri...
function TestSimpleDateTimeTimestampFetch (line 1303) | func TestSimpleDateTimeTimestampFetch(t *testing.T) {
function testSimpleDateTimeTimestampFetch (line 1307) | func testSimpleDateTimeTimestampFetch(t *testing.T, json bool) {
function TestDateTime (line 1339) | func TestDateTime(t *testing.T) {
function testDateTime (line 1343) | func testDateTime(t *testing.T, json bool) {
function TestTimestampLTZ (line 1407) | func TestTimestampLTZ(t *testing.T) {
function testTimestampLTZ (line 1411) | func testTimestampLTZ(t *testing.T, json bool) {
function TestTimestampTZ (line 1480) | func TestTimestampTZ(t *testing.T) {
function testTimestampTZ (line 1484) | func testTimestampTZ(t *testing.T, json bool) {
function TestNULL (line 1528) | func TestNULL(t *testing.T) {
function testNULL (line 1532) | func testNULL(t *testing.T, json bool) {
function TestVariant (line 1688) | func TestVariant(t *testing.T) {
function testVariant (line 1692) | func testVariant(t *testing.T, json bool) {
function TestArray (line 1710) | func TestArray(t *testing.T) {
function testArray (line 1714) | func testArray(t *testing.T, json bool) {
function TestLargeSetResult (line 1732) | func TestLargeSetResult(t *testing.T) {
function testLargeSetResult (line 1737) | func testLargeSetResult(t *testing.T, numrows int, json bool) {
function TestPingpongQuery (line 1764) | func TestPingpongQuery(t *testing.T) {
function TestDML (line 1779) | func TestDML(t *testing.T) {
function insertData (line 1805) | func insertData(dbt *DBTest, commit bool) error {
function queryTestTx (line 1840) | func queryTestTx(tx *sql.Tx) (*map[int]string, error) {
function queryTest (line 1859) | func queryTest(dbt *DBTest) (*map[int]string, error) {
function TestCancelQuery (line 1877) | func TestCancelQuery(t *testing.T) {
function TestCancelQueryWithConnectionContext (line 1892) | func TestCancelQueryWithConnectionContext(t *testing.T) {
function TestPing (line 1938) | func TestPing(t *testing.T) {
function TestDoubleDollar (line 1958) | func TestDoubleDollar(t *testing.T) {
function TestTimezoneSessionParameter (line 1980) | func TestTimezoneSessionParameter(t *testing.T) {
function TestLargeSetResultCancel (line 2000) | func TestLargeSetResultCancel(t *testing.T) {
function TestValidateDatabaseParameter (line 2028) | func TestValidateDatabaseParameter(t *testing.T) {
function TestSpecifyWarehouseDatabase (line 2100) | func TestSpecifyWarehouseDatabase(t *testing.T) {
function TestFetchNil (line 2119) | func TestFetchNil(t *testing.T) {
function TestPingInvalidHost (line 2139) | func TestPingInvalidHost(t *testing.T) {
function TestOpenWithConfig (line 2166) | func TestOpenWithConfig(t *testing.T) {
function TestOpenWithConfigCancel (line 2194) | func TestOpenWithConfigCancel(t *testing.T) {
function TestOpenWithInvalidConfig (line 2227) | func TestOpenWithInvalidConfig(t *testing.T) {
function TestOpenWithTransport (line 2241) | func TestOpenWithTransport(t *testing.T) {
function TestClientSessionKeepAliveParameter (line 2276) | func TestClientSessionKeepAliveParameter(t *testing.T) {
function TestTimePrecision (line 2299) | func TestTimePrecision(t *testing.T) {
function initPoolWithSize (line 2312) | func initPoolWithSize(t *testing.T, db *sql.DB, poolSize int) {
function initPoolWithSizeAndReturnErrors (line 2325) | func initPoolWithSizeAndReturnErrors(db *sql.DB, poolSize int) []error {
function runSelectCurrentUser (line 2349) | func runSelectCurrentUser(t *testing.T, db *sql.DB) string {
function runSmokeQuery (line 2360) | func runSmokeQuery(t *testing.T, db *sql.DB) {
function runSmokeQueryAndReturnErrors (line 2371) | func runSmokeQueryAndReturnErrors(db *sql.DB) error {
function runSmokeQueryWithConn (line 2391) | func runSmokeQueryWithConn(t *testing.T, conn *sql.Conn) {
FILE: dsn.go
constant configBoolNotSet (line 20) | configBoolNotSet = sfconfig.BoolNotSet
constant ConfigBoolTrue (line 22) | ConfigBoolTrue = sfconfig.BoolTrue
constant ConfigBoolFalse (line 24) | ConfigBoolFalse = sfconfig.BoolFalse
function DSN (line 28) | func DSN(cfg *Config) (string, error) { return sfconfig.DSN(cfg) }
function ParseDSN (line 31) | func ParseDSN(dsn string) (*Config, error) { return sfconfig.ParseDSN(ds...
function GetConfigFromEnv (line 34) | func GetConfigFromEnv(properties []*ConfigParam) (*Config, error) {
function transportConfigFor (line 38) | func transportConfigFor(tt transportType) *transportConfig {
FILE: easy_logging.go
type initTrials (line 17) | type initTrials struct
method setInitTrial (line 31) | func (i *initTrials) setInitTrial(clientConfigFileInput string) {
method increaseReconfigureCounter (line 36) | func (i *initTrials) increaseReconfigureCounter() {
function initEasyLogging (line 40) | func initEasyLogging(clientConfigFileInput string) error {
function easyLoggingInitError (line 81) | func easyLoggingInitError(err error) error {
function reconfigureEasyLogging (line 89) | func reconfigureEasyLogging(logLevel string, logPath string) error {
function createLogWriter (line 124) | func createLogWriter(logPath string) (io.Writer, *os.File, error) {
function allowedToInitialize (line 136) | func allowedToInitialize(clientConfigFileInput string) bool {
function getLogLevel (line 145) | func getLogLevel(logLevel string) (string, error) {
function getLogPath (line 153) | func getLogPath(logPath string) (string, error) {
function isDirAccessCorrect (line 184) | func isDirAccessCorrect(dirPath string) (bool, *os.FileMode, error) {
function dirExists (line 199) | func dirExists(dirPath string) (bool, error) {
FILE: easy_logging_test.go
function TestInitializeEasyLoggingOnlyOnceWhenConfigGivenAsAParameter (line 16) | func TestInitializeEasyLoggingOnlyOnceWhenConfigGivenAsAParameter(t *tes...
function TestConfigureEasyLoggingOnlyOnceWhenInitializedWithoutConfigFilePath (line 46) | func TestConfigureEasyLoggingOnlyOnceWhenInitializedWithoutConfigFilePat...
function TestReconfigureEasyLoggingIfConfigPathWasNotGivenForTheFirstTime (line 94) | func TestReconfigureEasyLoggingIfConfigPathWasNotGivenForTheFirstTime(t ...
function TestEasyLoggingFailOnUnknownLevel (line 134) | func TestEasyLoggingFailOnUnknownLevel(t *testing.T) {
function TestEasyLoggingFailOnNotExistingConfigFile (line 148) | func TestEasyLoggingFailOnNotExistingConfigFile(t *testing.T) {
function TestLogToConfiguredFile (line 159) | func TestLogToConfiguredFile(t *testing.T) {
function TestDataRace (line 194) | func TestDataRace(t *testing.T) {
function notEmptyLines (line 211) | func notEmptyLines(lines string) []string {
function cleanUp (line 218) | func cleanUp() {
function toClientConfigLevel (line 226) | func toClientConfigLevel(logLevel string) string {
function filterStrings (line 238) | func filterStrings(values []string, keep func(string) bool) []string {
function defaultConfig (line 248) | func defaultConfig(t *testing.T) *Config {
function openWithClientConfigFile (line 254) | func openWithClientConfigFile(t *testing.T, clientConfigFile string) err...
method reset (line 262) | func (i *initTrials) reset() {
FILE: encrypt_util.go
constant gcmIvLengthInBytes (line 17) | gcmIvLengthInBytes = 12
function encryptStreamCBC (line 36) | func encryptStreamCBC(
function encryptECB (line 120) | func encryptECB(encrypted []byte, fileKey []byte, decodedKey []byte) err...
function decryptECB (line 139) | func decryptECB(decrypted []byte, keyBytes []byte, decodedKey []byte) er...
function encryptFileCBC (line 158) | func encryptFileCBC(
function decryptFileKeyECB (line 193) | func decryptFileKeyECB(
function initCBC (line 222) | func initCBC(decryptedKey []byte, ivBytes []byte) (cipher.BlockMode, err...
function decryptFileCBC (line 232) | func decryptFileCBC(
function decryptStreamCBC (line 265) | func decryptStreamCBC(
function encryptGCM (line 315) | func encryptGCM(iv []byte, plaintext []byte, encryptionKey []byte, aad [...
function decryptGCM (line 323) | func decryptGCM(iv []byte, ciphertext []byte, encryptionKey []byte, aad ...
function initGcm (line 331) | func initGcm(encryptionKey []byte) (cipher.AEAD, error) {
function encryptFileGCM (line 339) | func encryptFileGCM(
function decryptFileGCM (line 410) | func decryptFileGCM(
type materialDescriptor (line 466) | type materialDescriptor struct
function matdescToUnicode (line 472) | func matdescToUnicode(matdesc materialDescriptor) (string, error) {
function getSecureRandom (line 480) | func getSecureRandom(byteLength int) []byte {
function padBytesLength (line 489) | func padBytesLength(src []byte, blockSize int) []byte {
function paddingTrim (line 495) | func paddingTrim(src []byte) ([]byte, error) {
function paddingOffset (line 516) | func paddingOffset(src []byte) int {
type contentKey (line 521) | type contentKey struct
type encryptionAgent (line 527) | type encryptionAgent struct
type keyMetadata (line 532) | type keyMetadata struct
type encryptionData (line 536) | type encryptionData struct
type snowflakeFileEncryption (line 544) | type snowflakeFileEncryption struct
type encryptionWrapper (line 553) | type encryptionWrapper struct
method UnmarshalJSON (line 25) | func (ew *encryptionWrapper) UnmarshalJSON(data []byte) error {
type encryptMetadata (line 558) | type encryptMetadata struct
type gcmEncryptMetadata (line 564) | type gcmEncryptMetadata struct
FILE: encrypt_util_test.go
constant timeFormat (line 21) | timeFormat = "2006-01-02T15:04:05"
type encryptDecryptTestFile (line 23) | type encryptDecryptTestFile struct
function TestEncryptDecryptFileCBC (line 28) | func TestEncryptDecryptFileCBC(t *testing.T) {
function TestEncryptDecryptFilePadding (line 73) | func TestEncryptDecryptFilePadding(t *testing.T) {
function TestEncryptDecryptLargeFileCBC (line 106) | func TestEncryptDecryptLargeFileCBC(t *testing.T) {
function TestEncryptStreamCBCReadError (line 123) | func TestEncryptStreamCBCReadError(t *testing.T) {
function TestDecryptStreamCBCReadError (line 138) | func TestDecryptStreamCBCReadError(t *testing.T) {
function encryptDecryptFile (line 164) | func encryptDecryptFile(t *testing.T, encMat snowflakeFileEncryption, ex...
function generateKLinesOfNByteRows (line 201) | func generateKLinesOfNByteRows(numLines int, numBytes int, tmpDir string...
function generateKLinesOfNFiles (line 219) | func generateKLinesOfNFiles(k int, n int, compress bool, tmpDir string) ...
function TestEncryptDecryptGCM (line 297) | func TestEncryptDecryptGCM(t *testing.T) {
function TestEncryptDecryptFileGCM (line 310) | func TestEncryptDecryptFileGCM(t *testing.T) {
FILE: errors.go
function generateTelemetryExceptionData (line 15) | func generateTelemetryExceptionData(se *SnowflakeError) *telemetryData {
function exceptionTelemetry (line 45) | func exceptionTelemetry(se *SnowflakeError, sc *snowflakeConn) *Snowflak...
function populateErrorFields (line 57) | func populateErrorFields(code int, data *execResponse) *SnowflakeError {
constant queryNotExecutingCode (line 76) | queryNotExecutingCode = "000605"
constant queryInProgressCode (line 77) | queryInProgressCode = "333333"
constant queryInProgressAsyncCode (line 78) | queryInProgressAsyncCode = "333334"
constant sessionExpiredCode (line 79) | sessionExpiredCode = "390112"
constant invalidOAuthAccessTokenCode (line 80) | invalidOAuthAccessTokenCode = "390303"
constant expiredOAuthAccessTokenCode (line 81) | expiredOAuthAccessTokenCode = "390318"
constant ErrCodeEmptyAccountCode (line 89) | ErrCodeEmptyAccountCode = sferrors.ErrCodeEmptyAccountCode
constant ErrCodeEmptyUsernameCode (line 91) | ErrCodeEmptyUsernameCode = sferrors.ErrCodeEmptyUsernameCode
constant ErrCodeEmptyPasswordCode (line 93) | ErrCodeEmptyPasswordCode = sferrors.ErrCodeEmptyPasswordCode
constant ErrCodeFailedToParseHost (line 95) | ErrCodeFailedToParseHost = sferrors.ErrCodeFailedToParseHost
constant ErrCodeFailedToParsePort (line 97) | ErrCodeFailedToParsePort = sferrors.ErrCodeFailedToParsePort
constant ErrCodeIdpConnectionError (line 99) | ErrCodeIdpConnectionError = sferrors.ErrCodeIdpConnectionError
constant ErrCodeSSOURLNotMatch (line 101) | ErrCodeSSOURLNotMatch = sferrors.ErrCodeSSOURLNotMatch
constant ErrCodeServiceUnavailable (line 103) | ErrCodeServiceUnavailable = sferrors.ErrCodeServiceUnavailable
constant ErrCodeFailedToConnect (line 105) | ErrCodeFailedToConnect = sferrors.ErrCodeFailedToConnect
constant ErrCodeRegionOverlap (line 107) | ErrCodeRegionOverlap = sferrors.ErrCodeRegionOverlap
constant ErrCodePrivateKeyParseError (line 109) | ErrCodePrivateKeyParseError = sferrors.ErrCodePrivateKeyParseError
constant ErrCodeFailedToParseAuthenticator (line 111) | ErrCodeFailedToParseAuthenticator = sferrors.ErrCodeFailedToParseAuthent...
constant ErrCodeClientConfigFailed (line 113) | ErrCodeClientConfigFailed = sferrors.ErrCodeClientConfigFailed
constant ErrCodeTomlFileParsingFailed (line 115) | ErrCodeTomlFileParsingFailed = sferrors.ErrCodeTomlFileParsingFailed
constant ErrCodeFailedToFindDSNInToml (line 117) | ErrCodeFailedToFindDSNInToml = sferrors.ErrCodeFailedToFindDSNInToml
constant ErrCodeInvalidFilePermission (line 119) | ErrCodeInvalidFilePermission = sferrors.ErrCodeInvalidFilePermission
constant ErrCodeEmptyPasswordAndToken (line 121) | ErrCodeEmptyPasswordAndToken = sferrors.ErrCodeEmptyPasswordAndToken
constant ErrCodeEmptyOAuthParameters (line 123) | ErrCodeEmptyOAuthParameters = sferrors.ErrCodeEmptyOAuthParameters
constant ErrMissingAccessATokenButRefreshTokenPresent (line 125) | ErrMissingAccessATokenButRefreshTokenPresent = sferrors.ErrMissingAccess...
constant ErrCodeMissingTLSConfig (line 127) | ErrCodeMissingTLSConfig = sferrors.ErrCodeMissingTLSConfig
constant ErrFailedToPostQuery (line 132) | ErrFailedToPostQuery = sferrors.ErrFailedToPostQuery
constant ErrFailedToRenewSession (line 134) | ErrFailedToRenewSession = sferrors.ErrFailedToRenewSession
constant ErrFailedToCancelQuery (line 136) | ErrFailedToCancelQuery = sferrors.ErrFailedToCancelQuery
constant ErrFailedToCloseSession (line 138) | ErrFailedToCloseSession = sferrors.ErrFailedToCloseSession
constant ErrFailedToAuth (line 140) | ErrFailedToAuth = sferrors.ErrFailedToAuth
constant ErrFailedToAuthSAML (line 142) | ErrFailedToAuthSAML = sferrors.ErrFailedToAuthSAML
constant ErrFailedToAuthOKTA (line 144) | ErrFailedToAuthOKTA = sferrors.ErrFailedToAuthOKTA
constant ErrFailedToGetSSO (line 146) | ErrFailedToGetSSO = sferrors.ErrFailedToGetSSO
constant ErrFailedToParseResponse (line 148) | ErrFailedToParseResponse = sferrors.ErrFailedToParseResponse
constant ErrFailedToGetExternalBrowserResponse (line 150) | ErrFailedToGetExternalBrowserResponse = sferrors.ErrFailedToGetExternalB...
constant ErrFailedToHeartbeat (line 152) | ErrFailedToHeartbeat = sferrors.ErrFailedToHeartbeat
constant ErrFailedToGetChunk (line 157) | ErrFailedToGetChunk = sferrors.ErrFailedToGetChunk
constant ErrNonArrowResponseInArrowBatches (line 159) | ErrNonArrowResponseInArrowBatches = sferrors.ErrNonArrowResponseInArrowB...
constant ErrNoReadOnlyTransaction (line 164) | ErrNoReadOnlyTransaction = sferrors.ErrNoReadOnlyTransaction
constant ErrNoDefaultTransactionIsolationLevel (line 166) | ErrNoDefaultTransactionIsolationLevel = sferrors.ErrNoDefaultTransaction...
constant ErrInvalidStageFs (line 171) | ErrInvalidStageFs = sferrors.ErrInvalidStageFs
constant ErrFailedToDownloadFromStage (line 173) | ErrFailedToDownloadFromStage = sferrors.ErrFailedToDownloadFromStage
constant ErrFailedToUploadToStage (line 175) | ErrFailedToUploadToStage = sferrors.ErrFailedToUploadToStage
constant ErrInvalidStageLocation (line 177) | ErrInvalidStageLocation = sferrors.ErrInvalidStageLocation
constant ErrLocalPathNotDirectory (line 179) | ErrLocalPathNotDirectory = sferrors.ErrLocalPathNotDirectory
constant ErrFileNotExists (line 181) | ErrFileNotExists = sferrors.ErrFileNotExists
constant ErrCompressionNotSupported (line 183) | ErrCompressionNotSupported = sferrors.ErrCompressionNotSupported
constant ErrInternalNotMatchEncryptMaterial (line 185) | ErrInternalNotMatchEncryptMaterial = sferrors.ErrInternalNotMatchEncrypt...
constant ErrCommandNotRecognized (line 187) | ErrCommandNotRecognized = sferrors.ErrCommandNotRecognized
constant ErrFailedToConvertToS3Client (line 189) | ErrFailedToConvertToS3Client = sferrors.ErrFailedToConvertToS3Client
constant ErrNotImplemented (line 191) | ErrNotImplemented = sferrors.ErrNotImplemented
constant ErrInvalidPadding (line 193) | ErrInvalidPadding = sferrors.ErrInvalidPadding
constant ErrBindSerialization (line 198) | ErrBindSerialization = sferrors.ErrBindSerialization
constant ErrBindUpload (line 200) | ErrBindUpload = sferrors.ErrBindUpload
constant ErrAsync (line 205) | ErrAsync = sferrors.ErrAsync
constant ErrNoResultIDs (line 210) | ErrNoResultIDs = sferrors.ErrNoResultIDs
constant ErrInvalidTimestampTz (line 215) | ErrInvalidTimestampTz = sferrors.ErrInvalidTimestampTz
constant ErrInvalidOffsetStr (line 218) | ErrInvalidOffsetStr = sferrors.ErrInvalidOffsetStr
constant ErrInvalidBinaryHexForm (line 220) | ErrInvalidBinaryHexForm = sferrors.ErrInvalidBinaryHexForm
constant ErrTooHighTimestampPrecision (line 222) | ErrTooHighTimestampPrecision = sferrors.ErrTooHighTimestampPrecision
constant ErrNullValueInArray (line 224) | ErrNullValueInArray = sferrors.ErrNullValueInArray
constant ErrNullValueInMap (line 226) | ErrNullValueInMap = sferrors.ErrNullValueInMap
constant ErrOCSPStatusRevoked (line 231) | ErrOCSPStatusRevoked = sferrors.ErrOCSPStatusRevoked
constant ErrOCSPStatusUnknown (line 233) | ErrOCSPStatusUnknown = sferrors.ErrOCSPStatusUnknown
constant ErrOCSPInvalidValidity (line 235) | ErrOCSPInvalidValidity = sferrors.ErrOCSPInvalidValidity
constant ErrOCSPNoOCSPResponderURL (line 237) | ErrOCSPNoOCSPResponderURL = sferrors.ErrOCSPNoOCSPResponderURL
constant ErrQueryStatus (line 242) | ErrQueryStatus = sferrors.ErrQueryStatus
constant ErrQueryIDFormat (line 244) | ErrQueryIDFormat = sferrors.ErrQueryIDFormat
constant ErrQueryReportedError (line 246) | ErrQueryReportedError = sferrors.ErrQueryReportedError
constant ErrQueryIsRunning (line 248) | ErrQueryIsRunning = sferrors.ErrQueryIsRunning
constant ErrSessionGone (line 253) | ErrSessionGone = sferrors.ErrSessionGone
constant ErrRoleNotExist (line 255) | ErrRoleNotExist = sferrors.ErrRoleNotExist
constant ErrObjectNotExistOrAuthorized (line 257) | ErrObjectNotExistOrAuthorized = sferrors.ErrObjectNotExistOrAuthorized
FILE: errors_test.go
function TestErrorMessage (line 8) | func TestErrorMessage(t *testing.T) {
FILE: file_compression_type.go
type compressionType (line 10) | type compressionType struct
function init (line 101) | func init() {
function lookupByMimeSubType (line 118) | func lookupByMimeSubType(mimeSubType string) *compressionType {
function lookupByExtension (line 125) | func lookupByExtension(extension string) *compressionType {
FILE: file_transfer_agent.go
type cloudType (line 32) | type cloudType
type commandType (line 33) | type commandType
constant fileProtocol (line 37) | fileProtocol = "file://"
constant multiPartThreshold (line 38) | multiPartThreshold int64 = 64 * 1024 * 1024
constant streamingMultiPartThreshold (line 39) | streamingMultiPartThreshold int64 = 8 * 1024 * 1024
constant isWindows (line 40) | isWindows = runtime.GOOS == "windows"
constant mb (line 41) | mb float64 = 1024.0 * 1024.0
constant uploadCommand (line 45) | uploadCommand commandType = "UPLOAD"
constant downloadCommand (line 46) | downloadCommand commandType = "DOWNLOAD"
constant unknownCommand (line 47) | unknownCommand commandType = "UNKNOWN"
constant putRegexp (line 49) | putRegexp string = `(?i)^(?:/\*.*\*/\s*)*\s*put\s+`
constant getRegexp (line 50) | getRegexp string = `(?i)^(?:/\*.*\*/\s*)*\s*get\s+`
constant s3Client (line 54) | s3Client cloudType = "S3"
constant azureClient (line 55) | azureClient cloudType = "AZURE"
constant gcsClient (line 56) | gcsClient cloudType = "GCS"
constant local (line 57) | local cloudType = "LOCAL_FS"
type resultStatus (line 60) | type resultStatus
method String (line 74) | func (rs resultStatus) String() string {
method isSet (line 80) | func (rs resultStatus) isSet() bool {
constant errStatus (line 63) | errStatus resultStatus = iota
constant uploaded (line 64) | uploaded
constant downloaded (line 65) | downloaded
constant skipped (line 66) | skipped
constant renewToken (line 67) | renewToken
constant renewPresignedURL (line 68) | renewPresignedURL
constant notFoundFile (line 69) | notFoundFile
constant needRetry (line 70) | needRetry
constant needRetryWithLowerConcurrency (line 71) | needRetryWithLowerConcurrency
type SnowflakeFileTransferOptions (line 86) | type SnowflakeFileTransferOptions struct
type snowflakeFileTransferAgent (line 104) | type snowflakeFileTransferAgent struct
method execute (line 130) | func (sfa *snowflakeFileTransferAgent) execute() error {
method parseCommand (line 213) | func (sfa *snowflakeFileTransferAgent) parseCommand() error {
method initEncryptionMaterial (line 299) | func (sfa *snowflakeFileTransferAgent) initEncryptionMaterial() {
method expandFilenames (line 316) | func (sfa *snowflakeFileTransferAgent) expandFilenames(locations []str...
method initFileMetadata (line 349) | func (sfa *snowflakeFileTransferAgent) initFileMetadata() error {
method processFileCompressionType (line 474) | func (sfa *snowflakeFileTransferAgent) processFileCompressionType() er...
method updateFileMetadataWithPresignedURL (line 567) | func (sfa *snowflakeFileTransferAgent) updateFileMetadataWithPresigned...
method transferAccelerateConfigWithUtil (line 648) | func (sfa *snowflakeFileTransferAgent) transferAccelerateConfigWithUti...
method transferAccelerateConfig (line 687) | func (sfa *snowflakeFileTransferAgent) transferAccelerateConfig() error {
method getLocalFilePathFromCommand (line 695) | func (sfa *snowflakeFileTransferAgent) getLocalFilePathFromCommand(com...
method upload (line 736) | func (sfa *snowflakeFileTransferAgent) upload(
method download (line 766) | func (sfa *snowflakeFileTransferAgent) download(
method uploadFilesParallel (line 784) | func (sfa *snowflakeFileTransferAgent) uploadFilesParallel(fileMetas [...
method uploadFilesSequential (line 880) | func (sfa *snowflakeFileTransferAgent) uploadFilesSequential(fileMetas...
method uploadOneFile (line 911) | func (sfa *snowflakeFileTransferAgent) uploadOneFile(meta *fileMetadat...
method downloadFilesParallel (line 952) | func (sfa *snowflakeFileTransferAgent) downloadFilesParallel(fileMetas...
method downloadOneFile (line 1035) | func (sfa *snowflakeFileTransferAgent) downloadOneFile(ctx context.Con...
method getStorageClient (line 1060) | func (sfa *snowflakeFileTransferAgent) getStorageClient(stageLocationT...
method renewExpiredClient (line 1074) | func (sfa *snowflakeFileTransferAgent) renewExpiredClient() (cloudClie...
method result (line 1089) | func (sfa *snowflakeFileTransferAgent) result() (*execResponse, error) {
type s3BucketAccelerateConfigGetter (line 639) | type s3BucketAccelerateConfigGetter interface
type s3ClientCreator (line 643) | type s3ClientCreator interface
function withCloudStorageTimeout (line 678) | func withCloudStorageTimeout[T any](ctx context.Context, cfg *Config, f ...
function isFileTransfer (line 1233) | func isFileTransfer(query string) bool {
type snowflakeProgressPercentage (line 1239) | type snowflakeProgressPercentage struct
method call (line 1249) | func (spp *snowflakeProgressPercentage) call(bytesAmount int64) {
method percent (line 1259) | func (spp *snowflakeProgressPercentage) percent(seenSoFar int64, size ...
method updateProgress (line 1266) | func (spp *snowflakeProgressPercentage) updateProgress(filename string...
function compressDataIfRequired (line 1298) | func compressDataIfRequired(meta *fileMetadata, fileUtil *snowflakeFileU...
function updateUploadSize (line 1310) | func updateUploadSize(meta *fileMetadata, fileUtil *snowflakeFileUtil) e...
function encryptDataIfRequired (line 1320) | func encryptDataIfRequired(meta *fileMetadata, ct cloudType) error {
FILE: file_transfer_agent_test.go
type tcFilePath (line 24) | type tcFilePath struct
function TestGetBucketAccelerateConfiguration (line 29) | func TestGetBucketAccelerateConfiguration(t *testing.T) {
type s3ClientCreatorMock (line 54) | type s3ClientCreatorMock struct
method extractBucketNameAndPath (line 59) | func (mock *s3ClientCreatorMock) extractBucketNameAndPath(location str...
method createClientWithConfig (line 63) | func (mock *s3ClientCreatorMock) createClientWithConfig(info *execResp...
type s3BucketAccelerateConfigGetterMock (line 67) | type s3BucketAccelerateConfigGetterMock struct
method GetBucketAccelerateConfiguration (line 71) | func (mock *s3BucketAccelerateConfigGetterMock) GetBucketAccelerateCon...
function TestGetBucketAccelerateConfigurationTooManyRetries (line 75) | func TestGetBucketAccelerateConfigurationTooManyRetries(t *testing.T) {
function TestGetBucketAccelerateConfigurationFailedExtractBucketNameAndPath (line 108) | func TestGetBucketAccelerateConfigurationFailedExtractBucketNameAndPath(...
function TestGetBucketAccelerateConfigurationFailedCreateClient (line 131) | func TestGetBucketAccelerateConfigurationFailedCreateClient(t *testing.T) {
function TestGetBucketAccelerateConfigurationInvalidClient (line 157) | func TestGetBucketAccelerateConfigurationInvalidClient(t *testing.T) {
function TestUnitDownloadWithInvalidLocalPath (line 183) | func TestUnitDownloadWithInvalidLocalPath(t *testing.T) {
function TestUnitGetLocalFilePathFromCommand (line 216) | func TestUnitGetLocalFilePathFromCommand(t *testing.T) {
function TestUnitProcessFileCompressionType (line 246) | func TestUnitProcessFileCompressionType(t *testing.T) {
function TestParseCommandWithInvalidStageLocation (line 293) | func TestParseCommandWithInvalidStageLocation(t *testing.T) {
function TestParseCommandEncryptionMaterialMismatchError (line 316) | func TestParseCommandEncryptionMaterialMismatchError(t *testing.T) {
function TestParseCommandInvalidStorageClientException (line 355) | func TestParseCommandInvalidStorageClientException(t *testing.T) {
function TestInitFileMetadataError (line 393) | func TestInitFileMetadataError(t *testing.T) {
function TestUpdateMetadataWithPresignedUrl (line 435) | func TestUpdateMetadataWithPresignedUrl(t *testing.T) {
function TestUpdateMetadataWithPresignedUrlForDownload (line 514) | func TestUpdateMetadataWithPresignedUrlForDownload(t *testing.T) {
function TestUpdateMetadataWithPresignedUrlError (line 564) | func TestUpdateMetadataWithPresignedUrlError(t *testing.T) {
function TestUpdateMetadataSkipsSecondQueryWithGcsDownscopedToken (line 588) | func TestUpdateMetadataSkipsSecondQueryWithGcsDownscopedToken(t *testing...
function TestUpdateMetadataStillQueriesWithPresignedUrlOnGcs (line 649) | func TestUpdateMetadataStillQueriesWithPresignedUrlOnGcs(t *testing.T) {
function TestUploadWhenFilesystemReadOnlyError (line 728) | func TestUploadWhenFilesystemReadOnlyError(t *testing.T) {
function TestUploadWhenErrorWithResultIsReturned (line 789) | func TestUploadWhenErrorWithResultIsReturned(t *testing.T) {
function createWriteonlyFile (line 844) | func createWriteonlyFile(dir, filename string) error {
function TestUnitUpdateProgress (line 857) | func TestUnitUpdateProgress(t *testing.T) {
function TestCustomTmpDirPath (line 886) | func TestCustomTmpDirPath(t *testing.T) {
function TestReadonlyTmpDirPathShouldFail (line 960) | func TestReadonlyTmpDirPathShouldFail(t *testing.T) {
function TestUploadDownloadOneFileRequireCompress (line 1022) | func TestUploadDownloadOneFileRequireCompress(t *testing.T) {
function TestUploadDownloadOneFileRequireCompressStream (line 1026) | func TestUploadDownloadOneFileRequireCompressStream(t *testing.T) {
function testUploadDownloadOneFile (line 1030) | func testUploadDownloadOneFile(t *testing.T, isStream bool) {
function TestPutGetRegexShouldIgnoreWhitespaceAtTheBeginning (line 1121) | func TestPutGetRegexShouldIgnoreWhitespaceAtTheBeginning(t *testing.T) {
function TestEncryptStream (line 1177) | func TestEncryptStream(t *testing.T) {
function TestEncryptFile (line 1316) | func TestEncryptFile(t *testing.T) {
FILE: file_util.go
type snowflakeFileUtil (line 15) | type snowflakeFileUtil struct
method compressFileWithGzipFromStream (line 23) | func (util *snowflakeFileUtil) compressFileWithGzipFromStream(srcStrea...
method compressFileWithGzip (line 40) | func (util *snowflakeFileUtil) compressFileWithGzip(fileName string, t...
method getDigestAndSizeForStream (line 75) | func (util *snowflakeFileUtil) getDigestAndSizeForStream(stream io.Rea...
method getDigestAndSizeForFile (line 93) | func (util *snowflakeFileUtil) getDigestAndSizeForFile(fileName string...
constant fileChunkSize (line 19) | fileChunkSize = 16 * 4 * 1024
constant readWriteFileMode (line 20) | readWriteFileMode os.FileMode = 0666
type fileMetadata (line 125) | type fileMetadata struct
type fileTransferResultType (line 178) | type fileTransferResultType struct
type fileHeader (line 190) | type fileHeader struct
function getReaderFromBuffer (line 196) | func getReaderFromBuffer(src **bytes.Buffer) io.Reader {
function baseName (line 204) | func baseName(path string) string {
function expandUser (line 216) | func expandUser(path string) (string, error) {
function getDirectory (line 233) | func getDirectory() (string, error) {
FILE: file_util_test.go
function TestGetDigestAndSizeForInvalidDir (line 9) | func TestGetDigestAndSizeForInvalidDir(t *testing.T) {
type tcBaseName (line 23) | type tcBaseName struct
function TestBaseName (line 28) | func TestBaseName(t *testing.T) {
function TestExpandUser (line 45) | func TestExpandUser(t *testing.T) {
FILE: function_wrapper_test.go
function TestGoWrapper (line 9) | func TestGoWrapper(t *testing.T) {
FILE: function_wrappers.go
type GoroutineWrapperFunc (line 8) | type GoroutineWrapperFunc
FILE: gcs_storage_client.go
constant gcsMetadataPrefix (line 17) | gcsMetadataPrefix = "x-goog-meta-"
constant gcsMetadataSfcDigest (line 18) | gcsMetadataSfcDigest = gcsMetadataPrefix + sfcDigest
constant gcsMetadataMatdescKey (line 19) | gcsMetadataMatdescKey = gcsMetadataPrefix + "matdesc"
constant gcsMetadataEncryptionDataProp (line 20) | gcsMetadataEncryptionDataProp = gcsMetadataPrefix + "encryptiondata"
constant gcsFileHeaderDigest (line 21) | gcsFileHeaderDigest = "gcs-file-header-digest"
constant gcsRegionMeCentral2 (line 22) | gcsRegionMeCentral2 = "me-central2"
constant minimumDownloadPartSize (line 23) | minimumDownloadPartSize = 1024 * 1024 * 5
type snowflakeGcsClient (line 26) | type snowflakeGcsClient struct
method createClient (line 36) | func (util *snowflakeGcsClient) createClient(info *execResponseStageIn...
method getFileHeader (line 46) | func (util *snowflakeGcsClient) getFileHeader(ctx context.Context, met...
method uploadFile (line 156) | func (util *snowflakeGcsClient) uploadFile(
method nativeDownloadFile (line 305) | func (util *snowflakeGcsClient) nativeDownloadFile(
method getFileHeaderForDownload (line 377) | func (util *snowflakeGcsClient) getFileHeaderForDownload(ctx context.C...
method downloadFileInParts (line 435) | func (util *snowflakeGcsClient) downloadFileInParts(
method downloadInPartsForStream (line 457) | func (util *snowflakeGcsClient) downloadInPartsForStream(
method downloadInPartsForFile (line 560) | func (util *snowflakeGcsClient) downloadInPartsForFile(
method downloadRangeStream (line 639) | func (util *snowflakeGcsClient) downloadRangeStream(
method downloadRangeBytes (line 679) | func (util *snowflakeGcsClient) downloadRangeBytes(
method downloadFileSinglePart (line 708) | func (util *snowflakeGcsClient) downloadFileSinglePart(
method handleHTTPError (line 778) | func (util *snowflakeGcsClient) handleHTTPError(resp *http.Response, m...
method extractBucketNameAndPath (line 797) | func (util *snowflakeGcsClient) extractBucketNameAndPath(location stri...
method generateFileURL (line 810) | func (util *snowflakeGcsClient) generateFileURL(stageInfo *execRespons...
method isTokenExpired (line 834) | func (util *snowflakeGcsClient) isTokenExpired(resp *http.Response) bo...
type gcsLocation (line 31) | type gcsLocation struct
type gcsAPI (line 151) | type gcsAPI interface
type downloadPart (line 416) | type downloadPart struct
type downloadPartStream (line 423) | type downloadPartStream struct
type downloadJob (line 429) | type downloadJob struct
function newGcsClient (line 838) | func newGcsClient(cfg *Config, telemetry *snowflakeTelemetry) (gcsAPI, e...
FILE: gcs_storage_client_test.go
type tcFileURL (line 18) | type tcFileURL struct
function TestExtractBucketAndPath (line 25) | func TestExtractBucketAndPath(t *testing.T) {
function TestIsTokenExpiredWith401 (line 47) | func TestIsTokenExpiredWith401(t *testing.T) {
function TestIsTokenExpiredWith404 (line 66) | func TestIsTokenExpiredWith404(t *testing.T) {
function TestGenerateFileURL (line 99) | func TestGenerateFileURL(t *testing.T) {
type clientMock (line 147) | type clientMock struct
method Do (line 151) | func (c *clientMock) Do(req *http.Request) (*http.Response, error) {
function TestUploadFileWithGcsUploadFailedError (line 155) | func TestUploadFileWithGcsUploadFailedError(t *testing.T) {
function TestUploadFileWithGcsUploadFailedWithRetry (line 210) | func TestUploadFileWithGcsUploadFailedWithRetry(t *testing.T) {
function TestUploadFileWithGcsUploadFailedWithTokenExpired (line 281) | func TestUploadFileWithGcsUploadFailedWithTokenExpired(t *testing.T) {
function TestDownloadOneFileFromGcsFailed (line 348) | func TestDownloadOneFileFromGcsFailed(t *testing.T) {
function TestDownloadOneFileFromGcsFailedWithRetry (line 394) | func TestDownloadOneFileFromGcsFailedWithRetry(t *testing.T) {
function TestDownloadOneFileFromGcsFailedWithTokenExpired (line 450) | func TestDownloadOneFileFromGcsFailedWithTokenExpired(t *testing.T) {
function TestDownloadOneFileFromGcsFailedWithFileNotFound (line 509) | func TestDownloadOneFileFromGcsFailedWithFileNotFound(t *testing.T) {
function TestGetHeaderTokenExpiredError (line 568) | func TestGetHeaderTokenExpiredError(t *testing.T) {
function TestGetHeaderFileNotFound (line 604) | func TestGetHeaderFileNotFound(t *testing.T) {
function TestGetHeaderPresignedUrlReturns404 (line 640) | func TestGetHeaderPresignedUrlReturns404(t *testing.T) {
function TestGetHeaderReturnsError (line 670) | func TestGetHeaderReturnsError(t *testing.T) {
function TestGetHeaderBadRequest (line 697) | func TestGetHeaderBadRequest(t *testing.T) {
function TestGetHeaderRetryableError (line 734) | func TestGetHeaderRetryableError(t *testing.T) {
function TestUploadStreamFailed (line 770) | func TestUploadStreamFailed(t *testing.T) {
function TestUploadFileWithBadRequest (line 817) | func TestUploadFileWithBadRequest(t *testing.T) {
function TestGetFileHeaderEncryptionData (line 881) | func TestGetFileHeaderEncryptionData(t *testing.T) {
function TestGetFileHeaderEncryptionDataInterfaceConversionError (line 932) | func TestGetFileHeaderEncryptionDataInterfaceConversionError(t *testing....
function TestUploadFileToGcsNoStatus (line 971) | func TestUploadFileToGcsNoStatus(t *testing.T) {
function TestDownloadFileFromGcsError (line 1037) | func TestDownloadFileFromGcsError(t *testing.T) {
function TestDownloadFileWithBadRequest (line 1088) | func TestDownloadFileWithBadRequest(t *testing.T) {
function Test_snowflakeGcsClient_uploadFile (line 1144) | func Test_snowflakeGcsClient_uploadFile(t *testing.T) {
function Test_snowflakeGcsClient_nativeDownloadFile (line 1162) | func Test_snowflakeGcsClient_nativeDownloadFile(t *testing.T) {
function TestGetGcsCustomEndpoint (line 1180) | func TestGetGcsCustomEndpoint(t *testing.T) {
FILE: heartbeat.go
constant minHeartBeatInterval (line 14) | minHeartBeatInterval = 900 * time.Second
constant maxHeartBeatInterval (line 15) | maxHeartBeatInterval = 3600 * time.Second
constant defaultHeartBeatInterval (line 16) | defaultHeartBeatInterval = 3600 * time.Second
function newDefaultHeartBeat (line 19) | func newDefaultHeartBeat(restful *snowflakeRestful) *heartbeat {
function newHeartBeat (line 23) | func newHeartBeat(restful *snowflakeRestful, heartbeatInterval time.Dura...
type heartbeat (line 39) | type heartbeat struct
method run (line 46) | func (hc *heartbeat) run() {
method start (line 65) | func (hc *heartbeat) start() {
method stop (line 73) | func (hc *heartbeat) stop() {
method heartbeatMain (line 81) | func (hc *heartbeat) heartbeatMain() error {
FILE: heartbeat_test.go
function TestUnitPostHeartbeat (line 9) | func TestUnitPostHeartbeat(t *testing.T) {
function TestHeartbeatStartAndStop (line 41) | func TestHeartbeatStartAndStop(t *testing.T) {
function TestHeartbeatIntervalLowerThanMin (line 59) | func TestHeartbeatIntervalLowerThanMin(t *testing.T) {
function TestHeartbeatIntervalHigherThanMax (line 70) | func TestHeartbeatIntervalHigherThanMax(t *testing.T) {
FILE: htap.go
constant queryContextCacheSizeParamName (line 10) | queryContextCacheSizeParamName = "QUERY_CONTEXT_CACHE_SIZE"
constant defaultQueryContextCacheSize (line 11) | defaultQueryContextCacheSize = 5
type queryContext (line 14) | type queryContext struct
type queryContextEntry (line 18) | type queryContextEntry struct
type queryContextCache (line 25) | type queryContextCache struct
method add (line 30) | func (qcc *queryContextCache) add(sc *snowflakeConn, qces ...queryCont...
method prune (line 70) | func (qcc *queryContextCache) prune(size int) {
method getQueryContextCacheSize (line 76) | func (qcc *queryContextCache) getQueryContextCacheSize(sc *snowflakeCo...
FILE: htap_test.go
function TestSortingByPriority (line 16) | func TestSortingByPriority(t *testing.T) {
function TestAddingQcesWithTheSameIdAndLaterTimestamp (line 51) | func TestAddingQcesWithTheSameIdAndLaterTimestamp(t *testing.T) {
function TestAddingQcesWithTheSameIdAndSameTimestamp (line 81) | func TestAddingQcesWithTheSameIdAndSameTimestamp(t *testing.T) {
function TestAddingQcesWithTheSameIdAndEarlierTimestamp (line 111) | func TestAddingQcesWithTheSameIdAndEarlierTimestamp(t *testing.T) {
function TestAddingQcesWithDifferentId (line 141) | func TestAddingQcesWithDifferentId(t *testing.T) {
function TestAddingQueryContextCacheEntry (line 171) | func TestAddingQueryContextCacheEntry(t *testing.T) {
function containsNewEntries (line 200) | func containsNewEntries(entriesAfter []queryContextEntry, entriesBefore ...
function TestPruneBySessionValue (line 216) | func TestPruneBySessionValue(t *testing.T) {
function TestPruneByDefaultValue (line 266) | func TestPruneByDefaultValue(t *testing.T) {
function TestNoQcesClearsCache (line 295) | func TestNoQcesClearsCache(t *testing.T) {
function TestQCCUpdatedAfterQueryResponse (line 316) | func TestQCCUpdatedAfterQueryResponse(t *testing.T) {
function htapTestSnowflakeConn (line 382) | func htapTestSnowflakeConn() *snowflakeConn {
function TestQueryContextCacheDisabled (line 388) | func TestQueryContextCacheDisabled(t *testing.T) {
function TestHybridTablesE2E (line 398) | func TestHybridTablesE2E(t *testing.T) {
function TestHTAPOptimizations (line 485) | func TestHTAPOptimizations(t *testing.T) {
function TestConnIsCleanAfterClose (line 601) | func TestConnIsCleanAfterClose(t *testing.T) {
FILE: internal/arrow/arrow.go
type contextKey (line 13) | type contextKey
constant ctxArrowBatches (line 17) | ctxArrowBatches contextKey = "ARROW_BATCHES"
constant ctxArrowBatchesTimestampOpt (line 18) | ctxArrowBatchesTimestampOpt contextKey = "ARROW_BATCHES_TIMESTAMP_OPTION"
constant ctxArrowBatchesUtf8Validate (line 19) | ctxArrowBatchesUtf8Validate contextKey = "ENABLE_ARROW_BATCHES_UTF8_VALI...
constant ctxHigherPrecision (line 20) | ctxHigherPrecision contextKey = "ENABLE_HIGHER_PRECISION"
type TimestampOption (line 26) | type TimestampOption
constant UseNanosecondTimestamp (line 30) | UseNanosecondTimestamp TimestampOption = iota
constant UseMicrosecondTimestamp (line 32) | UseMicrosecondTimestamp
constant UseMillisecondTimestamp (line 34) | UseMillisecondTimestamp
constant UseSecondTimestamp (line 36) | UseSecondTimestamp
constant UseOriginalTimestamp (line 38) | UseOriginalTimestamp
function EnableArrowBatches (line 44) | func EnableArrowBatches(ctx context.Context) context.Context {
function BatchesEnabled (line 49) | func BatchesEnabled(ctx context.Context) bool {
function WithTimestampOption (line 59) | func WithTimestampOption(ctx context.Context, option TimestampOption) co...
function GetTimestampOption (line 64) | func GetTimestampOption(ctx context.Context) TimestampOption {
function EnableUtf8Validation (line 77) | func EnableUtf8Validation(ctx context.Context) context.Context {
function Utf8ValidationEnabled (line 82) | func Utf8ValidationEnabled(ctx context.Context) bool {
function WithHigherPrecision (line 92) | func WithHigherPrecision(ctx context.Context) context.Context {
function HigherPrecisionEnabled (line 97) | func HigherPrecisionEnabled(ctx context.Context) bool {
type BatchRaw (line 107) | type BatchRaw struct
type BatchDataInfo (line 116) | type BatchDataInfo struct
type BatchDataProvider (line 125) | type BatchDataProvider interface
FILE: internal/compilation/linking_mode.go
type LinkingMode (line 11) | type LinkingMode
method String (line 22) | func (lm *LinkingMode) String() string {
constant StaticLinking (line 15) | StaticLinking LinkingMode = iota
constant DynamicLinking (line 17) | DynamicLinking
constant UnknownLinking (line 19) | UnknownLinking
function CheckDynamicLinking (line 37) | func CheckDynamicLinking() (LinkingMode, error) {
FILE: internal/config/assert_test.go
function maskSecrets (line 15) | func maskSecrets(text string) string {
function assertNilE (line 19) | func assertNilE(t *testing.T, actual any, descriptions ...string) {
function assertNilF (line 24) | func assertNilF(t *testing.T, actual any, descriptions ...string) {
function assertNotNilF (line 29) | func assertNotNilF(t *testing.T, actual any, descriptions ...string) {
function assertEqualE (line 34) | func assertEqualE(t *testing.T, actual any, expected any, descriptions ....
function assertEqualF (line 39) | func assertEqualF(t *testing.T, actual any, expected any, descriptions ....
function assertTrueE (line 44) | func assertTrueE(t *testing.T, actual bool, descriptions ...string) {
function assertTrueF (line 49) | func assertTrueF(t *testing.T, actual bool, descriptions ...string) {
function assertFalseE (line 54) | func assertFalseE(t *testing.T, actual bool, descriptions ...string) {
function fatalOnNonEmpty (line 59) | func fatalOnNonEmpty(t *testing.T, errMsg string) {
function errorOnNonEmpty (line 66) | func errorOnNonEmpty(t *testing.T, errMsg string) {
function formatErrorMessage (line 73) | func formatErrorMessage(errMsg string) string {
function validateNil (line 77) | func validateNil(actual any, descriptions ...string) string {
function validateNotNil (line 85) | func validateNotNil(actual any, descriptions ...string) string {
function validateEqual (line 93) | func validateEqual(actual any, expected any, descriptions ...string) str...
function joinDescriptions (line 104) | func joinDescriptions(descriptions ...string) string {
function isNil (line 108) | func isNil(value any) bool {
FILE: internal/config/auth_type.go
type AuthType (line 11) | type AuthType
method String (line 38) | func (authType AuthType) String() string {
constant AuthTypeSnowflake (line 15) | AuthTypeSnowflake AuthType = iota
constant AuthTypeOAuth (line 17) | AuthTypeOAuth
constant AuthTypeExternalBrowser (line 19) | AuthTypeExternalBrowser
constant AuthTypeOkta (line 21) | AuthTypeOkta
constant AuthTypeJwt (line 23) | AuthTypeJwt
constant AuthTypeTokenAccessor (line 25) | AuthTypeTokenAccessor
constant AuthTypeUsernamePasswordMFA (line 27) | AuthTypeUsernamePasswordMFA
constant AuthTypePat (line 29) | AuthTypePat
constant AuthTypeOAuthAuthorizationCode (line 31) | AuthTypeOAuthAuthorizationCode
constant AuthTypeOAuthClientCredentials (line 33) | AuthTypeOAuthClientCredentials
constant AuthTypeWorkloadIdentityFederation (line 35) | AuthTypeWorkloadIdentityFederation
function DetermineAuthenticatorType (line 68) | func DetermineAuthenticatorType(cfg *Config, value string) error {
FILE: internal/config/config.go
type Config (line 15) | type Config struct
method Validate (line 128) | func (c *Config) Validate() error {
type Param (line 144) | type Param struct
FILE: internal/config/config_bool.go
type Bool (line 4) | type Bool
method String (line 15) | func (cb Bool) String() string {
constant BoolNotSet (line 8) | BoolNotSet Bool = iota
constant BoolTrue (line 10) | BoolTrue
constant BoolFalse (line 12) | BoolFalse
FILE: internal/config/connection_configuration.go
constant snowflakeConnectionName (line 18) | snowflakeConnectionName = "SNOWFLAKE_DEFAULT_CONNECTION_NAME"
constant snowflakeHome (line 19) | snowflakeHome = "SNOWFLAKE_HOME"
constant defaultTokenPath (line 20) | defaultTokenPath = "/snowflake/session/token"
constant othersCanReadFilePermission (line 22) | othersCanReadFilePermission = os.FileMode(0044)
constant othersCanWriteFilePermission (line 23) | othersCanWriteFilePermission = os.FileMode(0022)
constant executableFilePermission (line 24) | executableFilePermission = os.FileMode(0111)
constant skipWarningForReadPermissionsEnv (line 26) | skipWarningForReadPermissionsEnv = "SF_SKIP_WARNING_FOR_READ_PERMISSIONS...
function LoadConnectionConfig (line 32) | func LoadConnectionConfig() (*Config, error) {
function ParseToml (line 78) | func ParseToml(cfg *Config, connectionMap map[string]any) error {
function HandleSingleParam (line 88) | func HandleSingleParam(cfg *Config, key string, value any) error {
function checkParsingError (line 243) | func checkParsingError(err error, key string, value any) error {
function ParseInt (line 258) | func ParseInt(i any) (int, error) {
function ParseBool (line 271) | func ParseBool(i any) (bool, error) {
function parseConfigBool (line 283) | func parseConfigBool(i any) (Bool, error) {
function ParseDuration (line 295) | func ParseDuration(i any) (time.Duration, error) {
function ReadToken (line 309) | func ReadToken(tokenPath string) (string, error) {
function parseString (line 331) | func parseString(i any) (string, error) {
function parseStrings (line 339) | func parseStrings(i any) ([]string, error) {
function GetTomlFilePath (line 348) | func GetTomlFilePath(filePath string) (string, error) {
function getConnectionDSN (line 363) | func getConnectionDSN(dsn string) string {
function ValidateFilePermission (line 371) | func ValidateFilePermission(filePath string) error {
function shouldSkipWarningForReadPermissions (line 408) | func shouldSkipWarningForReadPermissions() bool {
FILE: internal/config/connection_configuration_test.go
function TestTokenFilePermission (line 21) | func TestTokenFilePermission(t *testing.T) {
function TestLoadConnectionConfigForStandardAuth (line 205) | func TestLoadConnectionConfigForStandardAuth(t *testing.T) {
function TestLoadConnectionConfigForOAuth (line 229) | func TestLoadConnectionConfigForOAuth(t *testing.T) {
function TestLoadConnectionConfigForSnakeCaseConfiguration (line 261) | func TestLoadConnectionConfigForSnakeCaseConfiguration(t *testing.T) {
function TestReadTokenValueWithTokenFilePath (line 279) | func TestReadTokenValueWithTokenFilePath(t *testing.T) {
function TestLoadConnectionConfigWitNonExistingDSN (line 317) | func TestLoadConnectionConfigWitNonExistingDSN(t *testing.T) {
function TestParseInt (line 340) | func TestParseInt(t *testing.T) {
function TestParseBool (line 368) | func TestParseBool(t *testing.T) {
function TestParseDuration (line 396) | func TestParseDuration(t *testing.T) {
type paramList (line 424) | type paramList struct
function testGeneratePKCS8String (line 429) | func testGeneratePKCS8String(key *rsa.PrivateKey) string {
function TestParseToml (line 434) | func TestParseToml(t *testing.T) {
function TestParseTomlWithWrongValue (line 491) | func TestParseTomlWithWrongValue(t *testing.T) {
function TestGetTomlFilePath (line 535) | func TestGetTomlFilePath(t *testing.T) {
function assertEqual (line 574) | func assertEqual[T comparable](t *testing.T, got, want T) {
FILE: internal/config/crl_mode.go
type CertRevocationCheckMode (line 9) | type CertRevocationCheckMode
method String (line 21) | func (m CertRevocationCheckMode) String() string {
constant CertRevocationCheckDisabled (line 13) | CertRevocationCheckDisabled CertRevocationCheckMode = iota
constant CertRevocationCheckAdvisory (line 16) | CertRevocationCheckAdvisory
constant CertRevocationCheckEnabled (line 18) | CertRevocationCheckEnabled
function ParseCertRevocationCheckMode (line 35) | func ParseCertRevocationCheckMode(s string) (CertRevocationCheckMode, er...
FILE: internal/config/dsn.go
constant DefaultClientTimeout (line 24) | DefaultClientTimeout = 900 * time.Second
constant DefaultJWTClientTimeout (line 26) | DefaultJWTClientTimeout = 10 * time.Second
constant DefaultLoginTimeout (line 28) | DefaultLoginTimeout = 300 * time.Second
constant DefaultRequestTimeout (line 30) | DefaultRequestTimeout = 0 * time.Second
constant DefaultJWTTimeout (line 32) | DefaultJWTTimeout = 60 * time.Second
constant DefaultExternalBrowserTimeout (line 34) | DefaultExternalBrowserTimeout = 120 * time.Second
constant defaultCloudStorageTimeout (line 35) | defaultCloudStorageTimeout = -1
constant defaultMaxRetryCount (line 36) | defaultMaxRetryCount = 7
constant DefaultDomain (line 38) | DefaultDomain = ".snowflakecomputing.com"
constant CnDomain (line 40) | CnDomain = ".snowflakecomputing.cn"
constant topLevelDomainPrefix (line 41) | topLevelDomainPrefix = ".snowflakecomputing."
constant clientType (line 44) | clientType = "Go"
function GetFromEnv (line 48) | func GetFromEnv(name string, failOnMissing bool) (string, error) {
function DSN (line 59) | func DSN(cfg *Config) (dsn string, err error) {
function ParseDSN (line 308) | func ParseDSN(dsn string) (cfg *Config, err error) {
function applyAccountFromHostIfMissing (line 451) | func applyAccountFromHostIfMissing(cfg *Config) {
function FillMissingConfigParameters (line 466) | func FillMissingConfigParameters(cfg *Config) error {
function extractDomainFromHost (line 587) | func extractDomainFromHost(host string) (domain string, index int) {
function getDomainBasedOnRegion (line 596) | func getDomainBasedOnRegion(region string) string {
function extractRegionFromAccount (line 603) | func extractRegionFromAccount(account string) (region string, posDot int) {
function hostIncludesTopLevelDomain (line 611) | func hostIncludesTopLevelDomain(host string) bool {
function buildHostFromAccountAndRegion (line 615) | func buildHostFromAccountAndRegion(account, region string) string {
function authRequiresUser (line 619) | func authRequiresUser(cfg *Config) bool {
function authRequiresPassword (line 629) | func authRequiresPassword(cfg *Config) bool {
function authRequiresEitherPasswordOrToken (line 640) | func authRequiresEitherPasswordOrToken(cfg *Config) bool {
function authRequiresClientIDAndSecret (line 644) | func authRequiresClientIDAndSecret(cfg *Config) bool {
function transformAccountToHost (line 649) | func transformAccountToHost(cfg *Config) (err error) {
function parseAccountHostPort (line 672) | func parseAccountHostPort(cfg *Config, posAt, posSlash int, dsn string) ...
function parseUserPassword (line 694) | func parseUserPassword(posAt int, dsn string) (user, password string) {
function parseParams (line 707) | func parseParams(cfg *Config, posQuestion int, dsn string) (err error) {
function parseDSNParams (line 720) | func parseDSNParams(cfg *Config, params string) (err error) {
function parseTimeout (line 1061) | func parseTimeout(value string) (time.Duration, error) {
function GetConfigFromEnv (line 1072) | func GetConfigFromEnv(properties []*Param) (*Config, error) {
function parsePrivateKeyFromFile (line 1174) | func parsePrivateKeyFromFile(path string) (*rsa.PrivateKey, error) {
function ExtractAccountName (line 1195) | func ExtractAccountName(rawAccount string) string {
function urlDecodeIfNeeded (line 1203) | func urlDecodeIfNeeded(param string) (decodedParam string) {
function GetToken (line 1212) | func GetToken(c *Config) (string, error) {
function DescribeIdentityAttributes (line 1220) | func DescribeIdentityAttributes(c *Config) string {
function DescribeProxy (line 1226) | func DescribeProxy(c *Config) string {
FILE: internal/config/dsn_test.go
type tcParseDSN (line 26) | type tcParseDSN struct
function TestParseDSN (line 33) | func TestParseDSN(t *testing.T) {
type tcDSN (line 1403) | type tcDSN struct
function TestDSN (line 1409) | func TestDSN(t *testing.T) {
function TestParsePrivateKeyFromFileMissingFile (line 2132) | func TestParsePrivateKeyFromFileMissingFile(t *testing.T) {
function TestParsePrivateKeyFromFileIncorrectData (line 2140) | func TestParsePrivateKeyFromFileIncorrectData(t *testing.T) {
function TestParsePrivateKeyFromFileNotRSAPrivateKey (line 2149) | func TestParsePrivateKeyFromFileNotRSAPrivateKey(t *testing.T) {
function TestParsePrivateKeyFromFile (line 2176) | func TestParsePrivateKeyFromFile(t *testing.T) {
function createTmpFile (line 2197) | func createTmpFile(t *testing.T, fileName string, content []byte) string {
type configParamToValue (line 2205) | type configParamToValue struct
function TestGetConfigFromEnv (line 2210) | func TestGetConfigFromEnv(t *testing.T) {
function checkConfig (line 2251) | func checkConfig(cfg Config, envMap map[string]configParamToValue) error {
function TestConfigValidateTmpDirPath (line 2284) | func TestConfigValidateTmpDirPath(t *testing.T) {
function TestExtractAccountName (line 2293) | func TestExtractAccountName(t *testing.T) {
function TestUrlDecodeIfNeeded (line 2316) | func TestUrlDecodeIfNeeded(t *testing.T) {
function TestDSNParsingWithTLSConfig (line 2329) | func TestDSNParsingWithTLSConfig(t *testing.T) {
function TestTokenAndTokenFilePathValidation (line 2390) | func TestTokenAndTokenFilePathValidation(t *testing.T) {
function TestFillMissingConfigParametersDerivesAccountFromHost (line 2410) | func TestFillMissingConfigParametersDerivesAccountFromHost(t *testing.T) {
function TestFillMissingConfigParametersDerivesAccountFromCNHost (line 2425) | func TestFillMissingConfigParametersDerivesAccountFromCNHost(t *testing....
function TestFillMissingConfigParametersNonSnowflakeHostRequiresAccount (line 2440) | func TestFillMissingConfigParametersNonSnowflakeHostRequiresAccount(t *t...
function generatePKCS8StringSupress (line 2457) | func generatePKCS8StringSupress(key *rsa.PrivateKey) string {
function generatePKCS1String (line 2466) | func generatePKCS1String(key *rsa.PrivateKey) string {
FILE: internal/config/ocsp_mode.go
type OCSPFailOpenMode (line 5) | type OCSPFailOpenMode
constant OCSPFailOpenNotSet (line 9) | OCSPFailOpenNotSet OCSPFailOpenMode = iota
constant OCSPFailOpenTrue (line 11) | OCSPFailOpenTrue
constant OCSPFailOpenFalse (line 13) | OCSPFailOpenFalse
constant ocspModeFailOpen (line 17) | ocspModeFailOpen = "FAIL_OPEN"
constant ocspModeFailClosed (line 18) | ocspModeFailClosed = "FAIL_CLOSED"
constant ocspModeDisabled (line 19) | ocspModeDisabled = "INSECURE"
function OcspMode (line 23) | func OcspMode(c *Config) string {
FILE: internal/config/priv_key.go
function ParsePKCS8PrivateKey (line 11) | func ParsePKCS8PrivateKey(block []byte) (*rsa.PrivateKey, error) {
function MarshalPKCS8PrivateKey (line 23) | func MarshalPKCS8PrivateKey(key *rsa.PrivateKey) ([]byte, error) {
FILE: internal/config/tls_config.go
function ResetTLSConfigRegistry (line 14) | func ResetTLSConfigRegistry() {
function RegisterTLSConfig (line 22) | func RegisterTLSConfig(key string, config *tls.Config) error {
function DeregisterTLSConfig (line 31) | func DeregisterTLSConfig(key string) error {
function GetTLSConfig (line 40) | func GetTLSConfig(key string) (*tls.Config, bool) {
FILE: internal/config/tls_config_test.go
function TestRegisterTLSConfig (line 9) | func TestRegisterTLSConfig(t *testing.T) {
function TestDeregisterTLSConfig (line 31) | func TestDeregisterTLSConfig(t *testing.T) {
function TestGetTLSConfigNonExistent (line 57) | func TestGetTLSConfigNonExistent(t *testing.T) {
function TestRegisterTLSConfigWithCustomRootCAs (line 62) | func TestRegisterTLSConfigWithCustomRootCAs(t *testing.T) {
function TestMultipleTLSConfigs (line 85) | func TestMultipleTLSConfigs(t *testing.T) {
FILE: internal/config/token_accessor.go
type TokenAccessor (line 4) | type TokenAccessor interface
FILE: internal/errors/errors.go
type SnowflakeError (line 11) | type SnowflakeError struct
method Error (line 20) | func (se *SnowflakeError) Error() string {
constant QueryNotExecutingCode (line 39) | QueryNotExecutingCode = "000605"
constant QueryInProgressCode (line 40) | QueryInProgressCode = "333333"
constant QueryInProgressAsyncCode (line 41) | QueryInProgressAsyncCode = "333334"
constant SessionExpiredCode (line 42) | SessionExpiredCode = "390112"
constant InvalidOAuthAccessTokenCode (line 43) | InvalidOAuthAccessTokenCode = "390303"
constant ExpiredOAuthAccessTokenCode (line 44) | ExpiredOAuthAccessTokenCode = "390318"
constant ErrCodeEmptyAccountCode (line 52) | ErrCodeEmptyAccountCode = 260000
constant ErrCodeEmptyUsernameCode (line 54) | ErrCodeEmptyUsernameCode = 260001
constant ErrCodeEmptyPasswordCode (line 56) | ErrCodeEmptyPasswordCode = 260002
constant ErrCodeFailedToParseHost (line 58) | ErrCodeFailedToParseHost = 260003
constant ErrCodeFailedToParsePort (line 60) | ErrCodeFailedToParsePort = 260004
constant ErrCodeIdpConnectionError (line 62) | ErrCodeIdpConnectionError = 260005
constant ErrCodeSSOURLNotMatch (line 64) | ErrCodeSSOURLNotMatch = 260006
constant ErrCodeServiceUnavailable (line 66) | ErrCodeServiceUnavailable = 260007
constant ErrCodeFailedToConnect (line 68) | ErrCodeFailedToConnect = 260008
constant ErrCodeRegionOverlap (line 70) | ErrCodeRegionOverlap = 260009
constant ErrCodePrivateKeyParseError (line 72) | ErrCodePrivateKeyParseError = 260010
constant ErrCodeFailedToParseAuthenticator (line 74) | ErrCodeFailedToParseAuthenticator = 260011
constant ErrCodeClientConfigFailed (line 76) | ErrCodeClientConfigFailed = 260012
constant ErrCodeTomlFileParsingFailed (line 78) | ErrCodeTomlFileParsingFailed = 260013
constant ErrCodeFailedToFindDSNInToml (line 80) | ErrCodeFailedToFindDSNInToml = 260014
constant ErrCodeInvalidFilePermission (line 82) | ErrCodeInvalidFilePermission = 260015
constant ErrCodeEmptyPasswordAndToken (line 84) | ErrCodeEmptyPasswordAndToken = 260016
constant ErrCodeEmptyOAuthParameters (line 86) | ErrCodeEmptyOAuthParameters = 260017
constant ErrMissingAccessATokenButRefreshTokenPresent (line 88) | ErrMissingAccessATokenButRefreshTokenPresent = 260018
constant ErrCodeMissingTLSConfig (line 90) | ErrCodeMissingTLSConfig = 260019
constant ErrFailedToPostQuery (line 95) | ErrFailedToPostQuery = 261000
constant ErrFailedToRenewSession (line 97) | ErrFailedToRenewSession = 261001
constant ErrFailedToCancelQuery (line 99) | ErrFailedToCancelQuery = 261002
constant ErrFailedToCloseSession (line 101) | ErrFailedToCloseSession = 261003
constant ErrFailedToAuth (line 103) | ErrFailedToAuth = 261004
constant ErrFailedToAuthSAML (line 105) | ErrFailedToAuthSAML = 261005
constant ErrFailedToAuthOKTA (line 107) | ErrFailedToAuthOKTA = 261006
constant ErrFailedToGetSSO (line 109) | ErrFailedToGetSSO = 261007
constant ErrFailedToParseResponse (line 111) | ErrFailedToParseResponse = 261008
constant ErrFailedToGetExternalBrowserResponse (line 113) | ErrFailedToGetExternalBrowserResponse = 261009
constant ErrFailedToHeartbeat (line 115) | ErrFailedToHeartbeat = 261010
constant ErrFailedToGetChunk (line 120) | ErrFailedToGetChunk = 262000
constant ErrNonArrowResponseInArrowBatches (line 122) | ErrNonArrowResponseInArrowBatches = 262001
constant ErrNoReadOnlyTransaction (line 127) | ErrNoReadOnlyTransaction = 263000
constant ErrNoDefaultTransactionIsolationLevel (line 129) | ErrNoDefaultTransactionIsolationLevel = 263001
constant ErrInvalidStageFs (line 134) | ErrInvalidStageFs = 264001
constant ErrFailedToDownloadFromStage (line 136) | ErrFailedToDownloadFromStage = 264002
constant ErrFailedToUploadToStage (line 138) | ErrFailedToUploadToStage = 264003
constant ErrInvalidStageLocation (line 140) | ErrInvalidStageLocation = 264004
constant ErrLocalPathNotDirectory (line 142) | ErrLocalPathNotDirectory = 264005
constant ErrFileNotExists (line 144) | ErrFileNotExists = 264006
constant ErrCompressionNotSupported (line 146) | ErrCompressionNotSupported = 264007
constant ErrInternalNotMatchEncryptMaterial (line 148) | ErrInternalNotMatchEncryptMaterial = 264008
constant ErrCommandNotRecognized (line 150) | ErrCommandNotRecognized = 264009
constant ErrFailedToConvertToS3Client (line 152) | ErrFailedToConvertToS3Client = 264010
constant ErrNotImplemented (line 154) | ErrNotImplemented = 264011
constant ErrInvalidPadding (line 156) | ErrInvalidPadding = 264012
constant ErrBindSerialization (line 161) | ErrBindSerialization = 265001
constant ErrBindUpload (line 163) | ErrBindUpload = 265002
constant ErrAsync (line 168) | ErrAsync = 266001
constant ErrNoResultIDs (line 173) | ErrNoResultIDs = 267001
constant ErrInvalidTimestampTz (line 178) | ErrInvalidTimestampTz = 268000
constant ErrInvalidOffsetStr (line 181) | ErrInvalidOffsetStr = 268001
constant ErrInvalidBinaryHexForm (line 183) | ErrInvalidBinaryHexForm = 268002
constant ErrTooHighTimestampPrecision (line 185) | ErrTooHighTimestampPrecision = 268003
constant ErrNullValueInArray (line 187) | ErrNullValueInArray = 268004
constant ErrNullValueInMap (line 189) | ErrNullValueInMap = 268005
constant ErrOCSPStatusRevoked (line 194) | ErrOCSPStatusRevoked = 269001
constant ErrOCSPStatusUnknown (line 196) | ErrOCSPStatusUnknown = 269002
constant ErrOCSPInvalidValidity (line 198) | ErrOCSPInvalidValidity = 269003
constant ErrOCSPNoOCSPResponderURL (line 200) | ErrOCSPNoOCSPResponderURL = 269004
constant ErrQueryStatus (line 205) | ErrQueryStatus = 279001
constant ErrQueryIDFormat (line 207) | ErrQueryIDFormat = 279101
constant ErrQueryReportedError (line 209) | ErrQueryReportedError = 279201
constant ErrQueryIsRunning (line 211) | ErrQueryIsRunning = 279301
constant ErrSessionGone (line 216) | ErrSessionGone = 390111
constant ErrRoleNotExist (line 218) | ErrRoleNotExist = 390189
constant ErrObjectNotExistOrAuthorized (line 220) | ErrObjectNotExistOrAuthorized = 390201
constant ErrMsgFailedToParseHost (line 225) | ErrMsgFailedToParseHost = "failed to parse a host name....
constant ErrMsgFailedToParsePort (line 226) | ErrMsgFailedToParsePort = "failed to parse a port numbe...
constant ErrMsgFailedToParseAuthenticator (line 227) | ErrMsgFailedToParseAuthenticator = "failed to parse an authentic...
constant ErrMsgInvalidOffsetStr (line 228) | ErrMsgInvalidOffsetStr = "offset must be a string cons...
constant ErrMsgInvalidByteArray (line 229) | ErrMsgInvalidByteArray = "invalid byte array: %v"
constant ErrMsgIdpConnectionError (line 230) | ErrMsgIdpConnectionError = "failed to verify URLs. authe...
constant ErrMsgSSOURLNotMatch (line 231) | ErrMsgSSOURLNotMatch = "SSO URL didn't match. expect...
constant ErrMsgFailedToGetChunk (line 232) | ErrMsgFailedToGetChunk = "failed to get a chunk of res...
constant ErrMsgFailedToPostQuery (line 233) | ErrMsgFailedToPostQuery = "failed to POST. HTTP: %v, UR...
constant ErrMsgFailedToRenew (line 234) | ErrMsgFailedToRenew = "failed to renew session. HTT...
constant ErrMsgFailedToCancelQuery (line 235) | ErrMsgFailedToCancelQuery = "failed to cancel query. HTTP...
constant ErrMsgFailedToCloseSession (line 236) | ErrMsgFailedToCloseSession = "failed to close session. HTT...
constant ErrMsgFailedToAuth (line 237) | ErrMsgFailedToAuth = "failed to auth for unknown r...
constant ErrMsgFailedToAuthSAML (line 238) | ErrMsgFailedToAuthSAML = "failed to auth via SAML for ...
constant ErrMsgFailedToAuthOKTA (line 239) | ErrMsgFailedToAuthOKTA = "failed to auth via OKTA for ...
constant ErrMsgFailedToGetSSO (line 240) | ErrMsgFailedToGetSSO = "failed to auth via OKTA for ...
constant ErrMsgFailedToParseResponse (line 241) | ErrMsgFailedToParseResponse = "failed to parse a response f...
constant ErrMsgFailedToGetExternalBrowserResponse (line 242) | ErrMsgFailedToGetExternalBrowserResponse = "failed to get an external br...
constant ErrMsgNoReadOnlyTransaction (line 243) | ErrMsgNoReadOnlyTransaction = "no readonly mode is supported"
constant ErrMsgNoDefaultTransactionIsolationLevel (line 244) | ErrMsgNoDefaultTransactionIsolationLevel = "no default isolation transac...
constant ErrMsgServiceUnavailable (line 245) | ErrMsgServiceUnavailable = "service is unavailable. chec...
constant ErrMsgFailedToConnect (line 246) | ErrMsgFailedToConnect = "failed to connect to db. ver...
constant ErrMsgOCSPStatusRevoked (line 247) | ErrMsgOCSPStatusRevoked = "OCSP revoked: reason:%v, at:%v"
constant ErrMsgOCSPStatusUnknown (line 248) | ErrMsgOCSPStatusUnknown = "OCSP unknown"
constant ErrMsgOCSPInvalidValidity (line 249) | ErrMsgOCSPInvalidValidity = "invalid validity: producedAt...
constant ErrMsgOCSPNoOCSPResponderURL (line 250) | ErrMsgOCSPNoOCSPResponderURL = "no OCSP server is attached t...
constant ErrMsgBindColumnMismatch (line 251) | ErrMsgBindColumnMismatch = "column %v has a different nu...
constant ErrMsgNotImplemented (line 252) | ErrMsgNotImplemented = "not implemented"
constant ErrMsgFeatureNotSupported (line 253) | ErrMsgFeatureNotSupported = "feature is not supported: %v"
constant ErrMsgCommandNotRecognized (line 254) | ErrMsgCommandNotRecognized = "%v command not recognized"
constant ErrMsgLocalPathNotDirectory (line 255) | ErrMsgLocalPathNotDirectory = "the local path is not a dire...
constant ErrMsgFileNotExists (line 256) | ErrMsgFileNotExists = "file does not exist: %v"
constant ErrMsgFailToReadDataFromBuffer (line 257) | ErrMsgFailToReadDataFromBuffer = "failed to read data from buf...
constant ErrMsgInvalidStageFs (line 258) | ErrMsgInvalidStageFs = "destination location type is...
constant ErrMsgInternalNotMatchEncryptMaterial (line 259) | ErrMsgInternalNotMatchEncryptMaterial = "number of downloading files ...
constant ErrMsgFailedToConvertToS3Client (line 260) | ErrMsgFailedToConvertToS3Client = "failed to convert interface ...
constant ErrMsgNoResultIDs (line 261) | ErrMsgNoResultIDs = "no result IDs returned with ...
constant ErrMsgQueryStatus (line 262) | ErrMsgQueryStatus = "server ErrorCode=%s, ErrorMe...
constant ErrMsgInvalidPadding (line 263) | ErrMsgInvalidPadding = "invalid padding on input"
constant ErrMsgClientConfigFailed (line 264) | ErrMsgClientConfigFailed = "client configuration failed:...
constant ErrMsgNullValueInArray (line 265) | ErrMsgNullValueInArray = "for handling null values in ...
constant ErrMsgNullValueInMap (line 266) | ErrMsgNullValueInMap = "for handling null values in ...
constant ErrMsgFailedToParseTomlFile (line 267) | ErrMsgFailedToParseTomlFile = "failed to parse toml file. t...
constant ErrMsgFailedToFindDSNInTomlFile (line 268) | ErrMsgFailedToFindDSNInTomlFile = "failed to find DSN in toml f...
constant ErrMsgInvalidWritablePermissionToFile (line 269) | ErrMsgInvalidWritablePermissionToFile = "file '%v' is writable by gro...
constant ErrMsgInvalidExecutablePermissionToFile (line 270) | ErrMsgInvalidExecutablePermissionToFile = "file '%v' is executable — th...
constant ErrMsgNonArrowResponseInArrowBatches (line 271) | ErrMsgNonArrowResponseInArrowBatches = "arrow batches enabled, but t...
constant ErrMsgMissingTLSConfig (line 272) | ErrMsgMissingTLSConfig = "TLS config not found: %v"
function ErrEmptyAccount (line 276) | func ErrEmptyAccount() *SnowflakeError {
function ErrEmptyUsername (line 284) | func ErrEmptyUsername() *SnowflakeError {
function ErrEmptyPassword (line 292) | func ErrEmptyPassword() *SnowflakeError {
function ErrEmptyPasswordAndToken (line 300) | func ErrEmptyPasswordAndToken() *SnowflakeError {
function ErrEmptyOAuthParameters (line 308) | func ErrEmptyOAuthParameters() *SnowflakeError {
function ErrRegionConflict (line 316) | func ErrRegionConflict() *SnowflakeError {
function ErrFailedToParseAuthenticator (line 324) | func ErrFailedToParseAuthenticator() *SnowflakeError {
function ErrUnknownError (line 332) | func ErrUnknownError() *SnowflakeError {
function ErrNullValueInArrayError (line 342) | func ErrNullValueInArrayError() *SnowflakeError {
function ErrNullValueInMapError (line 350) | func ErrNullValueInMapError() *SnowflakeError {
function ErrNonArrowResponseForArrowBatches (line 358) | func ErrNonArrowResponseForArrowBatches(queryID string) *SnowflakeError {
FILE: internal/logger/accessor.go
function GetLogger (line 20) | func GetLogger() sflog.SFLogger {
function SetLogger (line 41) | func SetLogger(providedLogger SFLogger) error {
function init (line 72) | func init() {
function CreateDefaultLogger (line 80) | func CreateDefaultLogger() sflog.SFLogger {
FILE: internal/logger/accessor_test.go
function TestLoggerConfiguration (line 13) | func TestLoggerConfiguration(t *testing.T) {
function TestLoggerSecretMasking (line 37) | func TestLoggerSecretMasking(t *testing.T) {
function TestLoggerAllMethods (line 65) | func TestLoggerAllMethods(t *testing.T) {
function TestLoggerLevelFiltering (line 106) | func TestLoggerLevelFiltering(t *testing.T) {
function TestLogEntry (line 139) | func TestLogEntry(t *testing.T) {
function TestLogEntryWithFields (line 172) | func TestLogEntryWithFields(t *testing.T) {
function TestSetOutput (line 209) | func TestSetOutput(t *testing.T) {
function TestLogEntryWithContext (line 243) | func TestLogEntryWithContext(t *testing.T) {
FILE: internal/logger/context.go
function SetLogKeys (line 20) | func SetLogKeys(keys []any) {
function GetLogKeys (line 29) | func GetLogKeys() []any {
function RegisterLogContextHook (line 40) | func RegisterLogContextHook(key string, hook ClientLogContextHook) {
function GetClientLogContextHooks (line 51) | func GetClientLogContextHooks() map[string]ClientLogContextHook {
function extractContextFields (line 61) | func extractContextFields(ctx context.Context) []slog.Attr {
FILE: internal/logger/easy_logging_support.go
function CloseFileOnLoggerReplace (line 10) | func CloseFileOnLoggerReplace(sflog any, file *os.File) error {
function IsEasyLoggingLogger (line 20) | func IsEasyLoggingLogger(sflog any) bool {
function unwrapToEasyLoggingLogger (line 26) | func unwrapToEasyLoggingLogger(sflog any) (EasyLoggingSupport, bool) {
FILE: internal/logger/level_filtering.go
type levelFilteringLogger (line 14) | type levelFilteringLogger struct
method Unwrap (line 22) | func (l *levelFilteringLogger) Unwrap() any {
method shouldLog (line 28) | func (l *levelFilteringLogger) shouldLog(messageLevel sflog.Level) bool {
method Tracef (line 41) | func (l *levelFilteringLogger) Tracef(format string, args ...any) {
method Debugf (line 48) | func (l *levelFilteringLogger) Debugf(format string, args ...any) {
method Infof (line 55) | func (l *levelFilteringLogger) Infof(format string, args ...any) {
method Warnf (line 62) | func (l *levelFilteringLogger) Warnf(format string, args ...any) {
method Errorf (line 69) | func (l *levelFilteringLogger) Errorf(format string, args ...any) {
method Fatalf (line 76) | func (l *levelFilteringLogger) Fatalf(format string, args ...any) {
method Trace (line 81) | func (l *levelFilteringLogger) Trace(msg string) {
method Debug (line 88) | func (l *levelFilteringLogger) Debug(msg string) {
method Info (line 95) | func (l *levelFilteringLogger) Info(msg string) {
method Warn (line 102) | func (l *levelFilteringLogger) Warn(msg string) {
method Error (line 109) | func (l *levelFilteringLogger) Error(msg string) {
method Fatal (line 116) | func (l *levelFilteringLogger) Fatal(msg string) {
method WithField (line 121) | func (l *levelFilteringLogger) WithField(key string, value any) sflog....
method WithFields (line 129) | func (l *levelFilteringLogger) WithFields(fields map[string]any) sflog...
method WithContext (line 137) | func (l *levelFilteringLogger) WithContext(ctx context.Context) sflog....
method SetLogLevel (line 146) | func (l *levelFilteringLogger) SetLogLevel(level string) error {
method SetLogLevelInt (line 150) | func (l *levelFilteringLogger) SetLogLevelInt(level sflog.Level) error {
method GetLogLevel (line 154) | func (l *levelFilteringLogger) GetLogLevel() string {
method GetLogLevelInt (line 158) | func (l *levelFilteringLogger) GetLogLevelInt() sflog.Level {
method SetOutput (line 162) | func (l *levelFilteringLogger) SetOutput(output io.Writer) {
method SetHandler (line 167) | func (l *levelFilteringLogger) SetHandler(handler slog.Handler) error {
function newLevelFilteringLogger (line 33) | func newLevelFilteringLogger(inner SFLogger) SFLogger {
type levelFilteringEntry (line 175) | type levelFilteringEntry struct
method Tracef (line 181) | func (e *levelFilteringEntry) Tracef(format string, args ...any) {
method Debugf (line 188) | func (e *levelFilteringEntry) Debugf(format string, args ...any) {
method Infof (line 195) | func (e *levelFilteringEntry) Infof(format string, args ...any) {
method Warnf (line 202) | func (e *levelFilteringEntry) Warnf(format string, args ...any) {
method Errorf (line 209) | func (e *levelFilteringEntry) Errorf(format string, args ...any) {
method Fatalf (line 216) | func (e *levelFilteringEntry) Fatalf(format string, args ...any) {
method Trace (line 221) | func (e *levelFilteringEntry) Trace(msg string) {
method Debug (line 228) | func (e *levelFilteringEntry) Debug(msg string) {
method Info (line 235) | func (e *levelFilteringEntry) Info(msg string) {
method Warn (line 242) | func (e *levelFilteringEntry) Warn(msg string) {
method Error (line 249) | func (e *levelFilteringEntry) Error(msg string) {
method Fatal (line 256) | func (e *levelFilteringEntry) Fatal(msg string) {
FILE: internal/logger/optional_interfaces.go
type EasyLoggingSupport (line 7) | type EasyLoggingSupport interface
type Unwrapper (line 13) | type Unwrapper interface
FILE: internal/logger/proxy.go
type Proxy (line 14) | type Proxy struct
method Tracef (line 20) | func (p *Proxy) Tracef(format string, args ...any) {
method Debugf (line 25) | func (p *Proxy) Debugf(format string, args ...any) {
method Infof (line 30) | func (p *Proxy) Infof(format string, args ...any) {
method Warnf (line 35) | func (p *Proxy) Warnf(format string, args ...any) {
method Errorf (line 40) | func (p *Proxy) Errorf(format string, args ...any) {
method Fatalf (line 45) | func (p *Proxy) Fatalf(format string, args ...any) {
method Trace (line 50) | func (p *Proxy) Trace(msg string) {
method Debug (line 55) | func (p *Proxy) Debug(msg string) {
method Info (line 60) | func (p *Proxy) Info(msg string) {
method Warn (line 65) | func (p *Proxy) Warn(msg string) {
method Error (line 70) | func (p *Proxy) Error(msg string) {
method Fatal (line 75) | func (p *Proxy) Fatal(msg string) {
method WithField (line 80) | func (p *Proxy) WithField(key string, value any) sflog.LogEntry {
method WithFields (line 85) | func (p *Proxy) WithFields(fields map[string]any) sflog.LogEntry {
method WithContext (line 90) | func (p *Proxy) WithContext(ctx context.Context) sflog.LogEntry {
method SetLogLevel (line 95) | func (p *Proxy) SetLogLevel(level string) error {
method SetLogLevelInt (line 100) | func (p *Proxy) SetLogLevelInt(level sflog.Level) error {
method GetLogLevel (line 105) | func (p *Proxy) GetLogLevel() string {
method GetLogLevelInt (line 110) | func (p *Proxy) GetLogLevelInt() sflog.Level {
method SetOutput (line 115) | func (p *Proxy) SetOutput(output io.Writer) {
method SetHandler (line 121) | func (p *Proxy) SetHandler(handler slog.Handler) error {
function NewLoggerProxy (line 133) | func NewLoggerProxy() sflog.SFLogger {
FILE: internal/logger/secret_detector.go
constant awsKeyPattern (line 8) | awsKeyPattern = `(?i)(aws_key_id|aws_secret_key|access_key_id|s...
constant awsTokenPattern (line 9) | awsTokenPattern = `(?i)(accessToken|tempToken|keySecret)"\s*:\s*"...
constant sasTokenPattern (line 10) | sasTokenPattern = `(?i)(sig|signature|AWSAccessKeyId|password|pas...
constant privateKeyPattern (line 11) | privateKeyPattern = `(?im)-----BEGIN PRIVATE KEY-----\\n([a-z0-9/+=...
constant privateKeyDataPattern (line 12) | privateKeyDataPattern = `(?i)"privateKeyData": "([a-z0-9/+=\\n]{10,})"`
constant privateKeyParamPattern (line 13) | privateKeyParamPattern = `(?i)privateKey=([A-Za-z0-9/+=_%-]+)(&|$|\s)`
constant connectionTokenPattern (line 14) | connectionTokenPattern = `(?i)(token|assertion content)([\'\"\s:=]+)([a-...
constant passwordPattern (line 15) | passwordPattern = `(?i)(password|pwd)([\'\"\s:=]+)([a-z0-9!\"#\$%...
constant dsnPasswordPattern (line 16) | dsnPasswordPattern = `([^/:]+):([^@/:]{3,})@`
constant clientSecretPattern (line 17) | clientSecretPattern = `(?i)(clientSecret)([\'\"\s:= ]+)([a-z0-9!\"#\$...
constant jwtTokenPattern (line 18) | jwtTokenPattern = `(?i)(jwt|bearer)[\s:=]*([a-zA-Z0-9_-]+\.[a-zA-...
type patternAndReplace (line 21) | type patternAndReplace struct
function MaskSecrets (line 41) | func MaskSecrets(text string) (masked string) {
FILE: internal/logger/secret_detector_test.go
constant longToken (line 12) | longToken = "_Y1ZNETTn5/qfUWj3Jedby7gipDzQs=UKyJH9DS=nFzzWnfZKGV+C7GopWC...
constant randomPassword (line 17) | randomPassword = `Fh[+2J~AcqeqW%?`
constant falsePositiveToken (line 18) | falsePositiveToken = "2020-04-30 23:06:04,069 - MainThread auth.py:397" +
function generateTestJWT (line 24) | func generateTestJWT(t *testing.T) string {
function TestSecretsDetector (line 47) | func TestSecretsDetector(t *testing.T) {
FILE: internal/logger/secret_masking.go
type secretMaskingLogger (line 13) | type secretMaskingLogger struct
method Unwrap (line 21) | func (l *secretMaskingLogger) Unwrap() any {
method maskValue (line 35) | func (l *secretMaskingLogger) maskValue(value any) any {
method maskString (line 48) | func (l *secretMaskingLogger) maskString(value string) string {
method Tracef (line 53) | func (l *secretMaskingLogger) Tracef(format string, args ...any) {
method Debugf (line 59) | func (l *secretMaskingLogger) Debugf(format string, args ...any) {
method Infof (line 65) | func (l *secretMaskingLogger) Infof(format string, args ...any) {
method Warnf (line 71) | func (l *secretMaskingLogger) Warnf(format string, args ...any) {
method Errorf (line 77) | func (l *secretM
Condensed preview — 394 files, each showing path, character count, and a content snippet. Download the .json file or copy for the full structured content (2,834K chars).
[
{
"path": ".cursor/rules/overall-guidelines.mdc",
"chars": 952,
"preview": "---\nalwaysApply: true\n---\n\n# Cursor Rules for Go Snowflake Driver\n\n## General Development Standards\n\n### Code Quality\n- "
},
{
"path": ".cursor/rules/testing.mdc",
"chars": 2712,
"preview": "---\nalwaysApply: true\n---\n\n# Cursor Rules for Go Test Files\n\nThis file automatically applies when working on `*_test.go`"
},
{
"path": ".github/CODEOWNERS",
"chars": 318,
"preview": "* @snowflakedb/Client\n\n/transport.go @snowflakedb/pki-oversight @snowflakedb/Client\n/crl.go @snowflakedb/pki-oversight @"
},
{
"path": ".github/ISSUE_TEMPLATE/BUG_REPORT.md",
"chars": 1249,
"preview": "---\nname: Bug Report 🐞\nabout: Something isn't working as expected? Here is the right place to report.\nlabels: bug\n---\n\n\n"
},
{
"path": ".github/ISSUE_TEMPLATE/FEATURE_REQUEST.md",
"chars": 446,
"preview": "---\nname: Feature Request 💡\nabout: Suggest a new idea for the project.\nlabels: feature\n---\n\n<!--\nIf you need urgent assi"
},
{
"path": ".github/ISSUE_TEMPLATE.md",
"chars": 572,
"preview": "### Issue description\nTell us what should happen and what happens instead\n\n### Example code\n```go\nIf possible, please en"
},
{
"path": ".github/PULL_REQUEST_TEMPLATE.md",
"chars": 244,
"preview": "### Description\n\nSNOW-XXX Please explain the changes you made here.\n\n### Checklist\n- [ ] Added proper logging (if possib"
},
{
"path": ".github/repo_meta.yaml",
"chars": 117,
"preview": "point_of_contact: @snowflakedb/client\nproduction: true\ncode_owners_file_present: false\njira_area: Developer Platform\n"
},
{
"path": ".github/secret_scanning.yml",
"chars": 36,
"preview": "paths-ignore:\n - \"**/test_data/**\"\n"
},
{
"path": ".github/workflows/build-test.yml",
"chars": 16496,
"preview": "name: Build and Test\n\npermissions:\n contents: read\n\non:\n push:\n branches:\n - master\n tags:\n - v*\n pul"
},
{
"path": ".github/workflows/changelog.yml",
"chars": 531,
"preview": "name: Changelog Check\n\non:\n pull_request:\n types: [opened, synchronize, labeled, unlabeled]\n\njobs:\n check_change_lo"
},
{
"path": ".github/workflows/cla_bot.yml",
"chars": 1080,
"preview": "name: \"CLA Assistant\"\non:\n issue_comment:\n types: [created]\n pull_request_target:\n types: [opened,closed,synchro"
},
{
"path": ".github/workflows/jira_close.yml",
"chars": 1567,
"preview": "name: Jira closure\n\non:\n issues:\n types: [closed, deleted]\n\njobs:\n close-issue:\n runs-on: ubuntu-latest\n step"
},
{
"path": ".github/workflows/jira_comment.yml",
"chars": 1030,
"preview": "name: Jira comment\n\non:\n issue_comment:\n types: [created]\n\njobs:\n comment-issue:\n runs-on: ubuntu-latest\n ste"
},
{
"path": ".github/workflows/jira_issue.yml",
"chars": 3736,
"preview": "name: Jira creation\n\non:\n issues:\n types: [opened]\n issue_comment:\n types: [created]\n\njobs:\n create-issue:\n "
},
{
"path": ".github/workflows/semgrep.yml",
"chars": 286,
"preview": "name: Run semgrep checks\n\non:\n pull_request:\n branches: [main, master]\n\npermissions:\n contents: read\n\njobs:\n run"
},
{
"path": ".gitignore",
"chars": 366,
"preview": "*.DS_Store\n.idea/\n.vscode/\nparameters*.json\nparameters*.bat\n*.p8\ncoverage.txt\nfuzz-*/\n/select1\n/selectmany\n/verifycert\nw"
},
{
"path": ".golangci.yml",
"chars": 683,
"preview": "version: \"2\"\n\nrun:\n tests: true\n\nlinters:\n exclusions:\n rules:\n - path: \"_test.go\"\n linters:\n "
},
{
"path": ".pre-commit-config.yaml",
"chars": 188,
"preview": "repos:\n- repo: git@github.com:snowflakedb/casec_precommit.git # SSH\n# - repo: https://github.com/snowflakedb/casec_preco"
},
{
"path": ".windsurf/rules/go.md",
"chars": 506,
"preview": "---\ntrigger: glob\ndescription: \nglobs: **/*.go\n---\n\n# Go files rules\n\n## General\n\n1. Unless it's necessary or told other"
},
{
"path": "CHANGELOG.md",
"chars": 11345,
"preview": "# Changelog\n\n## Upcoming release\n\nBug fixes:\n\n- Fixed empty `Account` when connecting with programmatic `Config` and `da"
},
{
"path": "CONTRIBUTING.md",
"chars": 816,
"preview": "# Contributing Guidelines\n\n## Reporting Issues\n\nBefore creating a new Issue, please check first if a similar Issue [alre"
},
{
"path": "Jenkinsfile",
"chars": 3669,
"preview": "@Library('pipeline-utils')\nimport com.snowflake.DevEnvUtils\nimport groovy.json.JsonOutput\n\n\ntimestamps {\n node('high-me"
},
{
"path": "LICENSE",
"chars": 11384,
"preview": " Apache License\n Version 2.0, January 2004\n "
},
{
"path": "Makefile",
"chars": 969,
"preview": "NAME:=gosnowflake\nVERSION:=$(shell git describe --tags --abbrev=0)\nREVISION:=$(shell git rev-parse --short HEAD)\nCOVFLAG"
},
{
"path": "README.md",
"chars": 10669,
"preview": "## Migrating to v2\n\n**Version 2.0.0 of the Go Snowflake Driver was released on March 3rd, 2026.** This major version inc"
},
{
"path": "SECURITY.md",
"chars": 307,
"preview": "# Security Policy\n\nPlease refer to the Snowflake [HackerOne program](https://hackerone.com/snowflake?type=team) for our "
},
{
"path": "aaa_test.go",
"chars": 325,
"preview": "package gosnowflake\n\nimport (\n\t\"testing\"\n)\n\nfunc TestShowServerVersion(t *testing.T) {\n\trunDBTest(t, func(dbt *DBTest) {"
},
{
"path": "arrow_chunk.go",
"chars": 2457,
"preview": "package gosnowflake\n\nimport (\n\t\"bytes\"\n\t\"context\"\n\t\"encoding/base64\"\n\t\"github.com/snowflakedb/gosnowflake/v2/internal/qu"
},
{
"path": "arrow_stream.go",
"chars": 6814,
"preview": "package gosnowflake\n\nimport (\n\t\"bufio\"\n\t\"bytes\"\n\t\"compress/gzip\"\n\t\"context\"\n\t\"encoding/base64\"\n\t\"fmt\"\n\t\"io\"\n\t\"maps\"\n\t\"ne"
},
{
"path": "arrow_test.go",
"chars": 17797,
"preview": "package gosnowflake\n\nimport (\n\t\"bytes\"\n\t\"context\"\n\t\"fmt\"\n\t\"math/big\"\n\t\"reflect\"\n\t\"strings\"\n\t\"testing\"\n\t\"time\"\n\n\t\"github."
},
{
"path": "arrowbatches/batches.go",
"chars": 3717,
"preview": "package arrowbatches\n\nimport (\n\t\"cmp\"\n\t\"context\"\n\t\"github.com/snowflakedb/gosnowflake/v2/internal/query\"\n\t\"github.com/sn"
},
{
"path": "arrowbatches/batches_test.go",
"chars": 15410,
"preview": "package arrowbatches\n\nimport (\n\t\"context\"\n\t\"crypto/rsa\"\n\t\"crypto/x509\"\n\t\"database/sql\"\n\t\"database/sql/driver\"\n\t\"encoding"
},
{
"path": "arrowbatches/context.go",
"chars": 1043,
"preview": "package arrowbatches\n\nimport (\n\t\"context\"\n\n\tia \"github.com/snowflakedb/gosnowflake/v2/internal/arrow\"\n)\n\n// Timestamp op"
},
{
"path": "arrowbatches/converter.go",
"chars": 11477,
"preview": "package arrowbatches\n\nimport (\n\t\"context\"\n\t\"fmt\"\n\t\"github.com/snowflakedb/gosnowflake/v2/internal/query\"\n\t\"github.com/sn"
},
{
"path": "arrowbatches/converter_test.go",
"chars": 45338,
"preview": "package arrowbatches\n\nimport (\n\t\"context\"\n\t\"fmt\"\n\t\"github.com/snowflakedb/gosnowflake/v2/internal/query\"\n\t\"github.com/sn"
},
{
"path": "arrowbatches/schema.go",
"chars": 4593,
"preview": "package arrowbatches\n\nimport (\n\t\"github.com/snowflakedb/gosnowflake/v2/internal/query\"\n\t\"github.com/snowflakedb/gosnowfl"
},
{
"path": "assert_test.go",
"chars": 10993,
"preview": "package gosnowflake\n\nimport (\n\t\"bytes\"\n\t\"errors\"\n\t\"fmt\"\n\t\"math\"\n\t\"reflect\"\n\t\"regexp\"\n\t\"slices\"\n\t\"strings\"\n\t\"testing\"\n\t\"t"
},
{
"path": "async.go",
"chars": 6116,
"preview": "package gosnowflake\n\nimport (\n\t\"context\"\n\t\"fmt\"\n\t\"net/url\"\n\t\"strconv\"\n\t\"time\"\n)\n\nfunc (sr *snowflakeRestful) processAsyn"
},
{
"path": "async_test.go",
"chars": 6263,
"preview": "package gosnowflake\n\nimport (\n\t\"context\"\n\t\"database/sql\"\n\t\"fmt\"\n\t\"testing\"\n)\n\nfunc TestAsyncMode(t *testing.T) {\n\tctx :="
},
{
"path": "auth.go",
"chars": 29483,
"preview": "package gosnowflake\n\nimport (\n\t\"context\"\n\t\"crypto/sha256\"\n\t\"crypto/x509\"\n\t\"encoding/base64\"\n\t\"encoding/json\"\n\t\"errors\"\n\t"
},
{
"path": "auth_generic_test_methods_test.go",
"chars": 1125,
"preview": "package gosnowflake\n\nimport (\n\t\"fmt\"\n\t\"os\"\n\t\"testing\"\n)\n\nfunc getAuthTestConfigFromEnv() (*Config, error) {\n\treturn GetC"
},
{
"path": "auth_oauth.go",
"chars": 15040,
"preview": "package gosnowflake\n\nimport (\n\t\"bufio\"\n\t\"bytes\"\n\t\"cmp\"\n\t\"context\"\n\t\"encoding/json\"\n\t\"errors\"\n\t\"fmt\"\n\t\"html\"\n\t\"io\"\n\t\"net\""
},
{
"path": "auth_oauth_test.go",
"chars": 33653,
"preview": "package gosnowflake\n\nimport (\n\t\"context\"\n\t\"database/sql\"\n\t\"errors\"\n\tsfconfig \"github.com/snowflakedb/gosnowflake/v2/inte"
},
{
"path": "auth_test.go",
"chars": 43988,
"preview": "package gosnowflake\n\nimport (\n\t\"cmp\"\n\t\"context\"\n\t\"crypto/rand\"\n\t\"crypto/rsa\"\n\t\"database/sql\"\n\t\"encoding/json\"\n\t\"errors\"\n"
},
{
"path": "auth_wif.go",
"chars": 21373,
"preview": "package gosnowflake\n\nimport (\n\t\"bytes\"\n\t\"context\"\n\t\"crypto/sha256\"\n\t\"encoding/base64\"\n\t\"encoding/hex\"\n\t\"encoding/json\"\n\t"
},
{
"path": "auth_wif_test.go",
"chars": 27758,
"preview": "package gosnowflake\n\nimport (\n\t\"context\"\n\t\"database/sql\"\n\t\"encoding/base64\"\n\t\"encoding/json\"\n\t\"errors\"\n\t\"fmt\"\n\t\"os\"\n\t\"os"
},
{
"path": "auth_with_external_browser_test.go",
"chars": 5753,
"preview": "package gosnowflake\n\nimport (\n\t\"context\"\n\t\"database/sql\"\n\t\"fmt\"\n\t\"log\"\n\t\"os/exec\"\n\t\"sync\"\n\t\"testing\"\n\t\"time\"\n)\n\nfunc Tes"
},
{
"path": "auth_with_keypair_test.go",
"chars": 1460,
"preview": "package gosnowflake\n\nimport (\n\t\"crypto/rsa\"\n\t\"fmt\"\n\t\"golang.org/x/crypto/ssh\"\n\t\"os\"\n\t\"testing\"\n)\n\nfunc TestKeypairSucces"
},
{
"path": "auth_with_mfa_test.go",
"chars": 2426,
"preview": "package gosnowflake\n\nimport (\n\t\"errors\"\n\t\"fmt\"\n\t\"log\"\n\t\"os/exec\"\n\t\"strings\"\n\t\"testing\"\n)\n\nfunc TestMfaSuccessful(t *test"
},
{
"path": "auth_with_oauth_okta_authorization_code_test.go",
"chars": 3552,
"preview": "package gosnowflake\n\nimport (\n\t\"fmt\"\n\t\"sync\"\n\t\"testing\"\n\t\"time\"\n)\n\nfunc TestOauthOktaAuthorizationCodeSuccessful(t *test"
},
{
"path": "auth_with_oauth_okta_client_credentials_test.go",
"chars": 2159,
"preview": "package gosnowflake\n\nimport (\n\t\"fmt\"\n\t\"strings\"\n\t\"testing\"\n)\n\nfunc TestOauthOktaClientCredentialsSuccessful(t *testing.T"
},
{
"path": "auth_with_oauth_snowflake_authorization_code_test.go",
"chars": 5320,
"preview": "package gosnowflake\n\nimport (\n\t\"fmt\"\n\t\"sync\"\n\t\"testing\"\n\t\"time\"\n)\n\nfunc TestOauthSnowflakeAuthorizationCodeSuccessful(t "
},
{
"path": "auth_with_oauth_snowflake_authorization_code_wildcards_test.go",
"chars": 4049,
"preview": "package gosnowflake\n\nimport (\n\t\"fmt\"\n\t\"sync\"\n\t\"testing\"\n\t\"time\"\n)\n\nfunc TestOauthSnowflakeAuthorizationCodeWildcardsSucc"
},
{
"path": "auth_with_oauth_test.go",
"chars": 3198,
"preview": "package gosnowflake\n\nimport (\n\t\"encoding/json\"\n\t\"fmt\"\n\t\"net/http\"\n\t\"net/url\"\n\t\"strings\"\n\t\"testing\"\n)\n\nfunc TestOauthSucc"
},
{
"path": "auth_with_okta_test.go",
"chars": 1517,
"preview": "package gosnowflake\n\nimport (\n\t\"fmt\"\n\t\"net/url\"\n\t\"testing\"\n)\n\nfunc TestOktaSuccessful(t *testing.T) {\n\tcfg := setupOktaT"
},
{
"path": "auth_with_pat_test.go",
"chars": 3814,
"preview": "package gosnowflake\n\nimport (\n\t\"database/sql\"\n\t\"fmt\"\n\t\"log\"\n\t\"strings\"\n\t\"testing\"\n\t\"time\"\n)\n\ntype PatToken struct {\n\tNam"
},
{
"path": "authexternalbrowser.go",
"chars": 10978,
"preview": "package gosnowflake\n\nimport (\n\t\"bytes\"\n\t\"context\"\n\t\"encoding/base64\"\n\t\"encoding/json\"\n\t\"errors\"\n\t\"fmt\"\n\terrors2 \"github."
},
{
"path": "authexternalbrowser_test.go",
"chars": 6415,
"preview": "package gosnowflake\n\nimport (\n\t\"context\"\n\t\"errors\"\n\t\"fmt\"\n\tsfconfig \"github.com/snowflakedb/gosnowflake/v2/internal/conf"
},
{
"path": "authokta.go",
"chars": 11014,
"preview": "package gosnowflake\n\nimport (\n\t\"bytes\"\n\t\"context\"\n\t\"encoding/json\"\n\t\"fmt\"\n\t\"github.com/snowflakedb/gosnowflake/v2/intern"
},
{
"path": "authokta_test.go",
"chars": 13648,
"preview": "package gosnowflake\n\nimport (\n\t\"context\"\n\t\"errors\"\n\t\"net/http\"\n\t\"net/url\"\n\t\"strconv\"\n\t\"testing\"\n\t\"time\"\n)\n\nfunc TestUnit"
},
{
"path": "azure_storage_client.go",
"chars": 11982,
"preview": "package gosnowflake\n\nimport (\n\t\"bytes\"\n\t\"cmp\"\n\t\"context\"\n\t\"crypto/md5\"\n\t\"encoding/json\"\n\t\"errors\"\n\t\"fmt\"\n\t\"io\"\n\t\"net/htt"
},
{
"path": "azure_storage_client_test.go",
"chars": 22637,
"preview": "package gosnowflake\n\nimport (\n\t\"bytes\"\n\t\"context\"\n\t\"crypto/md5\"\n\t\"encoding/json\"\n\t\"errors\"\n\t\"io\"\n\t\"net/http\"\n\t\"os\"\n\t\"pat"
},
{
"path": "bind_uploader.go",
"chars": 10285,
"preview": "package gosnowflake\n\nimport (\n\t\"bytes\"\n\t\"context\"\n\t\"database/sql\"\n\t\"database/sql/driver\"\n\t\"fmt\"\n\t\"github.com/snowflakedb"
},
{
"path": "bindings_test.go",
"chars": 54626,
"preview": "package gosnowflake\n\nimport (\n\t\"bytes\"\n\t\"context\"\n\t\"database/sql\"\n\t\"database/sql/driver\"\n\t\"fmt\"\n\t\"log\"\n\t\"math\"\n\t\"math/bi"
},
{
"path": "chunk.go",
"chars": 6587,
"preview": "package gosnowflake\n\nimport (\n\t\"bytes\"\n\t\"fmt\"\n\t\"io\"\n\n\t\"unicode\"\n\t\"unicode/utf16\"\n\t\"unicode/utf8\"\n)\n\nconst (\n\tdefaultChun"
},
{
"path": "chunk_downloader.go",
"chars": 17733,
"preview": "package gosnowflake\n\nimport (\n\t\"bufio\"\n\t\"compress/gzip\"\n\t\"context\"\n\t\"encoding/json\"\n\t\"errors\"\n\t\"fmt\"\n\terrors2 \"github.co"
},
{
"path": "chunk_downloader_test.go",
"chars": 2086,
"preview": "package gosnowflake\n\nimport (\n\t\"context\"\n\t\"database/sql/driver\"\n\t\"testing\"\n\n\tia \"github.com/snowflakedb/gosnowflake/v2/i"
},
{
"path": "chunk_test.go",
"chars": 16705,
"preview": "package gosnowflake\n\nimport (\n\t\"bytes\"\n\t\"context\"\n\t\"database/sql/driver\"\n\t\"encoding/json\"\n\t\"errors\"\n\t\"fmt\"\n\terrors2 \"git"
},
{
"path": "ci/_init.sh",
"chars": 543,
"preview": "#!/usr/bin/env -e\n\nexport PLATFORM=$(echo $(uname) | tr '[:upper:]' '[:lower:]')\n# Use the internal Docker Registry\nexpo"
},
{
"path": "ci/build.bat",
"chars": 456,
"preview": "REM Format and Lint Golang driver\n\n@echo off\nsetlocal EnableDelayedExpansion\n\necho [INFO] Download tools\nwhere golint\nIF"
},
{
"path": "ci/build.sh",
"chars": 177,
"preview": "#!/bin/bash\n#\n# Format, lint and WhiteSource scan Golang driver\n#\nset -e\nset -o pipefail\n\nCI_DIR=\"$( cd \"$( dirname \"${B"
},
{
"path": "ci/container/test_authentication.sh",
"chars": 776,
"preview": "#!/bin/bash -e\n\nset -o pipefail\n\nexport AUTH_PARAMETER_FILE=./.github/workflows/parameters_aws_auth_tests.json\neval $(jq"
},
{
"path": "ci/container/test_component.sh",
"chars": 209,
"preview": "#!/bin/bash\n\nset -e\nset -o pipefail\n\nCI_SCRIPTS_DIR=\"$( cd \"$( dirname \"${BASH_SOURCE[0]}\" )\" && pwd )\"\nTOPDIR=$(cd $CI_"
},
{
"path": "ci/docker/rockylinux9/Dockerfile",
"chars": 2741,
"preview": "ARG BASE_IMAGE=rockylinux:9\nFROM $BASE_IMAGE\n\nARG TARGETARCH\n\n# Update all packages first (including glibc) to get lates"
},
{
"path": "ci/gofix.sh",
"chars": 3453,
"preview": "#!/usr/bin/env bash\nset -euo pipefail\n\nCI_DIR=\"$( cd \"$( dirname \"${BASH_SOURCE[0]}\" )\" && pwd )\"\ncd \"$CI_DIR/..\"\n\nGOOS_"
},
{
"path": "ci/image/Dockerfile",
"chars": 498,
"preview": "FROM artifactory.int.snowflakecomputing.com/development-chainguard-virtual/snowflake.com/go:1.24.0-dev\n\nUSER root\n\nRUN a"
},
{
"path": "ci/image/build.sh",
"chars": 395,
"preview": "#!/usr/bin/env bash -e\n#\n# Build Docker images\n#\nset -o pipefail\nTHIS_DIR=\"$( cd \"$( dirname \"${BASH_SOURCE[0]}\" )\" && p"
},
{
"path": "ci/image/scripts/entrypoint.sh",
"chars": 332,
"preview": "#!/bin/bash -ex\n# Add local user\n# Either use the LOCAL_USER_ID if passed in at runtime or\n# fallback\n\nUSER_ID=${LOCAL_U"
},
{
"path": "ci/image/update.sh",
"chars": 845,
"preview": "#!/usr/bin/env bash -e\n#\n# Build Docker images\n#\nset -o pipefail\nTHIS_DIR=\"$( cd \"$( dirname \"${BASH_SOURCE[0]}\" )\" && p"
},
{
"path": "ci/scripts/.gitignore",
"chars": 25,
"preview": "wiremock-standalone-*.jar"
},
{
"path": "ci/scripts/README.md",
"chars": 574,
"preview": "# Refreshing wiremock test cert\n\nPassword for CA is `password`.\n\n```bash\nopenssl x509 -req -in wiremock.csr -CA ca.crt -"
},
{
"path": "ci/scripts/ca.crt",
"chars": 2086,
"preview": "-----BEGIN CERTIFICATE-----\nMIIF1zCCA7+gAwIBAgIUXh8f8hI5mKqCrUJaDn0zF6qGmw0wDQYJKoZIhvcNAQEL\nBQAwezELMAkGA1UEBhMCUEwxFDA"
},
{
"path": "ci/scripts/ca.key",
"chars": 3446,
"preview": "-----BEGIN ENCRYPTED PRIVATE KEY-----\nMIIJtTBfBgkqhkiG9w0BBQ0wUjAxBgkqhkiG9w0BBQwwJAQQR+n/YtOhd0h7AmwV\nGU9glAICCAAwDAYIK"
},
{
"path": "ci/scripts/ca.srl",
"chars": 41,
"preview": "54587BDD05D4BE6A6D8852CA7FDB421189EA1C6D\n"
},
{
"path": "ci/scripts/execute_tests.sh",
"chars": 3111,
"preview": "#!/bin/bash\n#\n# Build and Test Golang driver\n#\nset -e\nset -o pipefail\nCI_SCRIPTS_DIR=\"$( cd \"$( dirname \"${BASH_SOURCE[0"
},
{
"path": "ci/scripts/hang_webserver.py",
"chars": 3518,
"preview": "#!/usr/bin/env python3\nimport sys\nfrom http.server import BaseHTTPRequestHandler,HTTPServer\nfrom socketserver import Thr"
},
{
"path": "ci/scripts/login_internal_docker.sh",
"chars": 386,
"preview": "#!/bin/bash -e\n#\n# Login the Internal Docker Registry\n#\nif [[ -z \"$GITHUB_ACTIONS\" ]]; then\n echo \"[INFO] Login the i"
},
{
"path": "ci/scripts/run_wiremock.sh",
"chars": 700,
"preview": "#!/usr/bin/env bash\n\nSCRIPT_DIR=\"$( cd \"$( dirname \"${BASH_SOURCE[0]}\" )\" && pwd )\"\n\ncd $SCRIPT_DIR\n\nif [[ \"$1\" == \"--ec"
},
{
"path": "ci/scripts/setup_connection_parameters.sh",
"chars": 1090,
"preview": "#!/bin/bash -e\n#\n# Set connection parameters\n#\nCI_SCRIPTS_DIR=\"$( cd \"$( dirname \"${BASH_SOURCE[0]}\" )\" && pwd )\"\nif [[ "
},
{
"path": "ci/scripts/setup_gpg.sh",
"chars": 410,
"preview": "#!/bin/bash\n\n# GPG setup script for creating unique GPG home directory\n\nsetup_gpg_home() {\n # Create unique GPG home di"
},
{
"path": "ci/scripts/wiremock-ecdsa-pub.key",
"chars": 178,
"preview": "-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEX3j37DbAKoO6Cwn0TsoMcsVXEF52\nlDa2tEHX2kMoxLExE4cgBipPyHgw"
},
{
"path": "ci/scripts/wiremock-ecdsa.crt",
"chars": 1444,
"preview": "-----BEGIN CERTIFICATE-----\nMIID/jCCAeagAwIBAgIUVFh73QXUvmptiFLKf9tCEYnqHG0wDQYJKoZIhvcNAQEL\nBQAwezELMAkGA1UEBhMCUEwxFDA"
},
{
"path": "ci/scripts/wiremock-ecdsa.csr",
"chars": 412,
"preview": "-----BEGIN CERTIFICATE REQUEST-----\nMIH5MIGsAgEAMHkxCzAJBgNVBAYTAlBMMRQwEgYDVQQIDAtNYXpvd2llY2tpZTEP\nMA0GA1UEBwwGV2Fyc2F"
},
{
"path": "ci/scripts/wiremock-ecdsa.key",
"chars": 119,
"preview": "-----BEGIN PRIVATE KEY-----\nMC4CAQAwBQYDK2VwBCIEICQI1T3B7DZ45py/Oa4fEjhdz3kMDlRFXvY8vv9DA5Io\n-----END PRIVATE KEY-----\n"
},
{
"path": "ci/scripts/wiremock.crt",
"chars": 2114,
"preview": "-----BEGIN CERTIFICATE-----\nMIIF7TCCA9WgAwIBAgIUVFh73QXUvmptiFLKf9tCEYnqHGwwDQYJKoZIhvcNAQEL\nBQAwezELMAkGA1UEBhMCUEwxFDA"
},
{
"path": "ci/scripts/wiremock.csr",
"chars": 1708,
"preview": "-----BEGIN CERTIFICATE REQUEST-----\nMIIEszCCApsCAQAwbjELMAkGA1UEBhMCUEwxFDASBgNVBAgMC01hem93aWVja2ll\nMQ8wDQYDVQQHDAZXYXJ"
},
{
"path": "ci/scripts/wiremock.key",
"chars": 3268,
"preview": "-----BEGIN PRIVATE KEY-----\nMIIJQQIBADANBgkqhkiG9w0BAQEFAASCCSswggknAgEAAoICAQDDKVbEUa1u/1Bc\n32/1n5IxebSiCQTc3v/dKgtsYXk"
},
{
"path": "ci/scripts/wiremock.v3.ext",
"chars": 216,
"preview": "authorityKeyIdentifier=keyid,issuer\nbasicConstraints=CA:FALSE\nkeyUsage = digitalSignature, nonRepudiation, keyEncipherme"
},
{
"path": "ci/test.bat",
"chars": 4918,
"preview": "REM Test Golang driver\n\nsetlocal EnableDelayedExpansion\n\nstart /b python ci\\scripts\\hang_webserver.py 12345\n\ncurl -O htt"
},
{
"path": "ci/test.sh",
"chars": 1835,
"preview": "#!/bin/bash\n#\n# Test Golang driver\n#\nset -e\nset -o pipefail\n\nCI_DIR=\"$( cd \"$( dirname \"${BASH_SOURCE[0]}\" )\" && pwd )\"\n"
},
{
"path": "ci/test_authentication.sh",
"chars": 1408,
"preview": "#!/bin/bash -e\n\nset -o pipefail\n\nexport THIS_DIR=\"$( cd \"$( dirname \"${BASH_SOURCE[0]}\" )\" && pwd )\"\nsource \"$THIS_DIR/s"
},
{
"path": "ci/test_revocation.sh",
"chars": 1645,
"preview": "#!/bin/bash\n#\n# Test certificate revocation validation using the revocation-validation framework.\n#\n\nset -o pipefail\n\nTH"
},
{
"path": "ci/test_rockylinux9.sh",
"chars": 1886,
"preview": "#!/bin/bash -e\n#\n# Test GoSnowflake driver in Rocky Linux 9\n# NOTES:\n# - Go version MUST be passed in as the first arg"
},
{
"path": "ci/test_rockylinux9_docker.sh",
"chars": 1609,
"preview": "#!/bin/bash -e\n# Test GoSnowflake driver in Rocky Linux 9 Docker\n# NOTES:\n# - Go version MUST be specified as first ar"
},
{
"path": "ci/test_wif.sh",
"chars": 4114,
"preview": "#!/bin/bash -e\n\nset -o pipefail\n\nexport THIS_DIR=\"$( cd \"$( dirname \"${BASH_SOURCE[0]}\" )\" && pwd )\"\nexport RSA_KEY_PATH"
},
{
"path": "ci/wif/parameters/parameters_wif.json.gpg",
"chars": 241,
"preview": "\r\u0004\t\u0003\b\u0017'QW\u0005-q\b\u0001d\rYêőTkv5F2яyD`\u0016mw\u0016GL\u0016Wݽd_\\'q6T*'9\u0001_֮t\u000b%?wļHbZvfwӘ].\u0016\u0016\u0017h\\Θ_&uzT[&1G\u00140=)}V\u0016;\u0005j ==X;E\u0003\u0012(\u000f,k7I&\u0014\u0019@ŕZק\u0018З\u0010\u001b$>"
},
{
"path": "ci/wif/parameters/rsa_wif_aws_azure.gpg",
"chars": 209,
"preview": "\r\u0004\t\u0003\b髃6K\u00015%܇ټ飐\u000f|eRk]n\u000fc-TloB,\u0011ܐ͒R7B]<ER-|\u00044u'K2<\u0013:BC&fԳeX%i9\u00060@LG\u0011$a\u0004hOnTejB@\u001a)~Nto&\u0003Ȍ\u0012\u0013ru/i\u001cuq%0\u0012!]ݮ2-lw\u0018`!ʚgF߬\u0019i\u0010DDĘX"
},
{
"path": "ci/wif/parameters/rsa_wif_gcp.gpg",
"chars": 209,
"preview": "\r\u0004\t\u0003\b髃6K\u00015%܇ټ飐\u000f|eRk]n\u000fc-TloB,\u0011ܐ͒R7B]<ER-|\u00044u'K2<\u0013:BC&fԳeX%i9\u00060@LG\u0011$a\u0004hOnTejB@\u001a)~Nto&\u0003Ȍ\u0012\u0013ru/i\u001cuq%0\u0012!]ݮ2-lw\u0018`!ʚgF߬\u0019i\u0010DDĘX"
},
{
"path": "client.go",
"chars": 973,
"preview": "package gosnowflake\n\nimport (\n\t\"context\"\n\t\"net/http\"\n\t\"net/url\"\n\t\"time\"\n)\n\n// InternalClient is implemented by HTTPClien"
},
{
"path": "client_configuration.go",
"chars": 5593,
"preview": "package gosnowflake\n\nimport (\n\t\"encoding/json\"\n\t\"errors\"\n\t\"fmt\"\n\t\"os\"\n\t\"path\"\n\t\"path/filepath\"\n\t\"strings\"\n)\n\n// log leve"
},
{
"path": "client_configuration_test.go",
"chars": 10809,
"preview": "package gosnowflake\n\nimport (\n\t\"fmt\"\n\t\"os\"\n\t\"path\"\n\t\"path/filepath\"\n\t\"strings\"\n\t\"testing\"\n)\n\nfunc TestFindConfigFileFrom"
},
{
"path": "client_test.go",
"chars": 1504,
"preview": "package gosnowflake\n\nimport (\n\t\"context\"\n\t\"net/http\"\n\t\"net/url\"\n\t\"testing\"\n)\n\ntype DummyTransport struct {\n\tpostRequests"
},
{
"path": "cmd/arrow/.gitignore",
"chars": 66,
"preview": "arrow_batches\ntransform_batches_to_rows/transform_batches_to_rows\n"
},
{
"path": "cmd/arrow/Makefile",
"chars": 228,
"preview": "SUBDIRS := batches transform_batches_to_rows\nTARGETS := all install run lint fmt\n\n$(TARGETS): subdirs\n\nsubdirs: $(SUBDIR"
},
{
"path": "cmd/arrow/transform_batches_to_rows/Makefile",
"chars": 203,
"preview": "include ../../../gosnowflake.mak\nCMD_TARGET=transform_batches_to_rows\n\n## Install\ninstall: cinstall\n\n## Run\nrun: crun\n\n#"
},
{
"path": "cmd/arrow/transform_batches_to_rows/transform_batches_to_rows.go",
"chars": 1987,
"preview": "package main\n\nimport (\n\t\"context\"\n\t\"database/sql\"\n\t\"database/sql/driver\"\n\t\"errors\"\n\t\"flag\"\n\t\"io\"\n\t\"log\"\n\n\tsf \"github.com"
},
{
"path": "cmd/logger/Makefile",
"chars": 181,
"preview": "include ../../gosnowflake.mak\nCMD_TARGET=logger\n\n## Install\ninstall: cinstall\n\n## Run\nrun: crun\n\n## Lint\nlint: clint\n\n##"
},
{
"path": "cmd/logger/logger.go",
"chars": 1333,
"preview": "package main\n\nimport (\n\t\"bytes\"\n\tsf \"github.com/snowflakedb/gosnowflake/v2\"\n\t\"log\"\n\t\"strings\"\n)\n\nfunc main() {\n\tbuf := &"
},
{
"path": "cmd/mfa/Makefile",
"chars": 178,
"preview": "include ../../gosnowflake.mak\nCMD_TARGET=mfa\n\n## Install\ninstall: cinstall\n\n## Run\nrun: crun\n\n## Lint\nlint: clint\n\n## Fo"
},
{
"path": "cmd/mfa/mfa.go",
"chars": 1525,
"preview": "package main\n\nimport (\n\t\"database/sql\"\n\t\"flag\"\n\t\"fmt\"\n\t\"log\"\n\n\tsf \"github.com/snowflakedb/gosnowflake/v2\"\n)\n\nfunc main()"
},
{
"path": "cmd/programmatic_access_token/.gitignore",
"chars": 4,
"preview": "pat\n"
},
{
"path": "cmd/programmatic_access_token/Makefile",
"chars": 178,
"preview": "include ../../gosnowflake.mak\nCMD_TARGET=pat\n\n## Install\ninstall: cinstall\n\n## Run\nrun: crun\n\n## Lint\nlint: clint\n\n## Fo"
},
{
"path": "cmd/programmatic_access_token/pat.go",
"chars": 1375,
"preview": "// you have to configure PAT on your user\n\npackage main\n\nimport (\n\t\"database/sql\"\n\t\"flag\"\n\t\"fmt\"\n\tsf \"github.com/snowfla"
},
{
"path": "cmd/tomlfileconnection/.gitignore",
"chars": 21,
"preview": "tomlfileconnection.go"
},
{
"path": "cmd/tomlfileconnection/Makefile",
"chars": 193,
"preview": "include ../../gosnowflake.mak\nCMD_TARGET=tomlfileconnection\n\n## Install\ninstall: cinstall\n\n## Run\nrun: crun\n\n## Lint\nlin"
},
{
"path": "cmd/variant/Makefile",
"chars": 182,
"preview": "include ../../gosnowflake.mak\nCMD_TARGET=variant\n\n## Install\ninstall: cinstall\n\n## Run\nrun: crun\n\n## Lint\nlint: clint\n\n#"
},
{
"path": "cmd/variant/insertvariantobject.go",
"chars": 3733,
"preview": "package main\n\nimport (\n\t\"context\"\n\t\"database/sql\"\n\t\"encoding/json\"\n\t\"flag\"\n\t\"fmt\"\n\t\"log\"\n\t\"strconv\"\n\t\"time\"\n\n\tsf \"github"
},
{
"path": "codecov.yml",
"chars": 62,
"preview": "parsers:\n go:\n partials_as_hits: true\n\nignore:\n - \"cmd/\"\n"
},
{
"path": "connection.go",
"chars": 19414,
"preview": "package gosnowflake\n\nimport (\n\t\"context\"\n\t\"database/sql\"\n\t\"database/sql/driver\"\n\t\"encoding/json\"\n\t\"fmt\"\n\t\"net/http\"\n\t\"ne"
},
{
"path": "connection_configuration_test.go",
"chars": 2080,
"preview": "package gosnowflake\n\nimport (\n\t\"database/sql\"\n\ttoml \"github.com/BurntSushi/toml\"\n\t\"os\"\n\t\"strconv\"\n\t\"testing\"\n)\n\n// TODO "
},
{
"path": "connection_test.go",
"chars": 32922,
"preview": "package gosnowflake\n\nimport (\n\t\"context\"\n\t\"database/sql\"\n\t\"database/sql/driver\"\n\t\"encoding/json\"\n\t\"errors\"\n\t\"fmt\"\n\terror"
},
{
"path": "connection_util.go",
"chars": 9685,
"preview": "package gosnowflake\n\nimport (\n\t\"bytes\"\n\t\"context\"\n\t\"errors\"\n\t\"fmt\"\n\t\"io\"\n\t\"maps\"\n\t\"runtime\"\n\t\"strconv\"\n\t\"strings\"\n\t\"sync"
},
{
"path": "connectivity_diagnosis.go",
"chars": 12639,
"preview": "package gosnowflake\n\nimport (\n\t\"context\"\n\t\"crypto/x509\"\n\t\"encoding/json\"\n\t\"encoding/pem\"\n\t\"errors\"\n\t\"fmt\"\n\tsfconfig \"git"
},
{
"path": "connectivity_diagnosis_test.go",
"chars": 27246,
"preview": "package gosnowflake\n\nimport (\n\t\"bytes\"\n\t\"context\"\n\t\"crypto/tls\"\n\t\"encoding/pem\"\n\t\"fmt\"\n\tsfconfig \"github.com/snowflakedb"
},
{
"path": "connector.go",
"chars": 1101,
"preview": "package gosnowflake\n\nimport (\n\t\"context\"\n\t\"database/sql/driver\"\n\tsfconfig \"github.com/snowflakedb/gosnowflake/v2/interna"
},
{
"path": "connector_test.go",
"chars": 3490,
"preview": "package gosnowflake\n\nimport (\n\t\"bytes\"\n\t\"context\"\n\t\"database/sql/driver\"\n\tsfconfig \"github.com/snowflakedb/gosnowflake/v"
},
{
"path": "converter.go",
"chars": 100142,
"preview": "package gosnowflake\n\nimport (\n\t\"bytes\"\n\t\"context\"\n\t\"database/sql\"\n\t\"database/sql/driver\"\n\t\"encoding/hex\"\n\t\"encoding/json"
},
{
"path": "converter_test.go",
"chars": 37113,
"preview": "package gosnowflake\n\nimport (\n\t\"context\"\n\t\"database/sql\"\n\t\"database/sql/driver\"\n\t\"fmt\"\n\t\"github.com/snowflakedb/gosnowfl"
},
{
"path": "crl.go",
"chars": 21274,
"preview": "package gosnowflake\n\nimport (\n\t\"crypto/x509\"\n\t\"encoding/asn1\"\n\t\"errors\"\n\t\"fmt\"\n\t\"io\"\n\t\"net/http\"\n\t\"net/url\"\n\t\"os\"\n\t\"path"
},
{
"path": "crl_test.go",
"chars": 52839,
"preview": "package gosnowflake\n\nimport (\n\t\"cmp\"\n\t\"context\"\n\t\"crypto/rand\"\n\t\"crypto/rsa\"\n\t\"crypto/sha256\"\n\t\"crypto/x509\"\n\t\"crypto/x5"
},
{
"path": "ctx_test.go",
"chars": 1376,
"preview": "package gosnowflake\n\nimport (\n\t\"bytes\"\n\t\"context\"\n\t\"fmt\"\n\t\"strings\"\n\t\"testing\"\n)\n\nfunc TestCtxVal(t *testing.T) {\n\ttype "
},
{
"path": "datatype.go",
"chars": 5427,
"preview": "package gosnowflake\n\nimport (\n\t\"bytes\"\n\t\"database/sql\"\n\t\"database/sql/driver\"\n\t\"fmt\"\n\t\"github.com/snowflakedb/gosnowflak"
},
{
"path": "datatype_test.go",
"chars": 2209,
"preview": "package gosnowflake\n\nimport (\n\t\"database/sql/driver\"\n\t\"fmt\"\n\t\"github.com/snowflakedb/gosnowflake/v2/internal/errors\"\n\t\"g"
},
{
"path": "datetime.go",
"chars": 4458,
"preview": "package gosnowflake\n\nimport (\n\t\"errors\"\n\t\"regexp\"\n\t\"strconv\"\n\t\"strings\"\n\t\"time\"\n)\n\nvar incorrectSecondsFractionRegex = r"
},
{
"path": "datetime_test.go",
"chars": 4478,
"preview": "package gosnowflake\n\nimport (\n\t\"testing\"\n\t\"time\"\n)\n\nfunc TestSnowflakeFormatToGoFormatUnitTest(t *testing.T) {\n\tlocation"
},
{
"path": "doc.go",
"chars": 73740,
"preview": "/*\nPackage gosnowflake is a pure Go Snowflake driver for the database/sql package.\n\nClients can use the database/sql pac"
},
{
"path": "driver.go",
"chars": 4098,
"preview": "package gosnowflake\n\nimport (\n\t\"context\"\n\t\"database/sql\"\n\t\"database/sql/driver\"\n\tsfconfig \"github.com/snowflakedb/gosnow"
},
{
"path": "driver_ocsp_test.go",
"chars": 24236,
"preview": "package gosnowflake\n\nimport (\n\t\"context\"\n\t\"crypto/tls\"\n\t\"crypto/x509\"\n\t\"database/sql\"\n\t\"errors\"\n\t\"fmt\"\n\t\"net/http\"\n\t\"net"
},
{
"path": "driver_test.go",
"chars": 66171,
"preview": "package gosnowflake\n\nimport (\n\t\"cmp\"\n\t\"context\"\n\t\"crypto/rsa\"\n\t\"database/sql\"\n\t\"database/sql/driver\"\n\t\"encoding/base64\"\n"
},
{
"path": "dsn.go",
"chars": 1465,
"preview": "package gosnowflake\n\nimport (\n\tsfconfig \"github.com/snowflakedb/gosnowflake/v2/internal/config\"\n)\n\n// Type aliases — re-"
},
{
"path": "easy_logging.go",
"chars": 5994,
"preview": "package gosnowflake\n\nimport (\n\t\"errors\"\n\t\"fmt\"\n\terrors2 \"github.com/snowflakedb/gosnowflake/v2/internal/errors\"\n\t\"io\"\n\t\""
},
{
"path": "easy_logging_test.go",
"chars": 8186,
"preview": "package gosnowflake\n\nimport (\n\t\"context\"\n\t\"fmt\"\n\t\"os\"\n\t\"path\"\n\t\"path/filepath\"\n\t\"strings\"\n\t\"sync\"\n\t\"testing\"\n\n\tloggerint"
},
{
"path": "encrypt_util.go",
"chars": 14318,
"preview": "package gosnowflake\n\nimport (\n\t\"bytes\"\n\t\"crypto/aes\"\n\t\"crypto/cipher\"\n\t\"crypto/rand\"\n\t\"encoding/base64\"\n\t\"encoding/json\""
},
{
"path": "encrypt_util_test.go",
"chars": 9004,
"preview": "package gosnowflake\n\nimport (\n\t\"bufio\"\n\t\"compress/gzip\"\n\t\"encoding/base64\"\n\t\"errors\"\n\t\"fmt\"\n\t\"io\"\n\t\"math/rand\"\n\t\"os\"\n\t\"o"
},
{
"path": "errors.go",
"chars": 13617,
"preview": "package gosnowflake\n\nimport (\n\t\"fmt\"\n\t\"runtime/debug\"\n\t\"strconv\"\n\t\"time\"\n\n\tsferrors \"github.com/snowflakedb/gosnowflake/"
},
{
"path": "errors_test.go",
"chars": 2065,
"preview": "package gosnowflake\n\nimport (\n\t\"strings\"\n\t\"testing\"\n)\n\nfunc TestErrorMessage(t *testing.T) {\n\te := &SnowflakeError{\n\t\tNu"
},
{
"path": "file_compression_type.go",
"chars": 2313,
"preview": "package gosnowflake\n\nimport (\n\t\"bytes\"\n\t\"strings\"\n\n\t\"github.com/gabriel-vasile/mimetype\"\n)\n\ntype compressionType struct "
},
{
"path": "file_transfer_agent.go",
"chars": 39764,
"preview": "package gosnowflake\n\n//lint:file-ignore U1000 Ignore all unused code\n\nimport (\n\t\"bytes\"\n\t\"cmp\"\n\t\"context\"\n\t\"database/sql"
},
{
"path": "file_transfer_agent_test.go",
"chars": 38484,
"preview": "package gosnowflake\n\nimport (\n\t\"bytes\"\n\t\"context\"\n\t\"errors\"\n\t\"fmt\"\n\t\"io\"\n\t\"net/url\"\n\t\"os\"\n\t\"path\"\n\t\"path/filepath\"\n\t\"reg"
},
{
"path": "file_util.go",
"chars": 5480,
"preview": "package gosnowflake\n\nimport (\n\t\"bytes\"\n\t\"compress/gzip\"\n\t\"crypto/sha256\"\n\t\"encoding/base64\"\n\t\"io\"\n\t\"net/url\"\n\t\"os\"\n\t\"pat"
},
{
"path": "file_util_test.go",
"chars": 1357,
"preview": "package gosnowflake\n\nimport (\n\t\"os/user\"\n\t\"path/filepath\"\n\t\"testing\"\n)\n\nfunc TestGetDigestAndSizeForInvalidDir(t *testin"
},
{
"path": "function_wrapper_test.go",
"chars": 1210,
"preview": "package gosnowflake\n\nimport (\n\t\"context\"\n\t\"sync\"\n\t\"testing\"\n)\n\nfunc TestGoWrapper(t *testing.T) {\n\tvar (\n\t\tgoWrapperCall"
},
{
"path": "function_wrappers.go",
"chars": 772,
"preview": "package gosnowflake\n\nimport \"context\"\n\n// GoroutineWrapperFunc is used to wrap goroutines. This is useful if the caller "
},
{
"path": "gcs_storage_client.go",
"chars": 24483,
"preview": "package gosnowflake\n\nimport (\n\t\"cmp\"\n\t\"context\"\n\t\"encoding/json\"\n\t\"fmt\"\n\t\"io\"\n\t\"net/http\"\n\t\"net/url\"\n\t\"os\"\n\t\"strconv\"\n\t\""
},
{
"path": "gcs_storage_client_test.go",
"chars": 38615,
"preview": "package gosnowflake\n\nimport (\n\t\"bytes\"\n\t\"context\"\n\t\"encoding/json\"\n\t\"errors\"\n\t\"fmt\"\n\t\"io\"\n\t\"net/http\"\n\t\"net/url\"\n\t\"os\"\n\t"
},
{
"path": "go.mod",
"chars": 3710,
"preview": "module github.com/snowflakedb/gosnowflake/v2\n\ngo 1.24.0\n\nrequire (\n\tgithub.com/99designs/keyring v1.2.2\n\tgithub.com/Azur"
},
{
"path": "go.sum",
"chars": 17756,
"preview": "github.com/99designs/go-keychain v0.0.0-20191008050251-8e49817e8af4 h1:/vQbFIOMbk2FiG/kXiLl8BRyzTWDw7gX/Hz7Dd5eDMs=\ngith"
},
{
"path": "gosnowflake.mak",
"chars": 884,
"preview": "## Setup\nSHELL := /bin/bash\nSRC = $(shell find . -type f -name '*.go' -not -path \"./vendor/*\")\n\nsetup:\n\t@which golint &>"
},
{
"path": "heartbeat.go",
"chars": 4079,
"preview": "package gosnowflake\n\nimport (\n\t\"context\"\n\t\"encoding/json\"\n\t\"fmt\"\n\t\"io\"\n\t\"net/http\"\n\t\"net/url\"\n\t\"time\"\n)\n\nconst (\n\tminHea"
},
{
"path": "heartbeat_test.go",
"chars": 2759,
"preview": "package gosnowflake\n\nimport (\n\t\"context\"\n\t\"testing\"\n\t\"time\"\n)\n\nfunc TestUnitPostHeartbeat(t *testing.T) {\n\trunSnowflakeC"
},
{
"path": "htap.go",
"chars": 2175,
"preview": "package gosnowflake\n\nimport (\n\t\"sort\"\n\t\"strconv\"\n\t\"sync\"\n)\n\nconst (\n\tqueryContextCacheSizeParamName = \"QUERY_CONTEXT_CAC"
},
{
"path": "htap_test.go",
"chars": 20869,
"preview": "package gosnowflake\n\nimport (\n\t\"context\"\n\t\"database/sql/driver\"\n\t\"encoding/json\"\n\t\"fmt\"\n\t\"net/url\"\n\t\"reflect\"\n\t\"strconv\""
},
{
"path": "internal/arrow/arrow.go",
"chars": 3929,
"preview": "package arrow\n\nimport (\n\t\"context\"\n\t\"time\"\n\n\t\"github.com/apache/arrow-go/v18/arrow\"\n\t\"github.com/apache/arrow-go/v18/arr"
},
{
"path": "internal/compilation/cgo_disabled.go",
"chars": 111,
"preview": "//go:build !cgo\n\npackage compilation\n\n// CgoEnabled is set to false if CGO is disabled.\nvar CgoEnabled = false\n"
},
{
"path": "internal/compilation/cgo_enabled.go",
"chars": 107,
"preview": "//go:build cgo\n\npackage compilation\n\n// CgoEnabled is set to true if CGO is enabled.\nvar CgoEnabled = true\n"
},
{
"path": "internal/compilation/linking_mode.go",
"chars": 1563,
"preview": "package compilation\n\nimport (\n\t\"debug/elf\"\n\t\"fmt\"\n\t\"runtime\"\n\t\"sync\"\n)\n\n// LinkingMode describes what linking mode was d"
},
{
"path": "internal/compilation/minicore_disabled.go",
"chars": 355,
"preview": "//go:build minicore_disabled\n\npackage compilation\n\n// MinicoreEnabled is set to false when building with -tags minicore_"
},
{
"path": "internal/compilation/minicore_enabled.go",
"chars": 411,
"preview": "//go:build !minicore_disabled\n\npackage compilation\n\n// MinicoreEnabled is set to true by default. Build with -tags minic"
},
{
"path": "internal/config/assert_test.go",
"chars": 3071,
"preview": "package config\n\nimport (\n\t\"fmt\"\n\t\"reflect\"\n\t\"slices\"\n\t\"strings\"\n\t\"testing\"\n\t\"time\"\n\n\tsflogger \"github.com/snowflakedb/go"
},
{
"path": "internal/config/auth_type.go",
"chars": 4366,
"preview": "package config\n\nimport (\n\t\"net/url\"\n\t\"strings\"\n\n\tsferrors \"github.com/snowflakedb/gosnowflake/v2/internal/errors\"\n)\n\n// "
},
{
"path": "internal/config/config.go",
"chars": 7374,
"preview": "// Package config provides the Config struct which contains all configuration parameters for the driver and a Validate m"
},
{
"path": "internal/config/config_bool.go",
"chars": 545,
"preview": "package config\n\n// Bool is a type to represent true or false in the Config\ntype Bool uint8\n\nconst (\n\t// BoolNotSet repre"
},
{
"path": "internal/config/connection_configuration.go",
"chars": 11826,
"preview": "package config\n\nimport (\n\t\"encoding/base64\"\n\t\"errors\"\n\t\"os\"\n\tpath \"path/filepath\"\n\t\"runtime\"\n\t\"strconv\"\n\t\"strings\"\n\t\"tim"
},
{
"path": "internal/config/connection_configuration_test.go",
"chars": 18599,
"preview": "package config\n\nimport (\n\t\"bytes\"\n\t\"crypto/rand\"\n\t\"crypto/rsa\"\n\t\"crypto/x509\"\n\t\"encoding/base64\"\n\t\"fmt\"\n\t\"os\"\n\tpath \"pat"
},
{
"path": "internal/config/crl_mode.go",
"chars": 1468,
"preview": "package config\n\nimport (\n\t\"fmt\"\n\t\"strings\"\n)\n\n// CertRevocationCheckMode defines the modes for certificate revocation ch"
},
{
"path": "internal/config/dsn.go",
"chars": 34622,
"preview": "package config\n\nimport (\n\t\"crypto/rsa\"\n\t\"crypto/x509\"\n\t\"encoding/base64\"\n\t\"encoding/pem\"\n\t\"errors\"\n\t\"fmt\"\n\t\"net/url\"\n\t\"o"
},
{
"path": "internal/config/dsn_test.go",
"chars": 98500,
"preview": "package config\n\nimport (\n\t\"crypto/ecdsa\"\n\t\"crypto/elliptic\"\n\tcr \"crypto/rand\"\n\t\"crypto/rsa\"\n\t\"crypto/tls\"\n\t\"crypto/x509\""
},
{
"path": "internal/config/ocsp_mode.go",
"chars": 925,
"preview": "package config\n\n// OCSPFailOpenMode is OCSP fail open mode. OCSPFailOpenTrue by default and may\n// set to ocspModeFailCl"
},
{
"path": "internal/config/priv_key.go",
"chars": 889,
"preview": "package config\n\nimport (\n\t\"crypto/rsa\"\n\t\"crypto/x509\"\n\n\tsferrors \"github.com/snowflakedb/gosnowflake/v2/internal/errors\""
},
{
"path": "internal/config/tls_config.go",
"chars": 1226,
"preview": "package config\n\nimport (\n\t\"crypto/tls\"\n\t\"sync\"\n)\n\nvar (\n\ttlsConfigLock sync.RWMutex\n\ttlsConfigRegistry = make(map[st"
},
{
"path": "internal/config/tls_config_test.go",
"chars": 3556,
"preview": "package config\n\nimport (\n\t\"crypto/tls\"\n\t\"crypto/x509\"\n\t\"testing\"\n)\n\nfunc TestRegisterTLSConfig(t *testing.T) {\n\t// Clean"
},
{
"path": "internal/config/token_accessor.go",
"chars": 260,
"preview": "package config\n\n// TokenAccessor manages the session token and master token\ntype TokenAccessor interface {\n\tGetTokens() "
},
{
"path": "internal/errors/errors.go",
"chars": 18712,
"preview": "// Package errors defines error types and error codes for the Snowflake driver.\n// It includes both errors returned by t"
},
{
"path": "internal/logger/accessor.go",
"chars": 2730,
"preview": "package logger\n\nimport (\n\t\"errors\"\n\t\"log\"\n\t\"sync\"\n\n\t\"github.com/snowflakedb/gosnowflake/v2/sflog\"\n)\n\n// LoggerAccessor a"
},
{
"path": "internal/logger/accessor_test.go",
"chars": 6835,
"preview": "package logger_test\n\nimport (\n\t\"bytes\"\n\t\"context\"\n\t\"strings\"\n\t\"testing\"\n\n\t\"github.com/snowflakedb/gosnowflake/v2/interna"
},
{
"path": "internal/logger/context.go",
"chars": 2312,
"preview": "package logger\n\nimport (\n\t\"context\"\n\t\"fmt\"\n\t\"log/slog\"\n\t\"maps\"\n\t\"sync\"\n)\n\n// Storage for log keys and hooks (single sour"
},
{
"path": "internal/logger/easy_logging_support.go",
"chars": 1364,
"preview": "package logger\n\nimport (\n\t\"fmt\"\n\t\"os\"\n)\n\n// CloseFileOnLoggerReplace closes a log file when the logger is replaced.\n// T"
},
{
"path": "internal/logger/interfaces.go",
"chars": 517,
"preview": "package logger\n\nimport (\n\t\"github.com/snowflakedb/gosnowflake/v2/sflog\"\n)\n\n// Re-export types from sflog package to avoi"
},
{
"path": "internal/logger/level_filtering.go",
"chars": 6150,
"preview": "package logger\n\nimport (\n\t\"context\"\n\t\"errors\"\n\t\"github.com/snowflakedb/gosnowflake/v2/sflog\"\n\t\"io\"\n\t\"log/slog\"\n)\n\n// lev"
}
]
// ... and 194 more files (download for full content)
About this extraction
This page contains the full source code of the snowflakedb/gosnowflake GitHub repository, extracted and formatted as plain text for AI agents and large language models (LLMs). The extraction includes 394 files (2.4 MB), approximately 659.5k tokens, and a symbol index with 3380 extracted functions, classes, methods, constants, and types. Use this with OpenClaw, Claude, ChatGPT, Cursor, Windsurf, or any other AI tool that accepts text input. You can copy the full output to your clipboard or download it as a .txt file.
Extracted by GitExtract — a free GitHub repo-to-text converter for AI. Built by Nikandr Surkov.