diff --git a/.github/workflows/_extension_distribution.yml b/.github/workflows/_extension_distribution.yml index fd95453..414ddcc 100644 --- a/.github/workflows/_extension_distribution.yml +++ b/.github/workflows/_extension_distribution.yml @@ -11,15 +11,6 @@ name: Extension distribution on: workflow_call: - secrets: - VCPKG_CACHING_AWS_ACCESS_KEY_ID: - required: false - VCPKG_CACHING_AWS_SECRET_ACCESS_KEY: - required: false - VCPKG_CACHING_AWS_ENDPOINT_URL: - required: false - VCPKG_CACHING_AWS_DEFAULT_REGION: - required: false inputs: # The name with which the extension will be built extension_name: @@ -62,11 +53,11 @@ on: vcpkg_binary_sources: required: false type: string - default: '' + default: "" vcpkg_extra_dependencies: required: false type: string - default: '' + default: "" # Override the default script producing the matrices. Allows specifying custom matrices. matrix_parse_script: required: false @@ -161,10 +152,10 @@ on: reduced_ci_mode: required: false type: string - default: 'auto' + default: "auto" env: - VCPKG_BINARY_SOURCES: ${{inputs.vcpkg_binary_sources == '' && 'clear;http,https://vcpkg-cache.duckdb.org,read' || inputs.vcpkg_binary_sources }} + VCPKG_BINARY_SOURCES: ${{inputs.vcpkg_binary_sources == '' && 'clear' || inputs.vcpkg_binary_sources }} jobs: generate_matrix: @@ -179,7 +170,7 @@ jobs: - uses: actions/checkout@v4 name: Checkout Extension CI tools with: - path: 'extension-ci-tools' + path: "extension-ci-tools" ref: ${{ inputs.ci_tools_version }} repository: ${{ inputs.override_ci_tools_repository }} @@ -227,12 +218,6 @@ jobs: VCPKG_TOOLCHAIN_PATH: ${{ github.workspace }}/vcpkg/scripts/buildsystems/vcpkg.cmake VCPKG_TARGET_TRIPLET: ${{ matrix.vcpkg_target_triplet }} VCPKG_HOST_TRIPLET: ${{ matrix.vcpkg_host_triplet }} - # VCPKG caching - AWS_ACCESS_KEY_ID: ${{ secrets.VCPKG_CACHING_AWS_ACCESS_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.VCPKG_CACHING_AWS_SECRET_ACCESS_KEY }} - AWS_ENDPOINT_URL: ${{ secrets.VCPKG_CACHING_AWS_ENDPOINT_URL }} - AWS_DEFAULT_REGION: ${{ secrets.VCPKG_CACHING_AWS_DEFAULT_REGION }} - AWS_REQUEST_CHECKSUM_CALCULATION: when_required # Misc GEN: ninja BUILD_SHELL: ${{ inputs.build_duckdb_shell && '1' || '0' }} @@ -273,19 +258,19 @@ jobs: repository: ${{ inputs.override_repository }} ref: ${{ inputs.override_ref }} fetch-depth: 0 - submodules: 'recursive' + submodules: "recursive" - uses: actions/checkout@v4 name: Checkout current repository if: ${{inputs.override_repository == ''}} with: fetch-depth: 0 - submodules: 'recursive' + submodules: "recursive" - uses: actions/checkout@v4 name: Checkout Extension CI tools with: - path: 'extension-ci-tools' + path: "extension-ci-tools" ref: ${{ inputs.ci_tools_version }} repository: ${{ inputs.override_ci_tools_repository }} @@ -298,7 +283,7 @@ jobs: if: ${{inputs.set_caller_as_duckdb}} uses: actions/checkout@v4 with: - path: 'duckdb' + path: "duckdb" fetch-tags: true fetch-depth: 0 @@ -320,7 +305,7 @@ jobs: - uses: actions/checkout@v4 name: Checkout Extension CI tools with: - path: 'extension-ci-tools' + path: "extension-ci-tools" ref: ${{ inputs.ci_tools_version }} repository: ${{ inputs.override_ci_tools_repository }} fetch-depth: 0 @@ -340,11 +325,6 @@ jobs: cat <<-EOF > docker_env.txt VCPKG_BINARY_SOURCES=$VCPKG_BINARY_SOURCES USE_MERGED_VCPKG_MANIFEST=${{ inputs.use_merged_vcpkg_manifest }} - AWS_ACCESS_KEY_ID=${{ secrets.VCPKG_CACHING_AWS_ACCESS_KEY_ID }} - AWS_SECRET_ACCESS_KEY=${{ secrets.VCPKG_CACHING_AWS_SECRET_ACCESS_KEY }} - AWS_ENDPOINT_URL=${{ secrets.VCPKG_CACHING_AWS_ENDPOINT_URL 
}} - AWS_DEFAULT_REGION=${{ secrets.VCPKG_CACHING_AWS_DEFAULT_REGION }} - AWS_REQUEST_CHECKSUM_CALCULATION=when_required VCPKG_TARGET_TRIPLET=${{ matrix.vcpkg_target_triplet }} BUILD_SHELL=${{ inputs.build_duckdb_shell && '1' || '0' }} OPENSSL_ROOT_DIR=/duckdb_build_dir/build/${{ inputs.build_type }}/vcpkg_installed/${{ matrix.vcpkg_target_triplet }} @@ -433,11 +413,13 @@ jobs: env: BQ_TEST_PROJECT: ${{ secrets.BQ_TEST_PROJECT }} BQ_TEST_DATASET: ${{ secrets.BQ_TEST_DATASET }}_${{ matrix.duckdb_arch }} + BQ_SERVICE_ACCOUNT_JSON: ${{ secrets.GCS_SERVICE_ACCOUNT_KEY }} run: | echo "${{ secrets.GCS_SERVICE_ACCOUNT_KEY }}" > /tmp/gcloud-service-key.json docker run \ -e BQ_TEST_PROJECT=$BQ_TEST_PROJECT \ -e BQ_TEST_DATASET=$BQ_TEST_DATASET \ + -e BQ_SERVICE_ACCOUNT_JSON="$BQ_SERVICE_ACCOUNT_JSON" \ -e GOOGLE_APPLICATION_CREDENTIALS=/auth/gcloud-service-key.json \ -v $GOOGLE_APPLICATION_CREDENTIALS:/auth/gcloud-service-key.json \ --env-file=docker_env.txt \ @@ -452,6 +434,7 @@ jobs: LINUX_CI_IN_DOCKER: 0 BQ_TEST_PROJECT: ${{ secrets.BQ_TEST_PROJECT }} BQ_TEST_DATASET: ${{ secrets.BQ_TEST_DATASET }}_${{ matrix.duckdb_arch }} + BQ_SERVICE_ACCOUNT_JSON: ${{ secrets.GCS_SERVICE_ACCOUNT_KEY }} run: | eval "$(jq -r '.test_env_variables // {} | to_entries[] | "export \(.key)=\(.value | @sh)"' <<< '${{inputs.test_config}}')" make test_${{ inputs.build_type }} @@ -494,12 +477,6 @@ jobs: VCPKG_TARGET_TRIPLET: ${{ matrix.vcpkg_target_triplet }} VCPKG_HOST_TRIPLET: ${{ matrix.vcpkg_host_triplet }} USE_MERGED_VCPKG_MANIFEST: ${{ inputs.use_merged_vcpkg_manifest }} - # VCPKG caching - AWS_ACCESS_KEY_ID: ${{ secrets.VCPKG_CACHING_AWS_ACCESS_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.VCPKG_CACHING_AWS_SECRET_ACCESS_KEY }} - AWS_ENDPOINT_URL: ${{ secrets.VCPKG_CACHING_AWS_ENDPOINT_URL }} - AWS_DEFAULT_REGION: ${{ secrets.VCPKG_CACHING_AWS_DEFAULT_REGION }} - AWS_REQUEST_CHECKSUM_CALCULATION: when_required # Misc OSX_BUILD_ARCH: ${{ matrix.osx_build_arch }} GEN: ninja @@ -514,14 +491,14 @@ jobs: repository: ${{ inputs.override_repository }} ref: ${{ inputs.override_ref }} fetch-depth: 0 - submodules: 'recursive' + submodules: "recursive" - uses: actions/checkout@v4 name: Checkout current repository if: ${{inputs.override_repository == ''}} with: fetch-depth: 0 - submodules: 'recursive' + submodules: "recursive" - name: Install Ninja run: | @@ -537,12 +514,12 @@ jobs: - uses: actions/setup-python@v5 with: - python-version: '3.11' + python-version: "3.11" - uses: actions/checkout@v4 name: Checkout Extension CI tools with: - path: 'extension-ci-tools' + path: "extension-ci-tools" ref: ${{ inputs.ci_tools_version }} repository: ${{ inputs.override_ci_tools_repository }} @@ -555,7 +532,7 @@ jobs: if: ${{inputs.set_caller_as_duckdb}} uses: actions/checkout@v4 with: - path: 'duckdb' + path: "duckdb" fetch-tags: true fetch-depth: 0 @@ -598,11 +575,11 @@ jobs: run: | brew install gcc - - name: 'Setup go' + - name: "Setup go" if: ${{ (inputs.enable_go || contains(format(';{0};', inputs.extra_toolchains), ';go;'))}} uses: actions/setup-go@v4 with: - go-version: '1.23' + go-version: "1.23" - name: Install parser tools if: ${{ contains(format(';{0};', inputs.extra_toolchains), ';parser_tools;')}} @@ -639,14 +616,6 @@ jobs: echo "CPPFLAGS=-I/opt/homebrew/opt/libomp/include" >> $GITHUB_ENV echo "CXXFLAGS=-I/opt/homebrew/opt/libomp/include" >> $GITHUB_ENV - - name: Override AWS CLI - if: ${{ (contains(format(';{0};', inputs.extra_toolchains), ';downgraded_aws_cli;')) }} - shell: bash - run: | - curl 
https://awscli.amazonaws.com/AWSCLIV2-2.22.35.pkg -o ./AWSCLIV2.pkg - sudo installer -pkg ./AWSCLIV2.pkg -target / - aws --version - - name: Run configure shell: bash env: @@ -723,12 +692,6 @@ jobs: VCPKG_TARGET_TRIPLET: ${{ matrix.vcpkg_target_triplet }} VCPKG_HOST_TRIPLET: ${{ matrix.vcpkg_host_triplet }} USE_MERGED_VCPKG_MANIFEST: ${{ inputs.use_merged_vcpkg_manifest }} - # VCPKG caching - AWS_ACCESS_KEY_ID: ${{ secrets.VCPKG_CACHING_AWS_ACCESS_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.VCPKG_CACHING_AWS_SECRET_ACCESS_KEY }} - AWS_ENDPOINT_URL: ${{ secrets.VCPKG_CACHING_AWS_ENDPOINT_URL }} - AWS_DEFAULT_REGION: ${{ secrets.VCPKG_CACHING_AWS_DEFAULT_REGION }} - AWS_REQUEST_CHECKSUM_CALCULATION: when_required # Misc BUILD_SHELL: ${{ inputs.build_duckdb_shell && '1' || '0' }} DUCKDB_PLATFORM: ${{ matrix.duckdb_arch }} @@ -737,20 +700,6 @@ jobs: GEN: ninja steps: - - name: Downgrade AWS cli - shell: powershell - if: ${{ (contains(format(';{0};', inputs.extra_toolchains), ';downgraded_aws_cli;')) }} - run: | - $app = Get-WmiObject -Class Win32_Product -Filter "Name LIKE 'AWS Command Line Interface%'" - if ($app) { $app.Uninstall() } - msiexec.exe /i https://awscli.amazonaws.com/AWSCLIV2-2.22.35.msi /qn - sleep 60 - - - name: Test AWS cli version - shell: powershell - run: | - aws --version - - name: Keep \n line endings shell: bash run: | @@ -764,28 +713,28 @@ jobs: repository: ${{ inputs.override_repository }} ref: ${{ inputs.override_ref }} fetch-depth: 0 - submodules: 'recursive' + submodules: "recursive" - uses: actions/checkout@v4 name: Checkout current repository if: ${{inputs.override_repository == ''}} with: fetch-depth: 0 - submodules: 'recursive' + submodules: "recursive" - uses: actions/setup-python@v5 with: - python-version: '3.11' + python-version: "3.11" - name: Setup Rust if: (inputs.enable_rust || contains(format(';{0};', inputs.extra_toolchains), ';rust;')) uses: dtolnay/rust-toolchain@stable - - name: 'Setup go' + - name: "Setup go" if: ${{ (inputs.enable_go || contains(format(';{0};', inputs.extra_toolchains), ';go;'))}} uses: actions/setup-go@v4 with: - go-version: '1.23' + go-version: "1.23" - name: Install parser tools if: ${{ contains(format(';{0};', inputs.extra_toolchains), ';parser_tools;')}} @@ -809,14 +758,14 @@ jobs: save: ${{ inputs.save_cache }} - uses: r-lib/actions/setup-r@v2 - if: matrix.duckdb_arch == 'windows_amd64_rtools' || matrix.duckdb_arch == 'windows_amd64_mingw' + if: matrix.duckdb_arch == 'windows_amd64_rtools' || matrix.duckdb_arch == 'windows_amd64_mingw' with: - r-version: 'devel' + r-version: "devel" update-rtools: true - rtools-version: '42' # linker bug in 43 + rtools-version: "42" # linker bug in 43 - name: setup rtools gcc for vcpkg - if: matrix.duckdb_arch == 'windows_amd64_rtools' || matrix.duckdb_arch == 'windows_amd64_mingw' + if: matrix.duckdb_arch == 'windows_amd64_rtools' || matrix.duckdb_arch == 'windows_amd64_mingw' run: | cp C:/rtools42/x86_64-w64-mingw32.static.posix/bin/gcc.exe C:/rtools42/x86_64-w64-mingw32.static.posix/bin/x86_64-w64-mingw32-gcc.exe cp C:/rtools42/x86_64-w64-mingw32.static.posix/bin/g++.exe C:/rtools42/x86_64-w64-mingw32.static.posix/bin/x86_64-w64-mingw32-g++.exe @@ -825,7 +774,7 @@ jobs: - uses: actions/checkout@v4 name: Checkout Extension CI tools with: - path: 'extension-ci-tools' + path: "extension-ci-tools" ref: ${{ inputs.ci_tools_version }} repository: ${{ inputs.override_ci_tools_repository }} @@ -840,7 +789,7 @@ jobs: if: ${{inputs.set_caller_as_duckdb}} uses: actions/checkout@v4 with: - path: 
'duckdb' + path: "duckdb" fetch-tags: true fetch-depth: 0 @@ -982,12 +931,6 @@ jobs: VCPKG_HOST_TRIPLET: ${{ matrix.vcpkg_host_triplet }} VCPKG_TOOLCHAIN_PATH: ${{ github.workspace }}/vcpkg/scripts/buildsystems/vcpkg.cmake USE_MERGED_VCPKG_MANIFEST: ${{ inputs.use_merged_vcpkg_manifest }} - # VCPKG caching - AWS_ACCESS_KEY_ID: ${{ secrets.VCPKG_CACHING_AWS_ACCESS_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.VCPKG_CACHING_AWS_SECRET_ACCESS_KEY }} - AWS_ENDPOINT_URL: ${{ secrets.VCPKG_CACHING_AWS_ENDPOINT_URL }} - AWS_DEFAULT_REGION: ${{ secrets.VCPKG_CACHING_AWS_DEFAULT_REGION }} - AWS_REQUEST_CHECKSUM_CALCULATION: when_required DUCKDB_PLATFORM: ${{ matrix.duckdb_arch }} WASM_EXTENSIONS: 1 @@ -1000,19 +943,19 @@ jobs: repository: ${{ inputs.override_repository }} ref: ${{ inputs.override_ref }} fetch-depth: 0 - submodules: 'recursive' + submodules: "recursive" - uses: actions/checkout@v4 name: Checkout current repository if: ${{inputs.override_repository == ''}} with: fetch-depth: 0 - submodules: 'recursive' + submodules: "recursive" - uses: actions/checkout@v4 name: Checkout Extension CI tools with: - path: 'extension-ci-tools' + path: "extension-ci-tools" ref: ${{ inputs.ci_tools_version }} repository: ${{ inputs.override_ci_tools_repository }} @@ -1025,7 +968,7 @@ jobs: if: ${{inputs.set_caller_as_duckdb}} uses: actions/checkout@v4 with: - path: 'duckdb' + path: "duckdb" fetch-tags: true fetch-depth: 0 @@ -1054,11 +997,11 @@ jobs: with: targets: wasm32-unknown-emscripten - - name: 'Setup go' + - name: "Setup go" if: ${{ (inputs.enable_go || contains(format(';{0};', inputs.extra_toolchains), ';go;'))}} uses: actions/setup-go@v4 with: - go-version: '1.23' + go-version: "1.23" - name: Setup vcpkg run: | @@ -1082,14 +1025,6 @@ jobs: restore-keys: ccache-extension-distribution-${{ matrix.duckdb_arch }}- save: ${{ inputs.save_cache }} - - name: Downgrade AWS cli - if: ${{ (contains(format(';{0};', inputs.extra_toolchains), ';downgraded_aws_cli;')) }} - run: | - curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64-2.22.35.zip" -o "awscliv2.zip" - unzip -q awscliv2.zip - sudo ./aws/install --update - aws --version - - name: Inject extra extension config if: ${{ inputs.extra_extension_config }} shell: bash diff --git a/README.md b/README.md index be79b71..77d322a 100644 --- a/README.md +++ b/README.md @@ -1,3 +1,6 @@ +> **IMPORTANT**: +> This extension is a fork of the community [DuckDB BigQuery](https://github.com/hafenkran/duckdb-bigquery) extension. It is not meant to be deployed to the community extension repository; it is intended for internal use. + # DuckDB BigQuery Extension This community extension allows [DuckDB](https://duckdb.org) to query data from Google BigQuery using a mix of BigQuery Storage (Read/Write) and REST API. It enables users to access, manage, and manipulate their BigQuery datasets/tables directly from DuckDB using standard SQL queries. Inspired by official DuckDB RDBMS extensions like [MySQL](https://duckdb.org/docs/extensions/mysql.html), [PostgreSQL](https://github.com/duckdb/postgres_scanner), and [SQLite](https://github.com/duckdb/sqlite_scanner), this extension offers a similar feel. See [Important Notes](#important-notes-on-using-google-bigquery) for disclaimers and usage information.
@@ -528,11 +531,7 @@ docker run \ ## Testing -The extension includes two test suites designed for different testing scenarios: - -### Testing Against Real BigQuery - -The `test/sql/bigquery/` directory contains tests that run against a real Google BigQuery project. These tests verify the extension's behavior with the actual BigQuery API. +The `test/sql/` directory contains tests that run against a real Google BigQuery project. These tests verify the extension's behavior with the actual BigQuery API. **Prerequisites:** @@ -563,34 +562,6 @@ BQ_TEST_DATASET=your-dataset \ - `BQ_TEST_DATASET`: A test dataset in your project (will be used for read/write operations) - `BQ_SERVICE_ACCOUNT_JSON`: Full JSON content for secret authentication tests -### Testing Against BigQuery Emulator - -The `test/sql/local/` directory contains tests that run against the [BigQuery Emulator](https://github.com/goccy/bigquery-emulator), a local BigQuery-compatible server. These tests are ideal for CI/CD pipelines and development without incurring BigQuery costs. - -```bash -# Start the emulator -./bigquery-emulator --project=test --dataset=dataset1 -``` - -**Running Emulator Tests:** - -```bash -BQ_API_ENDPOINT=0.0.0.0:9050 \ -BQ_GRPC_ENDPOINT=0.0.0.0:9060 \ -./build/release/test/unittest 'test/sql/local/*.test' - -# Run a specific local test -BQ_API_ENDPOINT=0.0.0.0:9050 \ -BQ_GRPC_ENDPOINT=0.0.0.0:9060 \ -./build/release/test/unittest 'test/sql/local/attach_insert_table.test' -``` - -**Limitations:** - -- The emulator may not support all BigQuery features -- Some advanced SQL functions might behave differently -- Performance characteristics will differ from production BigQuery - ## Important Notes on Using Google BigQuery > **⚠️ Disclaimer**: This is an independent, community-maintained open-source project and is not affiliated with, endorsed by, or officially supported by Google LLC, or any of their subsidiaries. This extension is provided "as is" without any warranties or guarantees. "DuckDB" and "BigQuery" are trademarks of their respective owners. Users are solely responsible for compliance with applicable terms of service and any costs incurred through usage. 
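With the suite flattened into `test/sql/`, a local run follows the same command pattern the README already uses. A minimal sketch, assuming the standard `./build/release/test/unittest` runner shown elsewhere in the README and a `test/sql/*.test` glob (the full invocation is elided in the hunk above); tests such as `attach_billing_project.test` additionally require `BQ_TEST_BILLING_PROJECT`:

```bash
# Run the relocated real-BigQuery suite; requires valid GCP credentials.
# BQ_TEST_PROJECT / BQ_TEST_DATASET select the target project and dataset;
# BQ_SERVICE_ACCOUNT_JSON feeds the secret-authentication tests.
BQ_TEST_PROJECT=your-project \
BQ_TEST_DATASET=your-dataset \
BQ_SERVICE_ACCOUNT_JSON="$(cat /path/to/service-account-key.json)" \
./build/release/test/unittest 'test/sql/*.test'
```

Note that the `require-env GOOGLE_APPLICATION_CREDENTIALS` guard is dropped from every renamed test below, so the suite no longer insists on that variable being set and can fall back to whatever application-default credentials are available.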
diff --git a/test/sql/bigquery/attach_alter_table.test b/test/sql/attach_alter_table.test similarity index 98% rename from test/sql/bigquery/attach_alter_table.test rename to test/sql/attach_alter_table.test index dd4ddde..7377375 100644 --- a/test/sql/bigquery/attach_alter_table.test +++ b/test/sql/attach_alter_table.test @@ -8,8 +8,6 @@ require-env BQ_TEST_PROJECT require-env BQ_TEST_DATASET -require-env GOOGLE_APPLICATION_CREDENTIALS - statement ok SET bq_debug_show_queries=True; diff --git a/test/sql/bigquery/attach_billing_project.test b/test/sql/attach_billing_project.test similarity index 95% rename from test/sql/bigquery/attach_billing_project.test rename to test/sql/attach_billing_project.test index 87316d9..992e877 100644 --- a/test/sql/bigquery/attach_billing_project.test +++ b/test/sql/attach_billing_project.test @@ -10,8 +10,6 @@ require-env BQ_TEST_BILLING_PROJECT require-env BQ_TEST_DATASET -require-env GOOGLE_APPLICATION_CREDENTIALS - statement ok ATTACH 'project=${BQ_TEST_PROJECT} dataset=${BQ_TEST_DATASET} billing_project=${BQ_TEST_BILLING_PROJECT}' AS bq (TYPE bigquery); diff --git a/test/sql/bigquery/attach_bq_pseudo_columns.test b/test/sql/attach_bq_pseudo_columns.test similarity index 94% rename from test/sql/bigquery/attach_bq_pseudo_columns.test rename to test/sql/attach_bq_pseudo_columns.test index 26211fa..dd397f8 100644 --- a/test/sql/bigquery/attach_bq_pseudo_columns.test +++ b/test/sql/attach_bq_pseudo_columns.test @@ -8,8 +8,6 @@ require-env BQ_TEST_PROJECT require-env BQ_TEST_DATASET -require-env GOOGLE_APPLICATION_CREDENTIALS - statement ok ATTACH 'project=${BQ_TEST_PROJECT} dataset=${BQ_TEST_DATASET}' AS bq (TYPE bigquery); diff --git a/test/sql/bigquery/attach_create_table.test b/test/sql/attach_create_table.test similarity index 97% rename from test/sql/bigquery/attach_create_table.test rename to test/sql/attach_create_table.test index 1c96b9e..a20c5bc 100644 --- a/test/sql/bigquery/attach_create_table.test +++ b/test/sql/attach_create_table.test @@ -10,8 +10,6 @@ require-env BQ_TEST_PROJECT require-env BQ_TEST_DATASET -require-env GOOGLE_APPLICATION_CREDENTIALS - statement ok SET bq_experimental_enable_bigquery_options=TRUE diff --git a/test/sql/bigquery/attach_default.test b/test/sql/attach_default.test similarity index 96% rename from test/sql/bigquery/attach_default.test rename to test/sql/attach_default.test index 8c52f4f..bc64b4b 100644 --- a/test/sql/bigquery/attach_default.test +++ b/test/sql/attach_default.test @@ -8,8 +8,6 @@ require-env BQ_TEST_PROJECT require-env BQ_TEST_DATASET -require-env GOOGLE_APPLICATION_CREDENTIALS - statement ok ATTACH 'project=${BQ_TEST_PROJECT} dataset=${BQ_TEST_DATASET}' AS bq (TYPE bigquery); diff --git a/test/sql/bigquery/attach_dml_statements.test b/test/sql/attach_dml_statements.test similarity index 95% rename from test/sql/bigquery/attach_dml_statements.test rename to test/sql/attach_dml_statements.test index 5fdadd5..f82d685 100644 --- a/test/sql/bigquery/attach_dml_statements.test +++ b/test/sql/attach_dml_statements.test @@ -8,8 +8,6 @@ require-env BQ_TEST_PROJECT require-env BQ_TEST_DATASET -require-env GOOGLE_APPLICATION_CREDENTIALS - statement ok SET bq_debug_show_queries=true diff --git a/test/sql/bigquery/attach_filter_pushdown.test b/test/sql/attach_filter_pushdown.test similarity index 95% rename from test/sql/bigquery/attach_filter_pushdown.test rename to test/sql/attach_filter_pushdown.test index fbe3223..81bfae4 100644 --- a/test/sql/bigquery/attach_filter_pushdown.test +++ 
b/test/sql/attach_filter_pushdown.test @@ -8,8 +8,6 @@ require-env BQ_TEST_PROJECT require-env BQ_TEST_DATASET -require-env GOOGLE_APPLICATION_CREDENTIALS - statement ok ATTACH 'project=${BQ_TEST_PROJECT} dataset=${BQ_TEST_DATASET}' AS bq (TYPE bigquery); diff --git a/test/sql/bigquery/attach_legacy_scan.test b/test/sql/attach_legacy_scan.test similarity index 97% rename from test/sql/bigquery/attach_legacy_scan.test rename to test/sql/attach_legacy_scan.test index 0907391..8c405cd 100644 --- a/test/sql/bigquery/attach_legacy_scan.test +++ b/test/sql/attach_legacy_scan.test @@ -8,8 +8,6 @@ require-env BQ_TEST_PROJECT require-env BQ_TEST_DATASET -require-env GOOGLE_APPLICATION_CREDENTIALS - statement ok SET bq_use_legacy_scan=true diff --git a/test/sql/bigquery/attach_public_dataset.test b/test/sql/attach_public_dataset.test similarity index 93% rename from test/sql/bigquery/attach_public_dataset.test rename to test/sql/attach_public_dataset.test index 3179386..2b51c9d 100644 --- a/test/sql/bigquery/attach_public_dataset.test +++ b/test/sql/attach_public_dataset.test @@ -10,8 +10,6 @@ require-env BQ_TEST_BILLING_PROJECT require-env BQ_TEST_DATASET -require-env GOOGLE_APPLICATION_CREDENTIALS - query I SELECT COUNT(*) FROM bigquery_scan('bigquery-public-data.geo_us_boundaries.cnecta', billing_project=${BQ_TEST_BILLING_PROJECT}); ---- diff --git a/test/sql/bigquery/attach_table_not_exists.test b/test/sql/attach_table_not_exists.test similarity index 90% rename from test/sql/bigquery/attach_table_not_exists.test rename to test/sql/attach_table_not_exists.test index 3e5e104..4d5cd73 100644 --- a/test/sql/bigquery/attach_table_not_exists.test +++ b/test/sql/attach_table_not_exists.test @@ -8,8 +8,6 @@ require-env BQ_TEST_PROJECT require-env BQ_TEST_DATASET -require-env GOOGLE_APPLICATION_CREDENTIALS - statement ok ATTACH 'project=${BQ_TEST_PROJECT} dataset=${BQ_TEST_DATASET}' AS bq (TYPE bigquery); diff --git a/test/sql/bigquery/attach_types_arrays.test b/test/sql/attach_types_arrays.test similarity index 94% rename from test/sql/bigquery/attach_types_arrays.test rename to test/sql/attach_types_arrays.test index 663804b..8a2deef 100644 --- a/test/sql/bigquery/attach_types_arrays.test +++ b/test/sql/attach_types_arrays.test @@ -8,8 +8,6 @@ require-env BQ_TEST_PROJECT require-env BQ_TEST_DATASET -require-env GOOGLE_APPLICATION_CREDENTIALS - statement ok ATTACH 'project=${BQ_TEST_PROJECT} dataset=${BQ_TEST_DATASET}' AS bq (TYPE bigquery); diff --git a/test/sql/bigquery/attach_types_complex.test b/test/sql/attach_types_complex.test similarity index 98% rename from test/sql/bigquery/attach_types_complex.test rename to test/sql/attach_types_complex.test index dbf04de..6d79693 100644 --- a/test/sql/bigquery/attach_types_complex.test +++ b/test/sql/attach_types_complex.test @@ -8,8 +8,6 @@ require-env BQ_TEST_PROJECT require-env BQ_TEST_DATASET -require-env GOOGLE_APPLICATION_CREDENTIALS - statement ok ATTACH 'project=${BQ_TEST_PROJECT} dataset=${BQ_TEST_DATASET}' AS bq (TYPE bigquery); diff --git a/test/sql/bigquery/attach_types_numeric.test b/test/sql/attach_types_numeric.test similarity index 98% rename from test/sql/bigquery/attach_types_numeric.test rename to test/sql/attach_types_numeric.test index f66f7a0..0bf6a3a 100644 --- a/test/sql/bigquery/attach_types_numeric.test +++ b/test/sql/attach_types_numeric.test @@ -8,8 +8,6 @@ require-env BQ_TEST_PROJECT require-env BQ_TEST_DATASET -require-env GOOGLE_APPLICATION_CREDENTIALS - statement ok ATTACH 'project=${BQ_TEST_PROJECT} 
dataset=${BQ_TEST_DATASET}' AS bq (TYPE bigquery); diff --git a/test/sql/bigquery/attach_types_simple.test b/test/sql/attach_types_simple.test similarity index 95% rename from test/sql/bigquery/attach_types_simple.test rename to test/sql/attach_types_simple.test index 858a307..cba65f6 100644 --- a/test/sql/bigquery/attach_types_simple.test +++ b/test/sql/attach_types_simple.test @@ -8,8 +8,6 @@ require-env BQ_TEST_PROJECT require-env BQ_TEST_DATASET -require-env GOOGLE_APPLICATION_CREDENTIALS - statement ok ATTACH 'project=${BQ_TEST_PROJECT} dataset=${BQ_TEST_DATASET}' AS bq (TYPE bigquery); diff --git a/test/sql/bigquery/attach_types_temporal.test b/test/sql/attach_types_temporal.test similarity index 97% rename from test/sql/bigquery/attach_types_temporal.test rename to test/sql/attach_types_temporal.test index 4698410..207f9bc 100644 --- a/test/sql/bigquery/attach_types_temporal.test +++ b/test/sql/attach_types_temporal.test @@ -8,8 +8,6 @@ require-env BQ_TEST_PROJECT require-env BQ_TEST_DATASET -require-env GOOGLE_APPLICATION_CREDENTIALS - statement ok ATTACH 'project=${BQ_TEST_PROJECT} dataset=${BQ_TEST_DATASET}' AS bq (TYPE bigquery); diff --git a/test/sql/bigquery/attach_types_unsupported.test b/test/sql/attach_types_unsupported.test similarity index 96% rename from test/sql/bigquery/attach_types_unsupported.test rename to test/sql/attach_types_unsupported.test index 70d50c4..b3e64f7 100644 --- a/test/sql/bigquery/attach_types_unsupported.test +++ b/test/sql/attach_types_unsupported.test @@ -8,8 +8,6 @@ require-env BQ_TEST_PROJECT require-env BQ_TEST_DATASET -require-env GOOGLE_APPLICATION_CREDENTIALS - statement ok ATTACH 'project=${BQ_TEST_PROJECT} dataset=${BQ_TEST_DATASET}' AS bq (TYPE bigquery); diff --git a/test/sql/bigquery/attach_types_varchar.test b/test/sql/attach_types_varchar.test similarity index 97% rename from test/sql/bigquery/attach_types_varchar.test rename to test/sql/attach_types_varchar.test index 75482ce..b9feca8 100644 --- a/test/sql/bigquery/attach_types_varchar.test +++ b/test/sql/attach_types_varchar.test @@ -8,8 +8,6 @@ require-env BQ_TEST_PROJECT require-env BQ_TEST_DATASET -require-env GOOGLE_APPLICATION_CREDENTIALS - statement ok SET bq_bignumeric_as_varchar=TRUE diff --git a/test/sql/bigquery/attach_use_logic.test b/test/sql/attach_use_logic.test similarity index 94% rename from test/sql/bigquery/attach_use_logic.test rename to test/sql/attach_use_logic.test index 7da5019..b0b5c31 100644 --- a/test/sql/bigquery/attach_use_logic.test +++ b/test/sql/attach_use_logic.test @@ -8,8 +8,6 @@ require-env BQ_TEST_PROJECT require-env BQ_TEST_DATASET -require-env GOOGLE_APPLICATION_CREDENTIALS - statement ok ATTACH 'project=${BQ_TEST_PROJECT} dataset=${BQ_TEST_DATASET}' AS bq (TYPE bigquery); diff --git a/test/sql/bigquery/auth_env.test b/test/sql/auth_env.test similarity index 91% rename from test/sql/bigquery/auth_env.test rename to test/sql/auth_env.test index 795e35b..e8e3440 100644 --- a/test/sql/bigquery/auth_env.test +++ b/test/sql/auth_env.test @@ -8,8 +8,6 @@ require-env BQ_TEST_PROJECT require-env BQ_TEST_DATASET -require-env GOOGLE_APPLICATION_CREDENTIALS - statement ok ATTACH 'project=${BQ_TEST_PROJECT} dataset=${BQ_TEST_DATASET}' AS bq (TYPE bigquery); diff --git a/test/sql/bigquery/auth_secrets_basic.test b/test/sql/auth_secrets_basic.test similarity index 100% rename from test/sql/bigquery/auth_secrets_basic.test rename to test/sql/auth_secrets_basic.test diff --git a/test/sql/bigquery/auth_secrets_scopes.test b/test/sql/auth_secrets_scopes.test 
similarity index 100% rename from test/sql/bigquery/auth_secrets_scopes.test rename to test/sql/auth_secrets_scopes.test diff --git a/test/sql/bigquery/function_bigquery_execute.test b/test/sql/function_bigquery_execute.test similarity index 95% rename from test/sql/bigquery/function_bigquery_execute.test rename to test/sql/function_bigquery_execute.test index ecc2fd9..ea1a4c9 100644 --- a/test/sql/bigquery/function_bigquery_execute.test +++ b/test/sql/function_bigquery_execute.test @@ -8,8 +8,6 @@ require-env BQ_TEST_PROJECT require-env BQ_TEST_DATASET -require-env GOOGLE_APPLICATION_CREDENTIALS - statement ok SELECT * FROM bigquery_execute('${BQ_TEST_PROJECT}', 'DROP TABLE IF EXISTS `${BQ_TEST_PROJECT}.${BQ_TEST_DATASET}.exec_table`') diff --git a/test/sql/bigquery/function_bigquery_jobs.test b/test/sql/function_bigquery_jobs.test similarity index 97% rename from test/sql/bigquery/function_bigquery_jobs.test rename to test/sql/function_bigquery_jobs.test index e289baa..e98ed4f 100644 --- a/test/sql/bigquery/function_bigquery_jobs.test +++ b/test/sql/function_bigquery_jobs.test @@ -8,8 +8,6 @@ require-env BQ_TEST_PROJECT require-env BQ_TEST_DATASET -require-env GOOGLE_APPLICATION_CREDENTIALS - statement ok ATTACH 'project=${BQ_TEST_PROJECT} dataset=${BQ_TEST_DATASET}' AS bq (TYPE bigquery); diff --git a/test/sql/bigquery/function_bigquery_query.test b/test/sql/function_bigquery_query.test similarity index 97% rename from test/sql/bigquery/function_bigquery_query.test rename to test/sql/function_bigquery_query.test index 2c395fb..c02b807 100644 --- a/test/sql/bigquery/function_bigquery_query.test +++ b/test/sql/function_bigquery_query.test @@ -8,8 +8,6 @@ require-env BQ_TEST_PROJECT require-env BQ_TEST_DATASET -require-env GOOGLE_APPLICATION_CREDENTIALS - statement ok SET bq_debug_show_queries = true; diff --git a/test/sql/bigquery/function_bigquery_query_billing_project.test b/test/sql/function_bigquery_query_billing_project.test similarity index 96% rename from test/sql/bigquery/function_bigquery_query_billing_project.test rename to test/sql/function_bigquery_query_billing_project.test index 3884886..c64c768 100644 --- a/test/sql/bigquery/function_bigquery_query_billing_project.test +++ b/test/sql/function_bigquery_query_billing_project.test @@ -10,8 +10,6 @@ require-env BQ_TEST_BILLING_PROJECT require-env BQ_TEST_DATASET -require-env GOOGLE_APPLICATION_CREDENTIALS - statement ok SET bq_debug_show_queries = true; diff --git a/test/sql/bigquery/function_bigquery_query_types.test b/test/sql/function_bigquery_query_types.test similarity index 99% rename from test/sql/bigquery/function_bigquery_query_types.test rename to test/sql/function_bigquery_query_types.test index a91a188..ebff9f5 100644 --- a/test/sql/bigquery/function_bigquery_query_types.test +++ b/test/sql/function_bigquery_query_types.test @@ -8,8 +8,6 @@ require-env BQ_TEST_PROJECT require-env BQ_TEST_DATASET -require-env GOOGLE_APPLICATION_CREDENTIALS - statement ok ATTACH 'project=${BQ_TEST_PROJECT} dataset=${BQ_TEST_DATASET}' AS bq (TYPE bigquery); diff --git a/test/sql/bigquery/geography_support.test b/test/sql/geography_support.test similarity index 98% rename from test/sql/bigquery/geography_support.test rename to test/sql/geography_support.test index 3c4f880..9ff0955 100644 --- a/test/sql/bigquery/geography_support.test +++ b/test/sql/geography_support.test @@ -8,8 +8,6 @@ require-env BQ_TEST_PROJECT require-env BQ_TEST_DATASET -require-env GOOGLE_APPLICATION_CREDENTIALS - # Create a simple table with GEOGRAPHY columns 
using bigquery_execute statement ok FROM bigquery_execute('${BQ_TEST_PROJECT}', ' diff --git a/test/sql/local/attach_clear_cache.test b/test/sql/local/attach_clear_cache.test deleted file mode 100644 index 3f04e3a..0000000 --- a/test/sql/local/attach_clear_cache.test +++ /dev/null @@ -1,42 +0,0 @@ -# name: test/sql/local/attach_clear_cache.test -# description: Clear BigQuery caches -# group: [local] - -require bigquery - -require-env BQ_API_ENDPOINT - -require-env BQ_GRPC_ENDPOINT - -statement ok -ATTACH 'project=test api_endpoint=${BQ_API_ENDPOINT} grpc_endpoint=${BQ_GRPC_ENDPOINT}' AS bq (TYPE bigquery); - -statement ok -DROP TABLE IF EXISTS bq.dataset1.cache_test; - -statement ok -CREATE TABLE bq.dataset1.cache_test(i INTEGER); - -statement ok -CALL bigquery_clear_cache(); - -query I -INSERT INTO bq.dataset1.cache_test VALUES (42); ----- -1 - -statement ok -CALL bigquery_clear_cache(); - -query I -SELECT * FROM bq.dataset1.cache_test; ----- -42 - -statement ok -CALL bigquery_clear_cache(); - -statement error -INSERT INTO bq.dataset1.tst VALUES (84) ----- -Catalog Error: Table with name tst does not exist! diff --git a/test/sql/local/attach_create_if_exists.test b/test/sql/local/attach_create_if_exists.test deleted file mode 100644 index 42dfc54..0000000 --- a/test/sql/local/attach_create_if_exists.test +++ /dev/null @@ -1,24 +0,0 @@ -# name: test/sql/storage/attach_create_if_exists.test -# description: Test IF EXISTS -# group: [storage] - -require bigquery - -require-env BQ_API_ENDPOINT - -require-env BQ_GRPC_ENDPOINT - -statement ok -ATTACH 'project=test api_endpoint=${BQ_API_ENDPOINT} grpc_endpoint=${BQ_GRPC_ENDPOINT}' AS bq (TYPE bigquery); - -statement ok -DROP TABLE IF EXISTS bq.dataset1.some_table; - -statement ok -CREATE TABLE bq.dataset1.some_table (i VARCHAR); - -# statement ok -# CREATE TABLE IF NOT EXISTS bq.dataset1.some_table (i VARCHAR); - -statement ok -SELECT i FROM bq.dataset1.some_table; diff --git a/test/sql/local/attach_database_size.test b/test/sql/local/attach_database_size.test deleted file mode 100644 index 6ddc5ff..0000000 --- a/test/sql/local/attach_database_size.test +++ /dev/null @@ -1,17 +0,0 @@ -# name: test/sql/local/attach_database_size.test -# description: Test fetching the database size -# group: [local] - -require bigquery - -require-env BQ_API_ENDPOINT - -require-env BQ_GRPC_ENDPOINT - -statement ok -ATTACH 'project=test api_endpoint=${BQ_API_ENDPOINT} grpc_endpoint=${BQ_GRPC_ENDPOINT}' AS bq (TYPE bigquery); - -statement error -PRAGMA database_size ----- -BigQuery does not support getting database size diff --git a/test/sql/local/attach_delete_from_table.test b/test/sql/local/attach_delete_from_table.test deleted file mode 100644 index 67a0f38..0000000 --- a/test/sql/local/attach_delete_from_table.test +++ /dev/null @@ -1,63 +0,0 @@ -# name: test/sql/local/attach_delete_from_table.test -# description: Test DELETE statement on tables -# group: [local] - -require bigquery - -require-env BQ_API_ENDPOINT - -require-env BQ_GRPC_ENDPOINT - -statement ok -ATTACH 'project=test api_endpoint=${BQ_API_ENDPOINT} grpc_endpoint=${BQ_GRPC_ENDPOINT}' AS bq (TYPE bigquery); - -statement ok -DROP TABLE IF EXISTS bq.dataset1.test_delete; - -statement ok -CREATE TABLE bq.dataset1.test_delete(i INTEGER, s VARCHAR, n VARCHAR); - -statement ok -INSERT INTO bq.dataset1.test_delete VALUES (1, 'hi', 'y'), (2, 'hello', 'y'), (3, 'moin', 'y'); - -statement ok -DELETE FROM bq.dataset1.test_delete WHERE s='hello'; - -query III -SELECT * FROM bq.dataset1.test_delete; ----- -1 hi y 
-3 moin y - -statement ok -DELETE FROM bq.dataset1.test_delete WHERE i>1 AND s='hi'; - -query III -SELECT * FROM bq.dataset1.test_delete; ----- -1 hi y -3 moin y - -statement ok -DELETE FROM bq.dataset1.test_delete WHERE i<=1 AND s='hi'; - -query III -SELECT * FROM bq.dataset1.test_delete; ----- -3 moin y - -statement ok -INSERT INTO bq.dataset1.test_delete VALUES (3, 'moin', 'y'); - -statement ok -INSERT INTO bq.dataset1.test_delete VALUES (3, 'moin', 'y'); - -statement ok -INSERT INTO bq.dataset1.test_delete VALUES (3, 'moin', 'y'); - -statement ok -DELETE FROM bq.dataset1.test_delete WHERE i=3; - -query III -SELECT * FROM bq.dataset1.test_delete; ----- diff --git a/test/sql/local/attach_detach.test b/test/sql/local/attach_detach.test deleted file mode 100644 index c0cf6b7..0000000 --- a/test/sql/local/attach_detach.test +++ /dev/null @@ -1,40 +0,0 @@ -# name: test/sql/storage/attach_detach.test -# description: Testing DETACH -# group: [storage] - -require bigquery - -require-env BQ_API_ENDPOINT - -require-env BQ_GRPC_ENDPOINT - -statement ok -ATTACH 'project=test api_endpoint=${BQ_API_ENDPOINT} grpc_endpoint=${BQ_GRPC_ENDPOINT}' AS bq (TYPE bigquery); - -statement ok -DROP TABLE IF EXISTS bq.dataset1.test_detach; - -statement ok -CREATE TABLE bq.dataset1.test_detach(a INTEGER); - -statement ok -INSERT INTO bq.dataset1.test_detach VALUES (100), (200), (NULL), (300); - -statement ok -DETACH bq - -statement error -SELECT * FROM bq.dataset1.test_detach; ----- -Catalog "bq" does not exist! - -statement ok -ATTACH 'project=test api_endpoint=${BQ_API_ENDPOINT} grpc_endpoint=${BQ_GRPC_ENDPOINT}' AS bq (TYPE bigquery); - -query I -SELECT * FROM bq.dataset1.test_detach; ----- -100 -200 -NULL -300 diff --git a/test/sql/local/attach_insert_table.test b/test/sql/local/attach_insert_table.test deleted file mode 100644 index c8edeca..0000000 --- a/test/sql/local/attach_insert_table.test +++ /dev/null @@ -1,44 +0,0 @@ -# name: test/sql/local/attach_insert_table.test -# description: Test INSERT statement on tables -# group: [local] - -require bigquery - -require-env BQ_API_ENDPOINT - -require-env BQ_GRPC_ENDPOINT - -statement ok -ATTACH 'project=test api_endpoint=${BQ_API_ENDPOINT} grpc_endpoint=${BQ_GRPC_ENDPOINT}' AS bq (TYPE bigquery); - -statement ok -DROP TABLE IF EXISTS bq.dataset1.test_insert; - -statement ok -CREATE TABLE bq.dataset1.test_insert(a INTEGER, b VARCHAR NOT NULL, c FLOAT); - -query I -INSERT INTO bq.dataset1.test_insert VALUES (1, 'a', 0.1), (2, 'b', 0.2), (3, 'c', 0.3); ----- -3 - -statement ok -INSERT INTO bq.dataset1.test_insert VALUES (NULL, NULL, NULL); - -statement error -INSERT INTO bq.dataset1.test_insert VALUES (3, 'c', 0.3, 'overflow'); ----- -table test_insert has 3 columns but 4 values were supplied - -statement error -INSERT INTO bq.dataset1.test_insert VALUES (3, 'c'); ----- -table test_insert has 3 columns but 2 values were supplied - -statement error -INSERT INTO bq.dataset1.test_insert VALUES ('not INT32', 'c', 0.3); ----- -Could not convert string 'not INT32' to INT32 - -statement ok -INSERT INTO bq.dataset1.test_insert VALUES (5, NULL, 0.3); diff --git a/test/sql/local/attach_limit.test b/test/sql/local/attach_limit.test deleted file mode 100644 index aab4204..0000000 --- a/test/sql/local/attach_limit.test +++ /dev/null @@ -1,36 +0,0 @@ -# name: test/sql/local/attach_limit.test -# description: Test LIMIT over an attached BigQuery table -# group: [local] - -require bigquery - -require-env BQ_API_ENDPOINT - -require-env BQ_GRPC_ENDPOINT - -statement ok -ATTACH 
'project=test api_endpoint=${BQ_API_ENDPOINT} grpc_endpoint=${BQ_GRPC_ENDPOINT}' AS bq (TYPE bigquery); - -statement ok -DROP TABLE IF EXISTS bq.dataset1.table_limit - -statement ok -CREATE TABLE bq.dataset1.table_limit AS FROM range(100000) t(i) - -query I -FROM bq.dataset1.table_limit LIMIT 5 ----- -0 -1 -2 -3 -4 - -query I -FROM bq.dataset1.table_limit LIMIT 5 OFFSET 5 ----- -5 -6 -7 -8 -9 diff --git a/test/sql/local/attach_read_only.test b/test/sql/local/attach_read_only.test deleted file mode 100644 index 43c70ac..0000000 --- a/test/sql/local/attach_read_only.test +++ /dev/null @@ -1,26 +0,0 @@ -# name: test/sql/storage/attach_read_only.test -# description: Test READ_ONLY -# group: [storage] - -require bigquery - -require-env BQ_API_ENDPOINT - -require-env BQ_GRPC_ENDPOINT - -statement ok -ATTACH 'project=test api_endpoint=${BQ_API_ENDPOINT} grpc_endpoint=${BQ_GRPC_ENDPOINT}' AS bq (TYPE bigquery); - -statement ok -DROP TABLE IF EXISTS bq.dataset1.read_only; - -statement ok -DETACH DATABASE IF EXISTS bq; - -statement ok -ATTACH 'project=test api_endpoint=${BQ_API_ENDPOINT} grpc_endpoint=${BQ_GRPC_ENDPOINT}' AS bq (TYPE bigquery, READ_ONLY); - -statement error -CREATE TABLE bq.dataset1.read_only(i INTEGER); ----- -read-only mode diff --git a/test/sql/local/attach_types_arrays.test b/test/sql/local/attach_types_arrays.test deleted file mode 100644 index 91c45b1..0000000 --- a/test/sql/local/attach_types_arrays.test +++ /dev/null @@ -1,22 +0,0 @@ -# name: test/sql/local/attach_create_views.test -# description: Testing BigQuery Views -# group: [local] - -require bigquery - -require-env BQ_API_ENDPOINT - -require-env BQ_GRPC_ENDPOINT - -statement ok -ATTACH 'project=test api_endpoint=${BQ_API_ENDPOINT} grpc_endpoint=${BQ_GRPC_ENDPOINT}' AS bq (TYPE bigquery); - -statement ok -DROP TABLE IF EXISTS bq.dataset1.table_arrays; - -statement ok -CREATE TABLE bq.dataset1.table_arrays AS -SELECT - int_array, - varchar_array, -FROM test_all_types(); diff --git a/test/sql/local/attach_types_blob.test b/test/sql/local/attach_types_blob.test deleted file mode 100644 index 4e71954..0000000 --- a/test/sql/local/attach_types_blob.test +++ /dev/null @@ -1,38 +0,0 @@ -# name: test/sql/local/attach_types_blob.test -# description: Test inserting/querying blobs -# group: [local] - -require bigquery - -require-env BQ_API_ENDPOINT - -require-env BQ_GRPC_ENDPOINT - -statement ok -ATTACH 'project=test api_endpoint=${BQ_API_ENDPOINT} grpc_endpoint=${BQ_GRPC_ENDPOINT}' AS bq (TYPE bigquery); - -statement ok -DROP TABLE IF EXISTS bq.dataset1.table_blob; - -statement ok -CREATE TABLE bq.dataset1.table_blob (b BLOB); - -statement ok -INSERT INTO bq.dataset1.table_blob VALUES ('\xBE\xEF'); - -statement ok -INSERT INTO bq.dataset1.table_blob VALUES (NULL); - -statement ok -INSERT INTO bq.dataset1.table_blob VALUES ('\xDE\xAD\xBE\xEF'); - -statement ok -INSERT INTO bq.dataset1.table_blob VALUES ('\xCA\xFE'); - -query I -SELECT * FROM bq.dataset1.table_blob ----- -vu8= -NULL -3q2+7w== -yv4= diff --git a/test/sql/local/attach_types_numerics.test b/test/sql/local/attach_types_numerics.test deleted file mode 100644 index 379235d..0000000 --- a/test/sql/local/attach_types_numerics.test +++ /dev/null @@ -1,38 +0,0 @@ -# name: test/sql/local/attach_types_numeric.test -# description: Test inserting/querying numerics -# group: [local] - -require bigquery - -require-env BQ_API_ENDPOINT - -require-env BQ_GRPC_ENDPOINT - -statement ok -ATTACH 'project=test api_endpoint=${BQ_API_ENDPOINT} grpc_endpoint=${BQ_GRPC_ENDPOINT}' AS bq (TYPE 
bigquery); - -foreach type TINYINT SMALLINT INTEGER BIGINT UTINYINT USMALLINT UINTEGER FLOAT DOUBLE - -statement ok -DROP TABLE IF EXISTS bq.dataset1.numerics; - -statement ok -CREATE TABLE bq.dataset1.numerics(i ${type}); - -statement ok -INSERT INTO bq.dataset1.numerics values (0); - -statement ok -INSERT INTO bq.dataset1.numerics values (NULL); - -statement ok -INSERT INTO bq.dataset1.numerics values (1); - -query I -SELECT * FROM bq.dataset1.numerics ----- -0 -NULL -1 - -endloop diff --git a/test/sql/local/attach_types_timestamptz.test b/test/sql/local/attach_types_timestamptz.test deleted file mode 100644 index 36a9c9c..0000000 --- a/test/sql/local/attach_types_timestamptz.test +++ /dev/null @@ -1,20 +0,0 @@ -# name: test/sql/local/attach_types_timestamptz.test -# description: Test inserting timestamp with time zones -# group: [local] - -require bigquery - -require-env BQ_API_ENDPOINT - -require-env BQ_GRPC_ENDPOINT - -statement ok -ATTACH 'project=test api_endpoint=${BQ_API_ENDPOINT} grpc_endpoint=${BQ_GRPC_ENDPOINT}' AS bq (TYPE bigquery); - -statement ok -DROP TABLE IF EXISTS bq.dataset1.table_timestamp_with_tz; - -statement error -CREATE TABLE bq.dataset1.table_timestamp_with_tz(ts TIMESTAMP WITH TIME ZONE); ----- -TIMESTAMP WITH TIME ZONE not supported in BigQuery diff --git a/test/sql/local/attach_types_unsupported.test b/test/sql/local/attach_types_unsupported.test deleted file mode 100644 index 53e0da7..0000000 --- a/test/sql/local/attach_types_unsupported.test +++ /dev/null @@ -1,55 +0,0 @@ -# name: test/sql/local/attach_types_unsupported.test -# description: Tests various types that are not supported by BigQuery -# group: [local] - -require bigquery - -require-env BQ_API_ENDPOINT - -require-env BQ_GRPC_ENDPOINT - -statement ok -ATTACH 'project=test api_endpoint=${BQ_API_ENDPOINT} grpc_endpoint=${BQ_GRPC_ENDPOINT}' AS bq (TYPE bigquery); - -statement ok -DROP TABLE IF EXISTS bq.dataset1.table_arrays; - -statement error -CREATE TABLE bq.dataset1.table_arrays AS -SELECT - nested_int_array -FROM test_all_types(); ----- -Nested lists or arrays are not supported in BigQuery. - -statement error -CREATE TABLE bq.dataset1.table_arrays AS -SELECT - fixed_nested_int_array -FROM test_all_types(); ----- -Nested lists or arrays are not supported in BigQuery. - -statement error -CREATE TABLE bq.dataset1.table_arrays AS -SELECT - fixed_nested_varchar_array -FROM test_all_types(); ----- -Nested lists or arrays are not supported in BigQuery. - -statement error -CREATE TABLE bq.dataset1.table_arrays AS -SELECT - fixed_array_of_int_list -FROM test_all_types(); ----- -Nested lists or arrays are not supported in BigQuery. - -statement error -CREATE TABLE bq.dataset1.table_arrays AS -SELECT - list_of_fixed_int_array -FROM test_all_types(); ----- -Nested lists or arrays are not supported in BigQuery. 
diff --git a/test/sql/local/attach_types_varchar.test b/test/sql/local/attach_types_varchar.test deleted file mode 100644 index 3abca37..0000000 --- a/test/sql/local/attach_types_varchar.test +++ /dev/null @@ -1,30 +0,0 @@ -# name: test/sql/local/attach_types_varchar.test -# description: Test inserting/querying VARCHAR -# group: [local] - -require bigquery - -require-env BQ_API_ENDPOINT - -require-env BQ_GRPC_ENDPOINT - -statement ok -ATTACH 'project=test api_endpoint=${BQ_API_ENDPOINT} grpc_endpoint=${BQ_GRPC_ENDPOINT}' AS bq (TYPE bigquery); - -statement ok -DROP TABLE IF EXISTS bq.dataset1.varchars; - -statement ok -CREATE TABLE bq.dataset1.varchars (v VARCHAR); - -statement ok -INSERT INTO bq.dataset1.varchars VALUES (''); - -statement ok -INSERT INTO bq.dataset1.varchars VALUES ('some BIG...query string'); - -query I -SELECT * FROM bq.dataset1.varchars ----- -(empty) -some BIG...query string diff --git a/test/sql/local/attach_update_table.test b/test/sql/local/attach_update_table.test deleted file mode 100644 index ff987c2..0000000 --- a/test/sql/local/attach_update_table.test +++ /dev/null @@ -1,49 +0,0 @@ -# name: test/sql/local/attach_update_table.test -# description: Test UPDATE statement on tables -# group: [local] - -require bigquery - -require-env BQ_API_ENDPOINT - -require-env BQ_GRPC_ENDPOINT - -statement ok -ATTACH 'project=test api_endpoint=${BQ_API_ENDPOINT} grpc_endpoint=${BQ_GRPC_ENDPOINT}' AS bq (TYPE bigquery) - -statement ok -DROP TABLE IF EXISTS bq.dataset1.test_update; - -# multi column update in different orders -statement ok -CREATE TABLE bq.dataset1.test_update(i INTEGER, j INTEGER, k INTEGER); - -query I -INSERT INTO bq.dataset1.test_update VALUES (1, 10, 100), (2, NULL, 200), (3, 30, NULL), (4, 40, 400); ----- -4 - -query III -SELECT * FROM bq.dataset1.test_update ORDER BY 1 ----- -1 10 100 -2 NULL 200 -3 30 NULL -4 40 400 - -statement ok -UPDATE bq.dataset1.test_update SET k=990 + i, i=i, j=99 WHERE i=2 OR i=4 - -query III -SELECT * FROM bq.dataset1.test_update ORDER BY 1 ----- -1 10 100 -2 99 992 -3 30 NULL -4 99 994 - -# duplicates in SET statements -statement error -UPDATE bq.dataset1.test_update SET j=k, j=i ----- -Multiple assignments to same column diff --git a/test/sql/local/attach_update_table2.test b/test/sql/local/attach_update_table2.test deleted file mode 100644 index 84d8aad..0000000 --- a/test/sql/local/attach_update_table2.test +++ /dev/null @@ -1,54 +0,0 @@ -# name: test/sql/local/attach_update_table -# description: Test UPDATE statement on tables -# group: [local] - -require bigquery - -require-env BQ_API_ENDPOINT - -require-env BQ_GRPC_ENDPOINT - -statement ok -ATTACH 'project=test api_endpoint=${BQ_API_ENDPOINT} grpc_endpoint=${BQ_GRPC_ENDPOINT}' AS bq (TYPE bigquery) - -statement ok -DROP TABLE IF EXISTS bq.dataset1.test_update; - -statement ok -CREATE TABLE bq.dataset1.test_update(i INTEGER, s VARCHAR, n VARCHAR); - -statement ok -INSERT INTO bq.dataset1.test_update VALUES (1, 'hi', 'y'), (2, 'hello', 'y'), (3, 'moin', 'y'); - -statement ok -UPDATE bq.dataset1.test_update SET i=i+1 WHERE i>1; - -query III -SELECT * FROM bq.dataset1.test_update; ----- -1 hi y -3 hello y -4 moin y - -statement ok -UPDATE bq.dataset1.test_update SET i=i+100 WHERE s='hi'; - -query III -SELECT * FROM bq.dataset1.test_update; ----- -101 hi y -3 hello y -4 moin y - -statement ok -UPDATE bq.dataset1.test_update SET s='hello' WHERE s='hi'; - -statement ok -UPDATE bq.dataset1.test_update SET n='another string' WHERE i<100 AND s='hello'; - -query III -SELECT * FROM 
bq.dataset1.test_update; ----- -101 hello y -3 hello another string -4 moin y
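All of the renamed suites share the same sqllogictest preamble: `require bigquery`, `require-env` guards for the project variables, and an `ATTACH ... (TYPE bigquery)` statement. A hypothetical skeleton for adding a new test under the flattened layout (file name, description, and group are invented for illustration):

```bash
# Scaffold a new sqllogictest file in the relocated test/sql/ layout.
# The quoted heredoc keeps ${...} literal so the test runner, not the
# shell, substitutes BQ_TEST_PROJECT and BQ_TEST_DATASET at run time.
cat > test/sql/attach_example.test <<'EOF'
# name: test/sql/attach_example.test
# description: Example skeleton for a new BigQuery test
# group: [sql]

require bigquery

require-env BQ_TEST_PROJECT

require-env BQ_TEST_DATASET

statement ok
ATTACH 'project=${BQ_TEST_PROJECT} dataset=${BQ_TEST_DATASET}' AS bq (TYPE bigquery);
EOF
```

The `statement ok` / `query` blocks then follow the same conventions as the files above.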