diff --git a/.bumpversion.cfg b/.bumpversion.cfg
index 1906822cf..c6ff55c6c 100644
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 1.0.0rc1
+current_version = 1.0.0rc2
 parse = (?P<major>\d+)
 	\.(?P<minor>\d+)
 	\.(?P<patch>\d+)
diff --git a/.github/workflows/integration.yml b/.github/workflows/integration.yml
index b09fcb225..85f8bef07 100644
--- a/.github/workflows/integration.yml
+++ b/.github/workflows/integration.yml
@@ -208,3 +208,19 @@ jobs:
             "You do not have permissions to run integration tests, @dbt-labs/core "\
             "needs to label this PR with `ok to test` in order to run integration tests!"
           check_for_duplicate_msg: true
+
+  slack-results:
+    runs-on: ubuntu-latest
+    needs: test
+    if: always()
+
+    steps:
+      - name: Posting scheduled run failures
+        uses: ravsamhq/notify-slack-action@v1
+        if: ${{ github.event_name == 'schedule' }}
+        with:
+          notification_title: 'BigQuery nightly integration test failed'
+          status: ${{ job.status }}
+          notify_when: 'failure'
+        env:
+          SLACK_WEBHOOK_URL: ${{ secrets.SLACK_DEV_CORE_ALERTS }}
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 8893752ad..3721a3474 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,21 @@
 ## dbt-bigquery 1.0.0 (Release TBD)
 
+## dbt-bigquery 1.0.0rc2 (November 24, 2021)
+
+### Features
+- Add optional `scopes` profile configuration argument to reduce the BigQuery OAuth scopes down to the minimal set needed. ([#23](https://github.com/dbt-labs/dbt-bigquery/issues/23), [#63](https://github.com/dbt-labs/dbt-bigquery/pull/63))
+
+### Fixes
+- Don't apply `require_partition_filter` to temporary tables, thereby fixing the `insert_overwrite` strategy when a partition filter is required ([#64](https://github.com/dbt-labs/dbt-bigquery/issues/64), [#65](https://github.com/dbt-labs/dbt-bigquery/pull/65))
+
+### Under the hood
+- Add `execution_project` to the `target` object ([#66](https://github.com/dbt-labs/dbt-bigquery/issues/66))
+
+### Contributors
+- [@pgoslatara](https://github.com/pgoslatara) ([#66](https://github.com/dbt-labs/dbt-bigquery/issues/66))
+- [@bborysenko](https://github.com/bborysenko) ([#63](https://github.com/dbt-labs/dbt-bigquery/pull/63))
+- [@yu-iskw](https://github.com/yu-iskw) ([#65](https://github.com/dbt-labs/dbt-bigquery/pull/65))
+
 ## dbt-bigquery 1.0.0rc1 (November 10, 2021)
 
 ### Fixes
diff --git a/dbt/adapters/bigquery/__version__.py b/dbt/adapters/bigquery/__version__.py
index 2c69af52f..a3f4934fd 100644
--- a/dbt/adapters/bigquery/__version__.py
+++ b/dbt/adapters/bigquery/__version__.py
@@ -1 +1 @@
-version = '1.0.0rc1'
+version = '1.0.0rc2'
diff --git a/dbt/adapters/bigquery/connections.py b/dbt/adapters/bigquery/connections.py
index 984da6888..cfd4a3a04 100644
--- a/dbt/adapters/bigquery/connections.py
+++ b/dbt/adapters/bigquery/connections.py
@@ -110,6 +110,12 @@ class BigQueryCredentials(Credentials):
     client_secret: Optional[str] = None
     token_uri: Optional[str] = None
 
+    scopes: Optional[Tuple[str, ...]] = (
+        'https://www.googleapis.com/auth/bigquery',
+        'https://www.googleapis.com/auth/cloud-platform',
+        'https://www.googleapis.com/auth/drive'
+    )
+
     _ALIASES = {
         'project': 'database',
         'dataset': 'schema',
@@ -127,7 +133,8 @@ def unique_field(self):
 
     def _connection_keys(self):
         return ('method', 'database', 'schema', 'location', 'priority',
-                'timeout_seconds', 'maximum_bytes_billed')
+                'timeout_seconds', 'maximum_bytes_billed',
+                'execution_project')
 
     @classmethod
     def __pre_deserialize__(cls, d: Dict[Any, Any]) -> Dict[Any, Any]:
@@ -148,10 +155,6 @@ def __pre_deserialize__(cls, d: Dict[Any, Any]) -> Dict[Any, Any]:
 class BigQueryConnectionManager(BaseConnectionManager):
     TYPE = 'bigquery'
 
-    SCOPE = ('https://www.googleapis.com/auth/bigquery',
-             'https://www.googleapis.com/auth/cloud-platform',
-             'https://www.googleapis.com/auth/drive')
-
     QUERY_TIMEOUT = 300
     RETRIES = 1
     DEFAULT_INITIAL_DELAY = 1.0  # Seconds
@@ -245,16 +248,16 @@ def get_bigquery_credentials(cls, profile_credentials):
         creds = GoogleServiceAccountCredentials.Credentials
 
         if method == BigQueryConnectionMethod.OAUTH:
-            credentials, _ = get_bigquery_defaults(scopes=cls.SCOPE)
+            credentials, _ = get_bigquery_defaults(scopes=profile_credentials.scopes)
             return credentials
 
         elif method == BigQueryConnectionMethod.SERVICE_ACCOUNT:
             keyfile = profile_credentials.keyfile
-            return creds.from_service_account_file(keyfile, scopes=cls.SCOPE)
+            return creds.from_service_account_file(keyfile, scopes=profile_credentials.scopes)
 
         elif method == BigQueryConnectionMethod.SERVICE_ACCOUNT_JSON:
             details = profile_credentials.keyfile_json
-            return creds.from_service_account_info(details, scopes=cls.SCOPE)
+            return creds.from_service_account_info(details, scopes=profile_credentials.scopes)
 
         elif method == BigQueryConnectionMethod.OAUTH_SECRETS:
             return GoogleCredentials.Credentials(
@@ -263,7 +266,7 @@ def get_bigquery_credentials(cls, profile_credentials):
                 client_id=profile_credentials.client_id,
                 client_secret=profile_credentials.client_secret,
                 token_uri=profile_credentials.token_uri,
-                scopes=cls.SCOPE
+                scopes=profile_credentials.scopes
             )
 
         error = ('Invalid `method` in profile: "{}"'.format(method))
@@ -275,7 +278,7 @@ def get_impersonated_bigquery_credentials(cls, profile_credentials):
         return impersonated_credentials.Credentials(
             source_credentials=source_credentials,
             target_principal=profile_credentials.impersonate_service_account,
-            target_scopes=list(cls.SCOPE),
+            target_scopes=list(profile_credentials.scopes),
             lifetime=profile_credentials.timeout_seconds,
         )
diff --git a/dbt/adapters/bigquery/impl.py b/dbt/adapters/bigquery/impl.py
index 8e3384dd1..4007ba0e8 100644
--- a/dbt/adapters/bigquery/impl.py
+++ b/dbt/adapters/bigquery/impl.py
@@ -757,20 +757,22 @@ def get_table_options(
     ) -> Dict[str, Any]:
         opts = self.get_common_options(config, node, temporary)
 
-        if temporary:
-            expiration = 'TIMESTAMP_ADD(CURRENT_TIMESTAMP(), INTERVAL 12 hour)'
-            opts['expiration_timestamp'] = expiration
-
         if config.get('kms_key_name') is not None:
             opts['kms_key_name'] = "'{}'".format(config.get('kms_key_name'))
 
-        if config.get('require_partition_filter'):
-            opts['require_partition_filter'] = config.get(
-                'require_partition_filter')
-
-        if config.get('partition_expiration_days') is not None:
-            opts['partition_expiration_days'] = config.get(
-                'partition_expiration_days')
+        if temporary:
+            expiration = 'TIMESTAMP_ADD(CURRENT_TIMESTAMP(), INTERVAL 12 hour)'
+            opts['expiration_timestamp'] = expiration
+        else:
+            # Don't set `require_partition_filter` on temporary tables:
+            # incremental models query the temporary table without a
+            # partition filter, which would otherwise raise an error.
+            if config.get('require_partition_filter') is not None:
+                opts['require_partition_filter'] = config.get(
+                    'require_partition_filter')
+            if config.get('partition_expiration_days') is not None:
+                opts['partition_expiration_days'] = config.get(
+                    'partition_expiration_days')
 
         return opts
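
For anyone trying this release candidate, here is a minimal, hypothetical `profiles.yml` sketch of the new `scopes` argument (the profile, project, and dataset names are placeholders; omitting `scopes` keeps the default bigquery/cloud-platform/drive set now defined on `BigQueryCredentials`):

```yaml
# Hypothetical profiles.yml entry -- only `scopes` is new in this release.
my_bigquery_profile:            # placeholder profile name
  target: dev
  outputs:
    dev:
      type: bigquery
      method: oauth
      project: my-gcp-project   # placeholder GCP project
      dataset: dbt_dev          # placeholder dataset
      threads: 4
      # Narrow the OAuth scopes to the minimal set needed; leaving this
      # out falls back to the three-scope default shown in connections.py.
      scopes:
        - https://www.googleapis.com/auth/bigquery
```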
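
And a sketch of the kind of model configuration the `require_partition_filter` fix unblocks, expressed in `dbt_project.yml` (project and folder names are illustrative). Before this change, the option was also applied to the temporary table that the `insert_overwrite` strategy builds, and querying that table without a partition filter raised an error:

```yaml
# Illustrative dbt_project.yml excerpt -- names are placeholders.
models:
  my_project:                   # placeholder project name
    events:                     # placeholder model folder
      +materialized: incremental
      +incremental_strategy: insert_overwrite
      +partition_by:
        field: event_date
        data_type: date
      # Now applied only to the target table, never the temporary table.
      +require_partition_filter: true
```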