Skip to content

Commit

Permalink
Set job_execution_timeout_seconds default to None (#159)
Browse files Browse the repository at this point in the history
* Set job_execution_timeout_seconds default to None

* Pass old default of 300s in previous call sites

* Clearer factoring
  • Loading branch information
jtcohen6 authored Apr 14, 2022
1 parent e70d873 commit 8f7208c
Show file tree
Hide file tree
Showing 3 changed files with 12 additions and 6 deletions.
7 changes: 6 additions & 1 deletion CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,4 +1,9 @@
## dbt-bigquery 1.1.0 (TBD)
## dbt-bigquery 1.1.0 (Release TBD)

### Fixes
- Restore default behavior for query timeout: set `job_execution_timeout_seconds` to `None` by default. Keep 300 seconds as the timeout at call sites that previously relied on the old default.

## dbt-bigquery 1.1.0rc1 (April 13, 2022)

### Under the hood
- Use dbt.tests.adapter.basic in tests (new test framework) ([#135](https://github.com/dbt-labs/dbt-bigquery/issues/135), [#142](https://github.com/dbt-labs/dbt-bigquery/pull/142))
Expand Down
7 changes: 4 additions & 3 deletions dbt/adapters/bigquery/connections.py
Original file line number Diff line number Diff line change
Expand Up @@ -103,7 +103,7 @@ class BigQueryCredentials(Credentials):
job_retry_deadline_seconds: Optional[int] = None
job_retries: Optional[int] = 1
job_creation_timeout_seconds: Optional[int] = None
job_execution_timeout_seconds: Optional[int] = 300
job_execution_timeout_seconds: Optional[int] = None

# Keyfile json creds
keyfile: Optional[str] = None
Expand Down Expand Up @@ -301,7 +301,7 @@ def get_impersonated_bigquery_credentials(cls, profile_credentials):
source_credentials=source_credentials,
target_principal=profile_credentials.impersonate_service_account,
target_scopes=list(profile_credentials.scopes),
lifetime=profile_credentials.job_execution_timeout_seconds,
lifetime=(profile_credentials.job_execution_timeout_seconds or 300),
)

@classmethod
Expand Down Expand Up @@ -524,7 +524,8 @@ def copy_bq_table(self, source, destination, write_disposition):
def copy_and_results():
job_config = google.cloud.bigquery.CopyJobConfig(write_disposition=write_disposition)
copy_job = client.copy_table(source_ref_array, destination_ref, job_config=job_config)
iterator = copy_job.result(timeout=self.get_job_execution_timeout_seconds(conn))
timeout = self.get_job_execution_timeout_seconds(conn) or 300
iterator = copy_job.result(timeout=timeout)
return copy_job, iterator

self._retry_and_handle(
Expand Down
4 changes: 2 additions & 2 deletions dbt/adapters/bigquery/impl.py
Original file line number Diff line number Diff line change
Expand Up @@ -624,7 +624,7 @@ def load_dataframe(self, database, schema, table_name, agate_table, column_overr
with open(agate_table.original_abspath, "rb") as f:
job = client.load_table_from_file(f, table_ref, rewind=True, job_config=load_config)

timeout = self.connections.get_job_execution_timeout_seconds(conn)
timeout = self.connections.get_job_execution_timeout_seconds(conn) or 300
with self.connections.exception_handler("LOAD TABLE"):
self.poll_until_job_completes(job, timeout)

Expand All @@ -647,7 +647,7 @@ def upload_file(
with open(local_file_path, "rb") as f:
job = client.load_table_from_file(f, table_ref, rewind=True, job_config=load_config)

timeout = self.connections.get_job_execution_timeout_seconds(conn)
timeout = self.connections.get_job_execution_timeout_seconds(conn) or 300
with self.connections.exception_handler("LOAD TABLE"):
self.poll_until_job_completes(job, timeout)

Expand Down

0 comments on commit 8f7208c

Please sign in to comment.