diff --git a/CHANGELOG.md b/CHANGELOG.md
index 987bc79c6..528cd08b1 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,4 +1,9 @@
-## dbt-bigquery 1.1.0 (TBD)
+## dbt-bigquery 1.1.0 (Release TBD)
+
+### Fixes
+- Restore the default behavior for query timeout: set `job_execution_timeout` to `None` by default, and keep 300 seconds as the query timeout where it was previously used.
+
+## dbt-bigquery 1.1.0rc1 (April 13, 2022)
 
 ### Under the hood
 - Use dbt.tests.adapter.basic in tests (new test framework) ([#135](https://github.com/dbt-labs/dbt-bigquery/issues/135), [#142](https://github.com/dbt-labs/dbt-bigquery/pull/142))
diff --git a/dbt/adapters/bigquery/connections.py b/dbt/adapters/bigquery/connections.py
index 05f236a55..5a5b83044 100644
--- a/dbt/adapters/bigquery/connections.py
+++ b/dbt/adapters/bigquery/connections.py
@@ -103,7 +103,7 @@ class BigQueryCredentials(Credentials):
     job_retry_deadline_seconds: Optional[int] = None
     job_retries: Optional[int] = 1
     job_creation_timeout_seconds: Optional[int] = None
-    job_execution_timeout_seconds: Optional[int] = 300
+    job_execution_timeout_seconds: Optional[int] = None
 
     # Keyfile json creds
     keyfile: Optional[str] = None
@@ -301,7 +301,7 @@ def get_impersonated_bigquery_credentials(cls, profile_credentials):
             source_credentials=source_credentials,
             target_principal=profile_credentials.impersonate_service_account,
             target_scopes=list(profile_credentials.scopes),
-            lifetime=profile_credentials.job_execution_timeout_seconds,
+            lifetime=(profile_credentials.job_execution_timeout_seconds or 300),
         )
 
     @classmethod
@@ -524,7 +524,8 @@ def copy_bq_table(self, source, destination, write_disposition):
         def copy_and_results():
             job_config = google.cloud.bigquery.CopyJobConfig(write_disposition=write_disposition)
             copy_job = client.copy_table(source_ref_array, destination_ref, job_config=job_config)
-            iterator = copy_job.result(timeout=self.get_job_execution_timeout_seconds(conn))
+            timeout = self.get_job_execution_timeout_seconds(conn) or 300
+            iterator = copy_job.result(timeout=timeout)
             return copy_job, iterator
 
         self._retry_and_handle(
diff --git a/dbt/adapters/bigquery/impl.py b/dbt/adapters/bigquery/impl.py
index 0fc5fc1cc..50ca3c6e1 100644
--- a/dbt/adapters/bigquery/impl.py
+++ b/dbt/adapters/bigquery/impl.py
@@ -624,7 +624,7 @@ def load_dataframe(self, database, schema, table_name, agate_table, column_overr
         with open(agate_table.original_abspath, "rb") as f:
             job = client.load_table_from_file(f, table_ref, rewind=True, job_config=load_config)
 
-        timeout = self.connections.get_job_execution_timeout_seconds(conn)
+        timeout = self.connections.get_job_execution_timeout_seconds(conn) or 300
         with self.connections.exception_handler("LOAD TABLE"):
             self.poll_until_job_completes(job, timeout)
@@ -647,7 +647,7 @@ def upload_file(
         with open(local_file_path, "rb") as f:
             job = client.load_table_from_file(f, table_ref, rewind=True, job_config=load_config)
 
-        timeout = self.connections.get_job_execution_timeout_seconds(conn)
+        timeout = self.connections.get_job_execution_timeout_seconds(conn) or 300
         with self.connections.exception_handler("LOAD TABLE"):
             self.poll_until_job_completes(job, timeout)