diff --git a/cosmos/operators/airflow_async.py b/cosmos/operators/airflow_async.py
index f278dcba8..9a6c3e862 100644
--- a/cosmos/operators/airflow_async.py
+++ b/cosmos/operators/airflow_async.py
@@ -6,25 +6,22 @@
 from airflow.providers.google.cloud.operators.bigquery import BigQueryInsertJobOperator
 from airflow.utils.context import Context
 
-from cosmos.operators.base import DbtCompileMixin
 from cosmos.operators.local import (
     DbtBuildLocalOperator,
+    DbtCompileLocalOperator,
     DbtDepsLocalOperator,
     DbtDocsAzureStorageLocalOperator,
     DbtDocsCloudLocalOperator,
     DbtDocsGCSLocalOperator,
     DbtDocsLocalOperator,
     DbtDocsS3LocalOperator,
-    DbtLocalBaseOperator,
     DbtLSLocalOperator,
+    DbtRunOperationLocalOperator,
     DbtSeedLocalOperator,
     DbtSnapshotLocalOperator,
     DbtSourceLocalOperator,
     DbtTestLocalOperator,
-    DbtRunOperationLocalOperator,
-    DbtCompileLocalOperator,
 )
-
 from cosmos.settings import remote_target_path, remote_target_path_conn_id
 
 _SUPPORTED_DATABASES = ["bigquery"]
@@ -95,7 +92,6 @@ def execute(self, context: Context) -> Any | None:
         super().execute(context)
 
 
-
 class DbtTestAirflowAsyncOperator(DbtTestLocalOperator):
     pass
 
@@ -127,7 +123,6 @@ class DbtDocsGCSAirflowAsyncOperator(DbtDocsGCSLocalOperator):
 class DbtCompileAirflowAsyncOperator(DbtCompileLocalOperator):
     pass
 
+
 class DbtDepsAirflowAsyncOperator(DbtDepsLocalOperator):
     pass
-
-
diff --git a/cosmos/operators/local.py b/cosmos/operators/local.py
index ad320d5fc..ee0754f75 100644
--- a/cosmos/operators/local.py
+++ b/cosmos/operators/local.py
@@ -291,7 +291,6 @@ def _configure_remote_target_path() -> tuple[Path, str] | tuple[None, None]:
         return _configured_target_path, remote_conn_id
 
-
     def upload_compiled_sql(self, tmp_project_dir: str, context: Context) -> None:
         """
         Uploads the compiled SQL files from the dbt compile output to the remote store.