forked from astronomer/astronomer-cosmos
-
Notifications
You must be signed in to change notification settings - Fork 0
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Add AWS ECS task run execution mode (astronomer#1507)
This PR introduces an AWS ECS operator that allows Cosmos to run dbt tasks in AWS ECS. This ensures that dbt executions are consistent with the existing Airflow DAGs running in ECS. Similar to the Azure Container Instance execution mode, this enhancement provides an ECS-native execution mode for Cosmos users. ## What changes? * New `AwsEcsOperator` classes (inheriting from `AbstractDbtBaseOperator`) - Based on the original [`EcsRunTaskOperator`](https://github.com/apache/airflow/blob/7190cb23ea0f6c7e60a36479a6ee34882d86e90c/providers/src/airflow/providers/amazon/aws/operators/ecs.py#L331) * Tests * Adjusted documentation ## Related Issue(s) Closes: astronomer#1355 --------- Co-authored-by: Aldo Escobar <aoelvp94@gmail.com>
- Loading branch information
1 parent
a7dcd1a
commit 03358bf
Showing
10 changed files
with
710 additions
and
1 deletion.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,207 @@ | ||
from __future__ import annotations | ||
|
||
import inspect | ||
from typing import Any, Callable, Sequence | ||
|
||
from airflow.utils.context import Context | ||
|
||
from cosmos.config import ProfileConfig | ||
from cosmos.log import get_logger | ||
from cosmos.operators.base import ( | ||
AbstractDbtBase, | ||
DbtBuildMixin, | ||
DbtLSMixin, | ||
DbtRunMixin, | ||
DbtRunOperationMixin, | ||
DbtSeedMixin, | ||
DbtSnapshotMixin, | ||
DbtSourceMixin, | ||
DbtTestMixin, | ||
) | ||
|
||
logger = get_logger(__name__) | ||
|
||
DEFAULT_ENVIRONMENT_VARIABLES: dict[str, str] = {} | ||
|
||
try: | ||
from airflow.providers.amazon.aws.operators.ecs import EcsRunTaskOperator | ||
except ImportError: # pragma: no cover | ||
raise ImportError( | ||
"Could not import EcsRunTaskOperator. Ensure you've installed the Amazon Web Services provider " | ||
"separately or with `pip install astronomer-cosmos[...,aws-ecs]`." | ||
) # pragma: no cover | ||
|
||
|
||
class DbtAwsEcsBaseOperator(AbstractDbtBase, EcsRunTaskOperator):  # type: ignore
    """
    Executes a dbt core cli command in an ECS Task instance with dbt installed in it.

    :param aws_conn_id: Airflow connection id forwarded to ``EcsRunTaskOperator``.
    :param cluster: ECS cluster the task is launched in.
    :param task_definition: ECS task definition (family or ARN) to run.
    :param container_name: Name of the container inside the task definition whose
        command and environment are overridden with the rendered dbt invocation.
    :param profile_config: Optional Cosmos profile configuration used when building
        the dbt command.
    :param command: Optional initial command; replaced by ``build_command`` with the
        rendered dbt command before execution.
    :param environment_variables: Extra environment variables for the container;
        merged with (and taking precedence over) the env vars from ``build_cmd``.
    """

    # Expose both parents' templated fields so Jinja rendering covers the dbt
    # arguments as well as the ECS run-task arguments.
    template_fields: Sequence[str] = tuple(
        list(AbstractDbtBase.template_fields) + list(EcsRunTaskOperator.template_fields)
    )

    # NOTE(review): flag consumed by Cosmos base classes — presumably disables
    # dbt flag interception for this execution mode; confirm against AbstractDbtBase.
    intercept_flag = False

    def __init__(
        self,
        # arguments required by EcsRunTaskOperator
        aws_conn_id: str,
        cluster: str,
        task_definition: str,
        container_name: str,
        #
        profile_config: ProfileConfig | None = None,
        command: list[str] | None = None,
        environment_variables: dict[str, Any] | None = None,
        **kwargs: Any,
    ) -> None:
        self.profile_config = profile_config
        self.command = command
        self.environment_variables = environment_variables or DEFAULT_ENVIRONMENT_VARIABLES
        self.container_name = container_name
        # Inject the ECS-specific arguments into kwargs so they reach
        # EcsRunTaskOperator.__init__ below; `overrides` starts as None and is
        # filled in later by build_command().
        kwargs.update(
            {
                "aws_conn_id": aws_conn_id,
                "task_definition": task_definition,
                "cluster": cluster,
                "overrides": None,
            }
        )
        super().__init__(**kwargs)
        # In PR #1474, we refactored cosmos.operators.base.AbstractDbtBase to remove its inheritance from BaseOperator
        # and eliminated the super().__init__() call. This change was made to resolve conflicts in parent class
        # initializations while adding support for ExecutionMode.AIRFLOW_ASYNC. Operators under this mode inherit
        # Airflow provider operators that enable deferrable SQL query execution. Since super().__init__() was removed
        # from AbstractDbtBase and different parent classes require distinct initialization arguments, we explicitly
        # initialize them (including the BaseOperator) here by segregating the required arguments for each parent class.
        base_operator_args = set(inspect.signature(EcsRunTaskOperator.__init__).parameters.keys())
        base_kwargs = {}
        for arg_key, arg_value in kwargs.items():
            if arg_key in base_operator_args:
                base_kwargs[arg_key] = arg_value
        # task_id/aws_conn_id are forwarded explicitly — presumably they are not
        # named parameters of EcsRunTaskOperator.__init__ (accepted via **kwargs),
        # so the signature-based filter above would drop them; confirm upstream.
        base_kwargs["task_id"] = kwargs["task_id"]
        base_kwargs["aws_conn_id"] = aws_conn_id
        EcsRunTaskOperator.__init__(self, **base_kwargs)

    def build_and_run_cmd(
        self,
        context: Context,
        cmd_flags: list[str] | None = None,
        run_as_async: bool = False,
        async_context: dict[str, Any] | None = None,
    ) -> Any:
        """
        Render the dbt command, then execute it as an ECS task run.

        ``run_as_async`` and ``async_context`` are accepted for interface
        compatibility with other Cosmos operators but are not used here.
        """
        self.build_command(context, cmd_flags)
        self.log.info(f"Running command: {self.command}")

        result = EcsRunTaskOperator.execute(self, context)

        logger.info(result)

    def build_command(self, context: Context, cmd_flags: list[str] | None = None) -> None:
        """
        Build the dbt command and store it in ``self.overrides`` as an ECS
        container override for ``self.container_name``.
        """
        # For the first round, we're going to assume that the command is dbt
        # This means that we don't have openlineage support, but we will create a ticket
        # to add that in the future
        self.dbt_executable_path = "dbt"
        dbt_cmd, env_vars = self.build_cmd(context=context, cmd_flags=cmd_flags)
        # User-supplied environment variables win over those produced by build_cmd.
        self.environment_variables = {**env_vars, **self.environment_variables}
        self.command = dbt_cmd
        # Override Ecs Task Run default arguments with dbt command
        self.overrides = {
            "containerOverrides": [
                {
                    "name": self.container_name,
                    "command": self.command,
                    "environment": [{"name": key, "value": value} for key, value in self.environment_variables.items()],
                }
            ]
        }
|
||
|
||
class DbtBuildAwsEcsOperator(DbtBuildMixin, DbtAwsEcsBaseOperator):
    """
    Executes a dbt core build command in an AWS ECS task.

    Construction is fully handled by the parent classes; no ``__init__``
    override is needed (a pass-through ``super().__init__`` delegation
    would be redundant).
    """

    template_fields: Sequence[str] = DbtAwsEcsBaseOperator.template_fields + DbtBuildMixin.template_fields  # type: ignore[operator]
|
||
|
||
class DbtLSAwsEcsOperator(DbtLSMixin, DbtAwsEcsBaseOperator):
    """
    Executes a dbt core ls command in an AWS ECS task.

    Construction is fully handled by the parent classes; no ``__init__``
    override is needed (a pass-through ``super().__init__`` delegation
    would be redundant).
    """
|
||
|
||
class DbtSeedAwsEcsOperator(DbtSeedMixin, DbtAwsEcsBaseOperator):
    """
    Executes a dbt core seed command in an AWS ECS task.

    :param full_refresh: dbt optional arg - dbt will treat incremental models as table models

    Construction is fully handled by the parent classes; no ``__init__``
    override is needed (a pass-through ``super().__init__`` delegation
    would be redundant).
    """

    template_fields: Sequence[str] = DbtAwsEcsBaseOperator.template_fields + DbtSeedMixin.template_fields  # type: ignore[operator]
|
||
|
||
class DbtSnapshotAwsEcsOperator(DbtSnapshotMixin, DbtAwsEcsBaseOperator):
    """
    Executes a dbt core snapshot command in an AWS ECS task.

    Construction is fully handled by the parent classes; no ``__init__``
    override is needed (a pass-through ``super().__init__`` delegation
    would be redundant).
    """
|
||
|
||
class DbtSourceAwsEcsOperator(DbtSourceMixin, DbtAwsEcsBaseOperator):
    """
    Executes a dbt source freshness command in an AWS ECS task.

    Construction is fully handled by the parent classes; no ``__init__``
    override is needed (a pass-through ``super().__init__`` delegation
    would be redundant).
    """
|
||
|
||
class DbtRunAwsEcsOperator(DbtRunMixin, DbtAwsEcsBaseOperator):
    """
    Executes a dbt core run command in an AWS ECS task.

    Construction is fully handled by the parent classes; no ``__init__``
    override is needed (a pass-through ``super().__init__`` delegation
    would be redundant).
    """

    template_fields: Sequence[str] = DbtAwsEcsBaseOperator.template_fields + DbtRunMixin.template_fields  # type: ignore[operator]
|
||
|
||
class DbtTestAwsEcsOperator(DbtTestMixin, DbtAwsEcsBaseOperator):
    """
    Executes a dbt core test command in an AWS ECS task.

    :param on_warning_callback: stored for interface parity with other Cosmos
        test operators; it is not invoked by this execution mode at present.
    """

    def __init__(self, on_warning_callback: Callable[..., Any] | None = None, **kwargs: str) -> None:
        super().__init__(**kwargs)
        # Kept only so callers can pass the same arguments as in other
        # execution modes; nothing in this operator currently triggers it.
        self.on_warning_callback = on_warning_callback
|
||
|
||
class DbtRunOperationAwsEcsOperator(DbtRunOperationMixin, DbtAwsEcsBaseOperator):
    """
    Executes a dbt core run-operation command in an AWS ECS task.

    :param macro_name: name of macro to execute
    :param args: Supply arguments to the macro. This dictionary will be mapped to the keyword arguments defined in the
        selected macro.

    Construction is fully handled by the parent classes; no ``__init__``
    override is needed (a pass-through ``super().__init__`` delegation
    would be redundant).
    """

    template_fields: Sequence[str] = DbtAwsEcsBaseOperator.template_fields + DbtRunOperationMixin.template_fields  # type: ignore[operator]
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Oops, something went wrong.