Skip to content

Commit

Permalink
Rename astronomer/operators to astronomer/providers
Browse files Browse the repository at this point in the history
  • Loading branch information
kaxil committed Feb 19, 2022
1 parent 3d903b3 commit 5717851
Show file tree
Hide file tree
Showing 109 changed files with 148 additions and 148 deletions.
File renamed without changes.
File renamed without changes.
File renamed without changes.
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
from airflow import DAG
from airflow.utils.dates import days_ago

from astronomer.operators.amazon.aws.sensors.s3 import S3KeySensorAsync
from astronomer.providers.amazon.aws.sensors.s3 import S3KeySensorAsync

default_args = {
"retry": 5,
Expand Down
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import logging

from astronomer.operators.amazon.aws.hooks.base_aws_async import AwsBaseHookAsync
from astronomer.providers.amazon.aws.hooks.base_aws_async import AwsBaseHookAsync

log = logging.getLogger(__name__)

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@
from airflow.models import BaseOperator
from airflow.providers.amazon.aws.hooks.s3 import S3Hook

from astronomer.operators.amazon.aws.triggers.s3 import S3KeyTrigger
from astronomer.providers.amazon.aws.triggers.s3 import S3KeyTrigger

log = logging.getLogger(__name__)

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@
from airflow.triggers.base import BaseTrigger, TriggerEvent
from botocore.exceptions import ClientError

from astronomer.operators.amazon.aws.hooks.s3 import S3HookAsync
from astronomer.providers.amazon.aws.hooks.s3 import S3HookAsync

log = logging.getLogger(__name__)

Expand Down Expand Up @@ -84,7 +84,7 @@ def serialize(self) -> Tuple[str, Dict[str, Any]]:
Serialize S3KeyTrigger arguments and classpath.
"""
return (
"astronomer.operators.amazon.aws.triggers.s3.S3KeyTrigger",
"astronomer.providers.amazon.aws.triggers.s3.S3KeyTrigger",
{
"bucket_name": self.bucket_name,
"bucket_key": self.bucket_key,
Expand Down
File renamed without changes.
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
KubernetesPodOperator,
)

from astronomer.operators.cncf.kubernetes.triggers.wait_container import (
from astronomer.providers.cncf.kubernetes.triggers.wait_container import (
PodLaunchTimeoutException,
WaitContainerTrigger,
)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@
from airflow.utils import timezone
from kubernetes_asyncio.client import CoreV1Api

from astronomer.operators.cncf.kubernetes.hooks.kubernetes_async import (
from astronomer.providers.cncf.kubernetes.hooks.kubernetes_async import (
KubernetesHookAsync,
)

Expand Down Expand Up @@ -63,7 +63,7 @@ def __init__(

def serialize(self) -> Tuple[str, Dict[str, Any]]:
return (
"astronomer.operators.cncf.kubernetes.triggers.wait_container.WaitContainerTrigger",
"astronomer.providers.cncf.kubernetes.triggers.wait_container.WaitContainerTrigger",
dict(
kubernetes_conn_id=self.kubernetes_conn_id,
hook_params=self.hook_params,
Expand Down
File renamed without changes.
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@
from airflow import DAG
from airflow.utils.dates import days_ago

from astronomer.operators.core.sensors.external_task import ExternalTaskSensorAsync
from astronomer.providers.core.sensors.external_task import ExternalTaskSensorAsync

default_args = {"start_date": days_ago(0)}
with DAG("test_external_task_async", schedule_interval="@daily", default_args=default_args) as dag:
Expand Down
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
import airflow
from airflow.utils.dates import days_ago

from astronomer.operators.core.sensors.filesystem import FileSensorAsync
from astronomer.providers.core.sensors.filesystem import FileSensorAsync

with airflow.DAG("example_async_file_sensor", start_date=days_ago(1), tags=["async"]) as dag:
sensor_task = FileSensorAsync(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
from airflow.sensors.external_task import ExternalTaskSensor
from airflow.utils.session import provide_session

from astronomer.operators.core.triggers.external_task import (
from astronomer.providers.core.triggers.external_task import (
DagStateTrigger,
TaskStateTrigger,
)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
from airflow.hooks.filesystem import FSHook
from airflow.sensors.filesystem import FileSensor

from astronomer.operators.core.triggers.filesystem import FileTrigger
from astronomer.providers.core.triggers.filesystem import FileTrigger

log = logging.getLogger(__name__)

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@ def serialize(self) -> Tuple[str, Dict[str, Any]]:
Serializes TaskStateTrigger arguments and classpath.
"""
return (
"astronomer.operators.core.triggers.external_task.TaskStateTrigger",
"astronomer.providers.core.triggers.external_task.TaskStateTrigger",
{
"dag_id": self.dag_id,
"task_id": self.task_id,
Expand Down Expand Up @@ -90,7 +90,7 @@ def serialize(self) -> Tuple[str, Dict[str, Any]]:
Serializes DagStateTrigger arguments and classpath.
"""
return (
"astronomer.operators.core.triggers.external_dag.DagStateTrigger",
"astronomer.providers.core.triggers.external_task.DagStateTrigger",
{
"dag_id": self.dag_id,
"states": self.states,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,7 @@ def serialize(self) -> Tuple[str, Dict[str, Any]]:
Serializes FileTrigger arguments and classpath.
"""
return (
"astronomer.operators.core.triggers.filesystem.FileTrigger",
"astronomer.providers.core.triggers.filesystem.FileTrigger",
{
"filepath": self.filepath,
"recursive": self.recursive,
Expand Down
File renamed without changes.
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
from airflow import DAG
from airflow.utils.dates import days_ago

from astronomer.operators.databricks.operators.databricks import (
from astronomer.providers.databricks.operators.databricks import (
DatabricksRunNowOperatorAsync,
DatabricksSubmitRunOperatorAsync,
)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
DatabricksSubmitRunOperator,
)

from astronomer.operators.databricks.triggers.databricks import DatabricksTrigger
from astronomer.providers.databricks.triggers.databricks import DatabricksTrigger


class DatabricksSubmitRunOperatorAsync(DatabricksSubmitRunOperator):
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
from airflow import AirflowException
from airflow.triggers.base import BaseTrigger, TriggerEvent

from astronomer.operators.databricks.hooks.databricks import DatabricksHookAsync
from astronomer.providers.databricks.hooks.databricks import DatabricksHookAsync


class DatabricksTrigger(BaseTrigger):
Expand All @@ -30,7 +30,7 @@ def serialize(self) -> Tuple[str, Dict[str, Any]]:
Serializes DatabricksTrigger arguments and classpath.
"""
return (
"astronomer.operators.databricks.triggers.databricks.DatabricksTrigger",
"astronomer.providers.databricks.triggers.databricks.DatabricksTrigger",
{
"conn_id": self.conn_id,
"task_id": self.task_id,
Expand Down
File renamed without changes.
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@
BigQueryValueCheckOperator,
)

from astronomer.operators.google.cloud.operators.bigquery import (
from astronomer.providers.google.cloud.operators.bigquery import (
BigQueryInsertJobOperatorAsync,
)

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@
LocalFilesystemToGCSOperator,
)

from astronomer.operators.google.cloud.sensors.gcs import GCSObjectExistenceSensorAsync
from astronomer.providers.google.cloud.sensors.gcs import GCSObjectExistenceSensorAsync

START_DATE = datetime(2022, 1, 1)

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@
from gcloud.aio.bigquery import Job
from google.cloud.bigquery import CopyJob, ExtractJob, LoadJob, QueryJob

from astronomer.operators.google.common.hooks.base_google import GoogleBaseHookAsync
from astronomer.providers.google.common.hooks.base_google import GoogleBaseHookAsync

BigQueryJob = Union[CopyJob, QueryJob, LoadJob, ExtractJob]

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@
from airflow.providers.google.cloud.hooks.gcs import GCSHook
from gcloud.aio.storage import Storage

from astronomer.operators.google.common.hooks.base_google import GoogleBaseHookAsync
from astronomer.providers.google.common.hooks.base_google import GoogleBaseHookAsync

DEFAULT_TIMEOUT = 60

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@
# under the License.


"""This module contains Google BigQueryAsync operators."""
"""This module contains Google BigQueryAsync operators."""
from typing import TYPE_CHECKING

from airflow.exceptions import AirflowException
Expand All @@ -26,8 +26,8 @@
from airflow.providers.google.cloud.operators.bigquery import BigQueryInsertJobOperator
from google.api_core.exceptions import Conflict

from astronomer.operators.google.cloud.hooks.bigquery import _BigQueryHook
from astronomer.operators.google.cloud.triggers.bigquery import BigQueryInsertJobTrigger
from astronomer.providers.google.cloud.hooks.bigquery import _BigQueryHook
from astronomer.providers.google.cloud.triggers.bigquery import BigQueryInsertJobTrigger

if TYPE_CHECKING:
from airflow.utils.context import Context
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@
from airflow.exceptions import AirflowException
from airflow.models.baseoperator import BaseOperator

from astronomer.operators.google.cloud.triggers.gcs import GCSBlobTrigger
from astronomer.providers.google.cloud.triggers.gcs import GCSBlobTrigger


class GCSObjectExistenceSensorAsync(BaseOperator):
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@

from airflow.triggers.base import BaseTrigger, TriggerEvent

from astronomer.operators.google.cloud.hooks.bigquery import BigQueryHookAsync
from astronomer.providers.google.cloud.hooks.bigquery import BigQueryHookAsync


class BigQueryInsertJobTrigger(BaseTrigger):
Expand Down Expand Up @@ -32,7 +32,7 @@ def serialize(self) -> Tuple[str, Dict[str, Any]]:
Serializes BigQueryInsertJobTrigger arguments and classpath.
"""
return (
"astronomer.operators.google.cloud.triggers.bigquery.BigQueryInsertJobTrigger",
"astronomer.providers.google.cloud.triggers.bigquery.BigQueryInsertJobTrigger",
{
"conn_id": self.conn_id,
"job_id": self.job_id,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
from aiohttp import ClientSession as Session
from airflow.triggers.base import BaseTrigger, TriggerEvent

from astronomer.operators.google.cloud.hooks.gcs import GCSHookAsync
from astronomer.providers.google.cloud.hooks.gcs import GCSHookAsync

log = logging.getLogger(__name__)

Expand Down Expand Up @@ -40,7 +40,7 @@ def serialize(self) -> Tuple[str, Dict[str, Any]]:
Serializes GCSBlobTrigger arguments and classpath.
"""
return (
"astronomer.operators.google.cloud.triggers.gcs.GCSBlobTrigger",
"astronomer.providers.google.cloud.triggers.gcs.GCSBlobTrigger",
{
"bucket": self.bucket,
"object_name": self.object_name,
Expand Down
Original file line number Diff line number Diff line change
@@ -1,19 +1,19 @@
import warnings

from astronomer.operators.http.operators.http import HttpSensorAsync
from astronomer.operators.http.triggers.http import HttpTrigger
from astronomer.providers.http.operators.http import HttpSensorAsync
from astronomer.providers.http.triggers.http import HttpTrigger

DEPRECATED_NAMES = {
"HttpSensorAsync": "astronomer.operators.http.operators.http",
"HttpTrigger": "astronomer.operators.http.triggers.http",
"HttpSensorAsync": "astronomer.providers.http.operators.http",
"HttpTrigger": "astronomer.providers.http.triggers.http",
}


def __getattr__(name):
if name in DEPRECATED_NAMES:
mod = DEPRECATED_NAMES[name]
warnings.warn(
f"Importing {name} from `astronomer.operators.http` is deprecated; please use `{mod}`.",
f"Importing {name} from `astronomer.providers.http` is deprecated; please use `{mod}`.",
DeprecationWarning,
stacklevel=2,
)
Expand Down
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
from airflow import DAG
from airflow.utils.dates import days_ago

from astronomer.operators.http.operators.http import HttpSensorAsync
from astronomer.providers.http.operators.http import HttpSensorAsync

with DAG("example_async_http_sensor", tags=["example", "async"], start_date=days_ago(2)) as dag:
# This task will continue to defer as it will receive 404 every time
Expand Down
File renamed without changes.
File renamed without changes.
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
from airflow.providers.http.sensors.http import HttpSensor

from astronomer.operators.http.triggers.http import HttpTrigger
from astronomer.providers.http.triggers.http import HttpTrigger


class HttpSensorAsync(HttpSensor):
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
from airflow import AirflowException
from airflow.triggers.base import BaseTrigger, TriggerEvent

from astronomer.operators.http.hooks.http import HttpHookAsync
from astronomer.providers.http.hooks.http import HttpHookAsync


class HttpTrigger(BaseTrigger):
Expand Down Expand Up @@ -57,7 +57,7 @@ def serialize(self) -> Tuple[str, Dict[str, Any]]:
Serializes HttpTrigger arguments and classpath.
"""
return (
"astronomer.operators.http.triggers.http.HttpTrigger",
"astronomer.providers.http.triggers.http.HttpTrigger",
{
"endpoint": self.endpoint,
"data": self.data,
Expand Down
File renamed without changes.
Original file line number Diff line number Diff line change
Expand Up @@ -16,12 +16,12 @@
# specific language governing permissions and limitations
# under the License.
"""
Example use of SnowflakeAsync related operators.
Example use of SnowflakeAsync related operators.
"""
from airflow import DAG
from airflow.utils.dates import days_ago

from astronomer.operators.snowflake.operators.snowflake import SnowflakeOperatorAsync
from astronomer.providers.snowflake.operators.snowflake import SnowflakeOperatorAsync

SNOWFLAKE_CONN_ID = "my_snowflake_conn"
SNOWFLAKE_SAMPLE_TABLE = "sample_table"
Expand Down
Original file line number Diff line number Diff line change
@@ -1,8 +1,8 @@
from airflow.exceptions import AirflowException
from airflow.providers.snowflake.operators.snowflake import SnowflakeOperator

from astronomer.operators.snowflake.hooks.snowflake import SnowflakeHookAsync
from astronomer.operators.snowflake.triggers.snowflake_trigger import (
from astronomer.providers.snowflake.hooks.snowflake import SnowflakeHookAsync
from astronomer.providers.snowflake.triggers.snowflake_trigger import (
SnowflakeTrigger,
get_db_hook,
)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
from airflow.exceptions import AirflowException
from airflow.triggers.base import BaseTrigger, TriggerEvent

from astronomer.operators.snowflake.hooks.snowflake import SnowflakeHookAsync
from astronomer.providers.snowflake.hooks.snowflake import SnowflakeHookAsync


def get_db_hook(self) -> SnowflakeHookAsync:
Expand Down Expand Up @@ -37,7 +37,7 @@ def serialize(self) -> Tuple[str, Dict[str, Any]]:
Serializes SnowflakeTrigger arguments and classpath.
"""
return (
"astronomer.operators.snowflake.triggers.snowflake_trigger.SnowflakeTrigger",
"astronomer.providers.snowflake.triggers.snowflake_trigger.SnowflakeTrigger",
{
"task_id": self.task_id,
"polling_period_seconds": self.polling_period_seconds,
Expand Down
4 changes: 2 additions & 2 deletions dev/docker-compose.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@ x-airflow-common:
# In order to add custom dependencies or upgrade provider packages you can use your extended image.
# Comment the image line, place your Dockerfile in the directory where you placed the docker-compose.yaml
# and uncomment the "build" line below, Then run `docker-compose build` to build the images.
image: astronomer-operators-dev
image: astronomer-providers-dev
build:
context: ..
dockerfile: dev/Dockerfile
Expand All @@ -27,7 +27,7 @@ x-airflow-common:
- ./dags:/usr/local/airflow/dags
- ./logs:/usr/local/airflow/logs
- ./plugins:/usr/local/airflow/plugins
- ../../astronomer-operators:/usr/local/airflow/astronomer_operators
- ../../astronomer-providers:/usr/local/airflow/astronomer_operators
depends_on:
&airflow-common-depends-on
redis:
Expand Down
Empty file added tests/amazon/__init__.py
Empty file.
Empty file added tests/amazon/aws/__init__.py
Empty file.
Empty file.
Loading

0 comments on commit 5717851

Please sign in to comment.