diff --git a/metaflow/plugins/airflow/sensors/external_task_sensor.py b/metaflow/plugins/airflow/sensors/external_task_sensor.py
index c599a05b4a..aa5fe454e0 100644
--- a/metaflow/plugins/airflow/sensors/external_task_sensor.py
+++ b/metaflow/plugins/airflow/sensors/external_task_sensor.py
@@ -28,27 +28,27 @@ class ExternalTaskSensorDecorator(AirflowSensorDecorator):
         Time, in seconds before the task times out and fails. (Default: 3600)
     poke_interval : int
         Time in seconds that the job should wait in between each try. (Default: 60)
-    mode : string
+    mode : str
         How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
     exponential_backoff : bool
         allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
-    pool : string
+    pool : str
         the slot pool this task should run in,
         slot pools are a way to limit concurrency for certain tasks. (Default:None)
     soft_fail : bool
         Set to true to mark the task as SKIPPED on failure. (Default: False)
-    name : string
+    name : str
         Name of the sensor on Airflow
-    description : string
+    description : str
         Description of sensor in the Airflow UI
-    external_dag_id : string
+    external_dag_id : str
         The dag_id that contains the task you want to wait for.
-    external_task_ids : List[string]
+    external_task_ids : List[str]
         The list of task_ids that you want to wait for.
         If None (default value) the sensor waits for the DAG. (Default: None)
-    allowed_states : List[string]
+    allowed_states : List[str]
         Iterable of allowed states, (Default: ['success'])
-    failed_states : List[string]
+    failed_states : List[str]
         Iterable of failed or dis-allowed states. (Default: None)
     execution_delta : datetime.timedelta
         time difference with the previous execution to look at,
diff --git a/metaflow/plugins/airflow/sensors/s3_sensor.py b/metaflow/plugins/airflow/sensors/s3_sensor.py
index 6fac6725d2..ff5b12be61 100644
--- a/metaflow/plugins/airflow/sensors/s3_sensor.py
+++ b/metaflow/plugins/airflow/sensors/s3_sensor.py
@@ -17,20 +17,20 @@ class S3KeySensorDecorator(AirflowSensorDecorator):
         Time, in seconds before the task times out and fails. (Default: 3600)
     poke_interval : int
         Time in seconds that the job should wait in between each try. (Default: 60)
-    mode : string
+    mode : str
         How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
     exponential_backoff : bool
         allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
-    pool : string
+    pool : str
         the slot pool this task should run in,
         slot pools are a way to limit concurrency for certain tasks. (Default:None)
     soft_fail : bool
         Set to true to mark the task as SKIPPED on failure. (Default: False)
-    name : string
+    name : str
         Name of the sensor on Airflow
-    description : string
+    description : str
         Description of sensor in the Airflow UI
-    bucket_key : str | List[str]
+    bucket_key : Union[str, List[str]]
         The key(s) being waited on. Supports full s3:// style url or relative path from root level.
         When it's specified as a full s3:// url, please leave `bucket_name` as None
     bucket_name : str
@@ -38,7 +38,7 @@ class S3KeySensorDecorator(AirflowSensorDecorator):
         When specified, all the keys passed to bucket_key refers to this bucket. (Default:None)
     wildcard_match : bool
         whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
-    aws_conn_id : string
+    aws_conn_id : str
         a reference to the s3 connection on Airflow. (Default: None)
     verify : bool
         Whether or not to verify SSL certificates for S3 connection. (Default: None)
diff --git a/metaflow/plugins/aws/batch/batch_decorator.py b/metaflow/plugins/aws/batch/batch_decorator.py
index 33caaa2023..8b6de6c86a 100644
--- a/metaflow/plugins/aws/batch/batch_decorator.py
+++ b/metaflow/plugins/aws/batch/batch_decorator.py
@@ -79,7 +79,7 @@ class BatchDecorator(StepDecorator):
         The value for the size (in MiB) of the tmpfs mount for this step.
         This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
         memory allocated for this step.
-    tmpfs_path: string, optional, default None
+    tmpfs_path: str, optional, default None
         Path to tmpfs mount for this step. Defaults to /metaflow_temp.
     inferentia : int, default 0
         Number of Inferentia chips required for this step.
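For context, here is a minimal usage sketch of the decorators whose docstrings this diff touches. The flow-level decorator name `airflow_s3_key_sensor`, the step-level `batch` name, the top-level imports, and every parameter value below are assumptions for illustration only; the diff itself only confirms the parameter names and their documented types.

```python
# Hypothetical sketch, not part of this diff. Assumes the sensor and batch
# decorators are importable from the top-level metaflow package, which may
# vary by Metaflow version.
from metaflow import FlowSpec, step, batch, airflow_s3_key_sensor


@airflow_s3_key_sensor(
    bucket_key="s3://example-bucket/raw/input.csv",  # assumed key; full s3:// url, so bucket_name stays None
    mode="reschedule",   # mode : str, { poke | reschedule }
    poke_interval=120,   # seconds between pokes
    timeout=3600,        # fail the sensor after an hour
    soft_fail=True,      # mark as SKIPPED instead of failed on timeout
)
class SensorGatedFlow(FlowSpec):

    # use_tmpfs is not shown in this diff and is assumed here; tmpfs_path is
    # the str-typed parameter documented in batch_decorator.py above.
    @batch(memory=4096, use_tmpfs=True, tmpfs_path="/metaflow_temp")
    @step
    def start(self):
        self.message = "upstream S3 key is present"
        self.next(self.end)

    @step
    def end(self):
        print(self.message)


if __name__ == "__main__":
    SensorGatedFlow()
```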