Source code for airflow.providers.amazon.aws.sensors.batch
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations

from collections.abc import Sequence
from datetime import timedelta
from typing import TYPE_CHECKING, Any

from airflow.configuration import conf
from airflow.exceptions import AirflowException
from airflow.providers.amazon.aws.hooks.batch_client import BatchClientHook
from airflow.providers.amazon.aws.sensors.base_aws import AwsBaseSensor
from airflow.providers.amazon.aws.triggers.batch import BatchJobTrigger
from airflow.providers.amazon.aws.utils.mixins import aws_template_fields

if TYPE_CHECKING:
    from airflow.utils.context import Context
[docs]class BatchSensor(AwsBaseSensor[BatchClientHook]):
    """
    Poll the state of the Batch Job until it reaches a terminal state; fails if the job fails.

    .. seealso::
        For more information on how to use this sensor, take a look at the guide:
        :ref:`howto/sensor:BatchSensor`

    :param job_id: Batch job_id to check the state for
    :param aws_conn_id: The Airflow connection used for AWS credentials.
        If this is ``None`` or empty then the default boto3 behaviour is used. If
        running Airflow in a distributed manner and aws_conn_id is None or
        empty, then default boto3 configuration would be used (and must be
        maintained on each worker node).
    :param region_name: AWS region_name. If not specified then the default boto3 behaviour is used.
    :param verify: Whether or not to verify SSL certificates. See:
        https://boto3.amazonaws.com/v1/documentation/api/latest/reference/core/session.html
    :param deferrable: Run sensor in the deferrable mode.
    :param poke_interval: polling period in seconds to check for the status of the job.
    :param max_retries: Number of times to poll for job state before
        returning the current state.
    """
[docs]    def execute_complete(self, context: Context, event: dict[str, Any]) -> None:
        """
        Execute when the trigger fires - returns immediately.

        Relies on trigger to throw an exception, otherwise it assumes execution was successful.
        """
        if event["status"] != "success":
            raise AirflowException(f"Error while running job: {event}")
        job_id = event["job_id"]
        self.log.info("Batch Job %s complete", job_id)
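A minimal usage sketch for this sensor, assuming a Batch job has already been submitted elsewhere and its ID is known; the DAG id, schedule, connection, region, and job ID below are illustrative placeholders, not part of this module:

    # Minimal sketch: wait (deferrably) for an already-submitted Batch job to finish.
    from datetime import datetime

    from airflow import DAG
    from airflow.providers.amazon.aws.sensors.batch import BatchSensor

    with DAG(dag_id="example_batch_sensor", start_date=datetime(2024, 1, 1), schedule=None):
        wait_for_batch_job = BatchSensor(
            task_id="wait_for_batch_job",
            job_id="00000000-0000-0000-0000-000000000000",  # placeholder job ID
            aws_conn_id="aws_default",
            region_name="us-east-1",
            deferrable=True,   # hand the wait off to the triggerer instead of occupying a worker slot
            poke_interval=30,  # seconds between status checks
            max_retries=120,   # stop polling after roughly an hour at this interval
        )

When the deferred trigger fires, ``execute_complete`` above inspects the event payload and raises ``AirflowException`` unless the reported status is ``success``.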
[docs]class BatchComputeEnvironmentSensor(AwsBaseSensor[BatchClientHook]):
    """
    Poll the state of the Batch environment until it reaches a terminal state; fails if the environment fails.

    .. seealso::
        For more information on how to use this sensor, take a look at the guide:
        :ref:`howto/sensor:BatchComputeEnvironmentSensor`

    :param compute_environment: Batch compute environment name
    :param aws_conn_id: The Airflow connection used for AWS credentials.
        If this is ``None`` or empty then the default boto3 behaviour is used. If
        running Airflow in a distributed manner and aws_conn_id is None or
        empty, then default boto3 configuration would be used (and must be
        maintained on each worker node).
    :param region_name: AWS region_name. If not specified then the default boto3 behaviour is used.
    :param verify: Whether or not to verify SSL certificates. See:
        https://boto3.amazonaws.com/v1/documentation/api/latest/reference/core/session.html
    """
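A minimal sketch of waiting on a compute environment, assuming an environment named "my-compute-env" exists in the target account; the name, connection, and region are illustrative placeholders:

    # Minimal sketch: block downstream tasks until the compute environment reaches a terminal state.
    from airflow.providers.amazon.aws.sensors.batch import BatchComputeEnvironmentSensor

    wait_for_compute_env = BatchComputeEnvironmentSensor(
        task_id="wait_for_compute_env",
        compute_environment="my-compute-env",  # placeholder environment name
        aws_conn_id="aws_default",
        region_name="us-east-1",
    )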
[docs]class BatchJobQueueSensor(AwsBaseSensor[BatchClientHook]):
    """
    Poll the state of the Batch job queue until it reaches a terminal state; fails if the queue fails.

    .. seealso::
        For more information on how to use this sensor, take a look at the guide:
        :ref:`howto/sensor:BatchJobQueueSensor`

    :param job_queue: Batch job queue name
    :param treat_non_existing_as_deleted: If True, a non-existing Batch job queue is considered as a deleted
        queue and as such a valid case.
    :param aws_conn_id: The Airflow connection used for AWS credentials.
        If this is ``None`` or empty then the default boto3 behaviour is used. If
        running Airflow in a distributed manner and aws_conn_id is None or
        empty, then default boto3 configuration would be used (and must be
        maintained on each worker node).
    :param region_name: AWS region_name. If not specified then the default boto3 behaviour is used.
    :param verify: Whether or not to verify SSL certificates. See:
        https://boto3.amazonaws.com/v1/documentation/api/latest/reference/core/session.html
    """
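A minimal sketch of waiting on a job queue, assuming a queue named "my-job-queue"; the queue name, connection, and region are illustrative placeholders. Setting ``treat_non_existing_as_deleted`` is useful for teardown flows, where a queue that no longer exists should count as successfully removed:

    # Minimal sketch: wait for the job queue to settle, treating a missing queue as deleted.
    from airflow.providers.amazon.aws.sensors.batch import BatchJobQueueSensor

    wait_for_job_queue = BatchJobQueueSensor(
        task_id="wait_for_job_queue",
        job_queue="my-job-queue",            # placeholder queue name
        treat_non_existing_as_deleted=True,  # a vanished queue is a valid terminal state
        aws_conn_id="aws_default",
        region_name="us-east-1",
    )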