Source code for airflow.providers.amazon.aws.sensors.batch
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations

from datetime import timedelta
from functools import cached_property
from typing import TYPE_CHECKING, Any, Sequence

from airflow.configuration import conf
from airflow.exceptions import AirflowException
from airflow.providers.amazon.aws.hooks.batch_client import BatchClientHook
from airflow.providers.amazon.aws.triggers.batch import BatchJobTrigger
from airflow.sensors.base import BaseSensorOperator

if TYPE_CHECKING:
    from airflow.utils.context import Context
class BatchSensor(BaseSensorOperator):
    """
    Poll the state of the Batch Job until it reaches a terminal state; fails if the job fails.

    .. seealso::
        For more information on how to use this sensor, take a look at the guide:
        :ref:`howto/sensor:BatchSensor`

    :param job_id: Batch job_id to check the state for
    :param aws_conn_id: aws connection to use, defaults to 'aws_default'
        If this is None or empty then the default boto3 behaviour is used. If
        running Airflow in a distributed manner and aws_conn_id is None or empty,
        then default boto3 configuration would be used (and must be maintained on
        each worker node).
    :param region_name: aws region name associated with the client
    :param deferrable: Run sensor in the deferrable mode.
    :param poke_interval: polling period in seconds to check for the status of the job.
    :param max_retries: Number of times to poll for job state before returning the current state.
    """
    def execute_complete(self, context: Context, event: dict[str, Any]) -> None:
        """
        Execute when the trigger fires - returns immediately.

        Relies on trigger to throw an exception, otherwise it assumes execution was successful.
        """
        if event["status"] != "success":
            raise AirflowException(f"Error while running job: {event}")
        job_id = event["job_id"]
        self.log.info("Batch Job %s complete", job_id)
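
# --- Illustrative usage (not part of the module source) ---
# A minimal sketch of waiting on an AWS Batch job from a DAG. The DAG id, the
# upstream "submit_batch_job" task, and the connection id are assumptions made
# for this example only; in practice the job id usually comes from a submit
# task via XCom, since "job_id" is a templated field of BatchSensor.
from datetime import datetime

from airflow import DAG
from airflow.providers.amazon.aws.sensors.batch import BatchSensor

with DAG(dag_id="example_batch_job_wait", start_date=datetime(2024, 1, 1), schedule=None):
    wait_for_batch_job = BatchSensor(
        task_id="wait_for_batch_job",
        # assumed upstream task that returns the submitted Batch job id
        job_id="{{ ti.xcom_pull(task_ids='submit_batch_job') }}",
        aws_conn_id="aws_default",
        poke_interval=30,    # poll every 30 seconds
        deferrable=True,     # hand polling off to the triggerer via BatchJobTrigger
    )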
class BatchComputeEnvironmentSensor(BaseSensorOperator):
    """
    Poll the state of the Batch environment until it reaches a terminal state; fails if the environment fails.

    .. seealso::
        For more information on how to use this sensor, take a look at the guide:
        :ref:`howto/sensor:BatchComputeEnvironmentSensor`

    :param compute_environment: Batch compute environment name
    :param aws_conn_id: aws connection to use, defaults to 'aws_default'
        If this is None or empty then the default boto3 behaviour is used. If
        running Airflow in a distributed manner and aws_conn_id is None or empty,
        then default boto3 configuration would be used (and must be maintained on
        each worker node).
    :param region_name: aws region name associated with the client
    """
    @cached_property
    def hook(self) -> BatchClientHook:
        """Create and return a BatchClientHook."""
        return BatchClientHook(
            aws_conn_id=self.aws_conn_id,
            region_name=self.region_name,
        )
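
# --- Illustrative usage (not part of the module source) ---
# A minimal sketch, assuming a compute environment named "example-compute-env"
# created outside of Airflow; the DAG id, region, and connection id are
# placeholders for this example only.
from datetime import datetime

from airflow import DAG
from airflow.providers.amazon.aws.sensors.batch import BatchComputeEnvironmentSensor

with DAG(dag_id="example_batch_compute_env", start_date=datetime(2024, 1, 1), schedule=None):
    wait_for_compute_environment = BatchComputeEnvironmentSensor(
        task_id="wait_for_compute_environment",
        compute_environment="example-compute-env",  # assumed environment name
        aws_conn_id="aws_default",
        region_name="us-east-1",                     # assumed region
        poke_interval=60,                            # poll once a minute
    )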
class BatchJobQueueSensor(BaseSensorOperator):
    """
    Poll the state of the Batch job queue until it reaches a terminal state; fails if the queue fails.

    .. seealso::
        For more information on how to use this sensor, take a look at the guide:
        :ref:`howto/sensor:BatchJobQueueSensor`

    :param job_queue: Batch job queue name
    :param treat_non_existing_as_deleted: If True, a non-existing Batch job queue is considered as a
        deleted queue and as such a valid case.
    :param aws_conn_id: aws connection to use, defaults to 'aws_default'
        If this is None or empty then the default boto3 behaviour is used. If
        running Airflow in a distributed manner and aws_conn_id is None or empty,
        then default boto3 configuration would be used (and must be maintained on
        each worker node).
    :param region_name: aws region name associated with the client
    """
    @cached_property
    def hook(self) -> BatchClientHook:
        """Create and return a BatchClientHook."""
        return BatchClientHook(
            aws_conn_id=self.aws_conn_id,
            region_name=self.region_name,
        )
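
# --- Illustrative usage (not part of the module source) ---
# A minimal sketch of waiting for a Batch job queue to reach a terminal state;
# with treat_non_existing_as_deleted=True, a queue that no longer exists is
# also accepted as a valid (deleted) state. The queue name and DAG settings
# are assumptions for this example only.
from datetime import datetime

from airflow import DAG
from airflow.providers.amazon.aws.sensors.batch import BatchJobQueueSensor

with DAG(dag_id="example_batch_job_queue", start_date=datetime(2024, 1, 1), schedule=None):
    wait_for_job_queue = BatchJobQueueSensor(
        task_id="wait_for_job_queue",
        job_queue="example-job-queue",        # assumed queue name
        treat_non_existing_as_deleted=True,   # a deleted/absent queue also satisfies the sensor
        aws_conn_id="aws_default",
        poke_interval=60,
    )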