Source code for airflow.providers.amazon.aws.operators.batch
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.
"""
AWS Batch services.

.. seealso::

    - https://boto3.amazonaws.com/v1/documentation/api/latest/guide/configuration.html
    - https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/batch.html
    - https://docs.aws.amazon.com/batch/latest/APIReference/Welcome.html
"""

from __future__ import annotations

from collections.abc import Sequence
from datetime import timedelta
from functools import cached_property
from typing import TYPE_CHECKING, Any

from airflow.configuration import conf
from airflow.exceptions import AirflowException
from airflow.models import BaseOperator
from airflow.models.mappedoperator import MappedOperator
from airflow.providers.amazon.aws.hooks.batch_client import BatchClientHook
from airflow.providers.amazon.aws.links.batch import (
    BatchJobDefinitionLink,
    BatchJobDetailsLink,
    BatchJobQueueLink,
)
from airflow.providers.amazon.aws.links.logs import CloudWatchEventsLink
from airflow.providers.amazon.aws.triggers.batch import (
    BatchCreateComputeEnvironmentTrigger,
    BatchJobTrigger,
)
from airflow.providers.amazon.aws.utils import trim_none_values, validate_execute_complete_event
from airflow.providers.amazon.aws.utils.task_log_fetcher import AwsTaskLogFetcher

if TYPE_CHECKING:
    from airflow.utils.context import Context
class BatchOperator(BaseOperator):
    """
    Execute a job on AWS Batch.

    .. seealso::
        For more information on how to use this operator, take a look at the guide:
        :ref:`howto/operator:BatchOperator`

    :param job_name: the name for the job that will run on AWS Batch (templated)
    :param job_definition: the job definition name on AWS Batch
    :param job_queue: the queue name on AWS Batch
    :param container_overrides: the `containerOverrides` parameter for boto3 (templated)
    :param ecs_properties_override: the `ecsPropertiesOverride` parameter for boto3 (templated)
    :param eks_properties_override: the `eksPropertiesOverride` parameter for boto3 (templated)
    :param node_overrides: the `nodeOverrides` parameter for boto3 (templated)
    :param share_identifier: The share identifier for the job. Don't specify this parameter if the job
        queue doesn't have a scheduling policy.
    :param scheduling_priority_override: The scheduling priority for the job. Jobs with a higher
        scheduling priority are scheduled before jobs with a lower scheduling priority.
        This overrides any scheduling priority in the job definition.
    :param array_properties: the `arrayProperties` parameter for boto3
    :param parameters: the `parameters` for boto3 (templated)
    :param job_id: the job ID, usually unknown (None) until the
        submit_job operation gets the jobId defined by AWS Batch
    :param waiters: an :py:class:`.BatchWaiters` object (see note below);
        if None, polling is used with max_retries and status_retries.
    :param max_retries: exponential back-off retries, 4200 = 48 hours;
        polling is only used when waiters is None
    :param status_retries: number of HTTP retries to get job status, 10;
        polling is only used when waiters is None
    :param aws_conn_id: connection id of AWS credentials / region name.
        If None, the default boto3 credential strategy will be used.
    :param region_name: region name to use in AWS Hook.
        Overrides the ``region_name`` in connection (if provided)
    :param tags: collection of tags to apply to the AWS Batch job submission;
        if None, no tags are submitted
    :param deferrable: Run operator in the deferrable mode.
    :param awslogs_enabled: Specifies whether logs from CloudWatch should be printed or not
        (default: False). If it is an array job, only the logs of the first task will be printed.
    :param awslogs_fetch_interval: The interval with which CloudWatch logs are to be fetched
        (default: 30 sec).
    :param poll_interval: (Deferrable mode only) Time in seconds to wait between polling.
    :param submit_job_timeout: Execution timeout in seconds for submitted batch job.

    .. note::
        Any custom waiters must return a waiter for these calls:

        .. code-block:: python

            waiter = waiters.get_waiter("JobExists")
            waiter = waiters.get_waiter("JobRunning")
            waiter = waiters.get_waiter("JobComplete")
    """
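For reference, a minimal task using this operator might look like the following sketch, built only from the parameters documented above; the job name, definition, and queue are placeholders that must already exist in your AWS account (see the how-to guide linked above for a complete example):

.. code-block:: python

    # Minimal sketch: submit a job against an existing job definition and queue.
    submit_batch_job = BatchOperator(
        task_id="submit_batch_job",
        job_name="example-job",
        job_definition="example-job-definition",
        job_queue="example-job-queue",
        container_overrides={"command": ["echo", "hello world"]},
    )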
    @property
    def operator_extra_links(self):
        op_extra_links = [BatchJobDetailsLink()]

        if isinstance(self, MappedOperator):
            wait_for_completion = self.partial_kwargs.get(
                "wait_for_completion"
            ) or self.expand_input.value.get("wait_for_completion")
            array_properties = self.partial_kwargs.get(
                "array_properties"
            ) or self.expand_input.value.get("array_properties")
        else:
            wait_for_completion = self.wait_for_completion
            array_properties = self.array_properties

        if wait_for_completion:
            op_extra_links.extend([BatchJobDefinitionLink(), BatchJobQueueLink()])
            if not array_properties:
                # There is no CloudWatch Link to the parent Batch Job available.
                op_extra_links.append(CloudWatchEventsLink())

        return tuple(op_extra_links)
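The ``MappedOperator`` branch above exists because, under dynamic task mapping, ``wait_for_completion`` and ``array_properties`` may be supplied through either ``partial()`` or ``expand()``. A hedged sketch of such a mapped task (placeholder names):

.. code-block:: python

    # Mapped sketch: each expanded task submits one job. Because
    # wait_for_completion is passed via partial(), it is read from
    # partial_kwargs when the extra links are computed.
    BatchOperator.partial(
        task_id="mapped_batch_job",
        job_name="example-job",
        job_definition="example-job-definition",
        job_queue="example-job-queue",
        wait_for_completion=True,
    ).expand(
        container_overrides=[
            {"command": ["echo", "a"]},
            {"command": ["echo", "b"]},
        ]
    )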
    def execute(self, context: Context) -> str | None:
        """
        Submit and monitor an AWS Batch job.

        :raises: AirflowException
        """
        self.submit_job(context)

        if self.deferrable:
            if not self.job_id:
                raise AirflowException("AWS Batch job - job_id was not found")

            job = self.hook.get_job_description(self.job_id)
            job_status = job.get("status")
            if job_status == self.hook.SUCCESS_STATE:
                self.log.info("Job completed.")
                return self.job_id
            elif job_status == self.hook.FAILURE_STATE:
                raise AirflowException(f"Error while running job: {self.job_id} is in {job_status} state")
            elif job_status in self.hook.INTERMEDIATE_STATES:
                self.defer(
                    timeout=self.execution_timeout,
                    trigger=BatchJobTrigger(
                        job_id=self.job_id,
                        waiter_max_attempts=self.max_retries,
                        aws_conn_id=self.aws_conn_id,
                        region_name=self.region_name,
                        waiter_delay=self.poll_interval,
                    ),
                    method_name="execute_complete",
                )

            raise AirflowException(f"Unexpected status: {job_status}")

        if self.wait_for_completion:
            self.monitor_job(context)

        return self.job_id
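When ``deferrable=True``, ``execute`` hands polling off to the triggerer via ``BatchJobTrigger`` instead of blocking a worker slot. A minimal sketch of enabling this mode, reusing the placeholder names from the example above:

.. code-block:: python

    # Deferrable sketch: the worker slot is released while the trigger
    # polls the job every poll_interval seconds, up to max_retries times.
    submit_batch_job = BatchOperator(
        task_id="submit_batch_job",
        job_name="example-job",
        job_definition="example-job-definition",
        job_queue="example-job-queue",
        deferrable=True,
        poll_interval=30,
    )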
    def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> str:
        validated_event = validate_execute_complete_event(event)

        if validated_event["status"] != "success":
            raise AirflowException(f"Error while running job: {validated_event}")

        self.log.info("Job completed.")
        return validated_event["job_id"]
    def on_kill(self):
        response = self.hook.client.terminate_job(jobId=self.job_id, reason="Task killed by the user")
        self.log.info("AWS Batch job (%s) terminated: %s", self.job_id, response)
    def monitor_job(self, context: Context):
        """
        Monitor an AWS Batch job.

        This can raise an exception or an AirflowTaskTimeout if the task was
        created with ``execution_timeout``.
        """
        if not self.job_id:
            raise AirflowException("AWS Batch job - job_id was not found")

        try:
            job_desc = self.hook.get_job_description(self.job_id)
            job_definition_arn = job_desc["jobDefinition"]
            job_queue_arn = job_desc["jobQueue"]
            self.log.info(
                "AWS Batch job (%s) Job Definition ARN: %r, Job Queue ARN: %r",
                self.job_id,
                job_definition_arn,
                job_queue_arn,
            )
        except KeyError:
            self.log.warning("AWS Batch job (%s) can't get Job Definition ARN and Job Queue ARN", self.job_id)
        else:
            BatchJobDefinitionLink.persist(
                context=context,
                operator=self,
                region_name=self.hook.conn_region_name,
                aws_partition=self.hook.conn_partition,
                job_definition_arn=job_definition_arn,
            )
            BatchJobQueueLink.persist(
                context=context,
                operator=self,
                region_name=self.hook.conn_region_name,
                aws_partition=self.hook.conn_partition,
                job_queue_arn=job_queue_arn,
            )

        if self.awslogs_enabled:
            if self.waiters:
                self.waiters.wait_for_job(self.job_id, get_batch_log_fetcher=self._get_batch_log_fetcher)
            else:
                self.hook.wait_for_job(self.job_id, get_batch_log_fetcher=self._get_batch_log_fetcher)
        else:
            if self.waiters:
                self.waiters.wait_for_job(self.job_id)
            else:
                self.hook.wait_for_job(self.job_id)

        awslogs = []
        try:
            awslogs = self.hook.get_job_all_awslogs_info(self.job_id)
        except AirflowException as ae:
            self.log.warning("Cannot determine where to find the AWS logs for this Batch job: %s", ae)

        if awslogs:
            self.log.info("AWS Batch job (%s) CloudWatch Events details found. Links to logs:", self.job_id)
            link_builder = CloudWatchEventsLink()
            for log in awslogs:
                self.log.info(link_builder.format_link(**log))
            if len(awslogs) > 1:
                # there can be several log streams on multi-node jobs
                self.log.warning(
                    "out of all those logs, we can only link to one in the UI. Using the first one."
                )

            CloudWatchEventsLink.persist(
                context=context,
                operator=self,
                region_name=self.hook.conn_region_name,
                aws_partition=self.hook.conn_partition,
                **awslogs[0],
            )

        self.hook.check_job_success(self.job_id)
        self.log.info("AWS Batch job (%s) succeeded", self.job_id)
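Streaming the job's CloudWatch output into the task log during monitoring is driven by the ``awslogs_enabled`` flag documented in the class docstring. A sketch with placeholder names (passing ``awslogs_fetch_interval`` as a ``timedelta`` is an assumption based on the module's imports and the 30-second default noted above):

.. code-block:: python

    # Sketch: print the job's CloudWatch logs while waiting for completion.
    submit_batch_job = BatchOperator(
        task_id="submit_batch_job",
        job_name="example-job",
        job_definition="example-job-definition",
        job_queue="example-job-queue",
        wait_for_completion=True,
        awslogs_enabled=True,
        awslogs_fetch_interval=timedelta(seconds=30),
    )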
class BatchCreateComputeEnvironmentOperator(BaseOperator):
    """
    Create an AWS Batch compute environment.

    .. seealso::
        For more information on how to use this operator, take a look at the guide:
        :ref:`howto/operator:BatchCreateComputeEnvironmentOperator`

    :param compute_environment_name: Name of the AWS Batch compute environment (templated).
    :param environment_type: Type of the compute environment.
    :param state: State of the compute environment.
    :param compute_resources: Details about the resources managed by the compute environment (templated).
        More details:
        https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/batch.html#Batch.Client.create_compute_environment
    :param unmanaged_v_cpus: Maximum number of vCPUs for an unmanaged compute environment.
        This parameter is only supported when the ``type`` parameter is set to ``UNMANAGED``.
    :param service_role: IAM role that allows Batch to make calls to other AWS services on your
        behalf (templated).
    :param tags: Tags that you apply to the compute environment to help you categorize and organize
        your resources.
    :param poll_interval: How long to wait in seconds between two polls of the environment status.
        Only useful when deferrable is True.
    :param max_retries: How many times to poll for the environment status.
        Only useful when deferrable is True.
    :param aws_conn_id: Connection ID of AWS credentials / region name.
        If None, the default boto3 credential strategy will be used.
    :param region_name: Region name to use in AWS Hook.
        Overrides the ``region_name`` in connection if provided.
    :param deferrable: If True, the operator will wait asynchronously for the environment to be created.
        This mode requires the aiobotocore module to be installed.
        (default: False)
    """
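A minimal sketch of a task using this operator; the subnet and security-group IDs are placeholders, and ``compute_resources`` accepts the full boto3 structure linked above:

.. code-block:: python

    # Minimal sketch: a managed Fargate compute environment.
    create_compute_environment = BatchCreateComputeEnvironmentOperator(
        task_id="create_compute_environment",
        compute_environment_name="example-environment",
        environment_type="MANAGED",
        state="ENABLED",
        compute_resources={
            "type": "FARGATE",
            "maxvCpus": 16,
            "subnets": ["subnet-00000000000000000"],
            "securityGroupIds": ["sg-00000000000000000"],
        },
    )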
    @cached_property
    def hook(self):
        """Create and return a BatchClientHook."""
        return BatchClientHook(
            aws_conn_id=self.aws_conn_id,
            region_name=self.region_name,
        )
    def execute(self, context: Context):
        """Create an AWS batch compute environment."""
        kwargs: dict[str, Any] = {
            "computeEnvironmentName": self.compute_environment_name,
            "type": self.environment_type,
            "state": self.state,
            "unmanagedvCpus": self.unmanaged_v_cpus,
            "computeResources": self.compute_resources,
            "serviceRole": self.service_role,
            "tags": self.tags,
        }
        response = self.hook.client.create_compute_environment(**trim_none_values(kwargs))
        arn = response["computeEnvironmentArn"]

        if self.deferrable:
            self.defer(
                trigger=BatchCreateComputeEnvironmentTrigger(
                    arn, self.poll_interval, self.max_retries, self.aws_conn_id, self.region_name
                ),
                method_name="execute_complete",
            )

        self.log.info("AWS Batch compute environment created successfully")
        return arn
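Note that ``trim_none_values`` filters the keyword arguments before the boto3 call, so optional parameters left as ``None`` are omitted entirely rather than sent to the API. Roughly (assumed behavior, inferred from the helper's name and its use here):

.. code-block:: python

    # e.g. with service_role and tags left unset, only explicit fields remain:
    trim_none_values({"state": "ENABLED", "serviceRole": None, "tags": None})
    # -> {"state": "ENABLED"}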
    def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> str:
        validated_event = validate_execute_complete_event(event)

        if validated_event["status"] != "success":
            raise AirflowException(
                f"Error while waiting for the compute environment to be ready: {validated_event}"
            )
        return validated_event["value"]